summaryrefslogtreecommitdiff
path: root/ybd/rpm.py
blob: 31a8c9ee6282f34a3a5760e122e663cb9fa1c2e8 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
import os
import sys
from collections import Mapping
from cache import cache_key, get_cache
from app import log, timer
import time
import app
import sandbox
import shutil
import yaml
import repos


# Arguments prepended to every `rpm`/`rpmbuild` invocation run inside the
# build sandbox.  The sandbox cannot be trusted to carry a usable rpm
# configuration, so every path and macro rpm depends on is pinned
# explicitly on the command line.
# (Original author's note: "Because rpm is otherwise totally broken".)
#
common_rpm_args = (
  '--dbpath=/var/lib/rpm '
  '--define "_rpmconfigdir /usr/lib/rpm" '
  '--define "_rpmlock_path /var/lib/rpm/.rpm.lock" '
  '--define "_fileattrsdir /usr/lib/rpm/fileattrs" '
  '--define "_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm" '
  '--define "_rpmfilename %{_build_name_fmt}" '
  '--define "_tmppath /tmp" '
  '--define "_unpackaged_files_terminate_build 0" '
  '--define "__spec_clean_cmd echo" ')


def foreach_def(dn, callback, user_data, traversed=None, whitelist=None):
    """Iterate over each chunk included in target `dn` in dependency order.

    @dn: The target definition dict, or its name
    @callback: Called as callback(definition, user_data) per definition
    @user_data: Opaque value passed through to @callback
    @traversed: Internal memo of already-visited definition names
    @whitelist: If given, @callback only runs for names in this list

    Generator yielding (ok, result) tuples, where `ok` is False when a
    definition had to be skipped (no cache key) and `result` is the
    callback's return value.  The @dn definition itself is visited last,
    after all of its dependencies.
    """
    if traversed is None:
        traversed = {}

    if type(dn) is not dict:
        dn = app.defs.get(dn)

    # if we can't calculate cache key, we can't create this component
    if cache_key(dn) is False:
        if 'tried' not in dn:
            log(dn, 'No cache_key, so skipping compose')
            dn['tried'] = True
        yield False, None
        # BUGFIX: previously execution fell through to the get_cache()
        # check below, which hard-exits with a misleading message.
        return

    # if dn is already cached, we're done
    if not get_cache(dn):
        log('SMTH', '%s is not cached, try again sucker !' % dn['name'],
            exit=True)

    # Recurse into any (sub)systems first so dependencies are yielded
    # before their dependents.
    systems = dn.get('systems', [])
    for system in systems:
        for s in system.get('subsystems', []):
            subsystem = app.defs.get(s['path'])
            for res in foreach_def(
                    subsystem, callback, user_data, traversed, whitelist):
                yield res[0], res[1]
        for res in foreach_def(
                system['path'], callback, user_data, traversed, whitelist):
            yield res[0], res[1]

    contents = dn.get('contents', [])
    for it in contents:
        item = app.defs.get(it)

        if traversed.get(item.get('name')):
            continue

        # Bootstrap-mode chunks are not part of the final payload.
        # (A redundant second `traversed` check was removed here.)
        if item.get('build-mode', 'staging') != 'bootstrap':
            for res in foreach_def(
                    item, callback, user_data, traversed, whitelist):
                yield res[0], res[1]

    # BUGFIX: `result` was referenced unbound below whenever a whitelist
    # excluded this definition; default to True so skipped definitions
    # are not reported as failures.
    result = True
    if whitelist is None or dn.get('name') in whitelist:
        result = callback(dn, user_data)

    traversed[dn.get('name')] = True
    yield True, result


def expand_macro(system, dn, text):
    """Expand the RPM macro expression `text` with `rpm --eval`.

    Runs rpm inside the sandbox for `system`, applying any 'defines'
    from dn's 'rpm-metadata' plus a '%{name}' define for dn itself.
    The raw output is appended to the system's build log; the stripped
    expansion is returned.
    """
    rpm_metadata = dn['rpm-metadata']
    # BUGFIX: the old 'rpm-metadata' format is a plain list of packages
    # (see generate_spec / extract_defines); calling .get() on it raised
    # AttributeError.  Only the new mapping format carries 'defines'.
    if isinstance(rpm_metadata, Mapping):
        metadata_defines = rpm_metadata.get('defines', [])
    else:
        metadata_defines = []

    defines = ['--define "{} {}"'.format(ent['key'], ent['value'])
               for ent in metadata_defines]
    defines.append('--define "{} {}"'.format('name', dn['name']))
    command = ('rpm ' + common_rpm_args + ' '.join(defines) +
               ' --eval=%s' % text)
    env_vars = sandbox.env_vars_for_build(system)
    # Force subprocess to return output, we will log after the call.
    _, output, _ = sandbox.run_sandboxed(
        system, command, env_vars, run_logged=False)
    with open(system['log'], 'a') as logfile:
        logfile.write(output)
    return output.strip()


def is_main_package(system, dn, package):
    """Return True when `package` expands to this definition's own name."""
    expanded = expand_macro(system, dn, package)
    return expanded == dn['name']


def generate_spec(dn, stage_dir, metafile, output, name, time, system):
    """Generate an RPM specfile at `output` for the definition `dn`.

    @dn: The chunk definition to package (must carry 'rpm-metadata')
    @stage_dir: The staged install directory (currently unused here)
    @metafile: Path of the chunk's baserock metadata YAML file
    @output: Path the generated specfile is written to
    @name: The main package name
    @time: Timestamp used as the package Version
    @system: The system definition, used to expand rpm macros

    Returns True on success, False when `dn` has no 'rpm-metadata'.
    """
    if 'rpm-metadata' not in dn:
        return False
    with open(metafile, "r") as metafile_f:
        metadata = yaml.safe_load(metafile_f)

    with open(output, "w") as output_f:
        # Support two formats of 'rpm-metadata', the older
        # format specifies 'rpm-metadata' as a list of packages,
        # the new format specifies 'rpm-metadata' as a dictionary
        # and the package list is found in its 'packages' member.
        rpm_metadata = dn['rpm-metadata']
        if isinstance(rpm_metadata, Mapping):
            package_list = rpm_metadata.get('packages', [])
        else:
            package_list = rpm_metadata

        description = 'No Description'

        if metadata.get('description') is not None:
            description = metadata.get('description')

        # Write out the package header first
        output_f.write('Name: %s\n' % name)
        output_f.write('Summary: %s\n' % description)
        output_f.write('Version: %s\n' % time)
        output_f.write('Release: %s\n' % dn['sha'])
        output_f.write('License: %s\n' % 'Undetermined')
        output_f.write('Prefix: %s\n' % dn.get('prefix', '/usr'))
        # Add all the main package Provides:
        for package in package_list:
            if is_main_package(system, dn, package['name']):
                if 'provides' in package:
                    for provide in package['provides']:
                        output_f.write('Provides: %s\n' % provide)
        output_f.write('\n')

        # BUGFIX: this write has no %-formatting applied, so the old
        # '%%description' literal emitted a double percent sign and
        # rpmbuild never recognised the main package's section.
        output_f.write('%description\n')
        output_f.write('%s\n' % description)
        output_f.write('\n')

        for package in package_list:
            if not is_main_package(system, dn, package['name']):
                # Sub-Package header ('%%' collapses to '%' under the
                # %-formatting used on these lines)
                output_f.write('%%package -n %s\n' % package['name'])
                output_f.write('Summary: %s\n' % description)
                output_f.write('\n')
                if 'provides' in package:
                    for provide in package['provides']:
                        output_f.write('Provides: %s\n' % provide)
                output_f.write('%%description -n %s\n' % package['name'])
                output_f.write('%s\n' % description)
                output_f.write('\n')

            # Scriptlets for this package
            for cmd in ['pre', 'preun', 'post', 'postun']:
                if cmd in package:
                    if 'interpreter' in package[cmd]:
                        interp_str = ' -p %s' % package[cmd]['interpreter']
                    else:
                        # To be explicit, this might default to /bin/sh,
                        # but that's more verbose than is strictly needed.
                        interp_str = ''
                    output_f.write('%%%s -n %s%s\n'
                                   % (cmd, package['name'], interp_str))
                    if 'body' in package[cmd]:
                        # Expecting it to be a single multi-line command,
                        # since running as separate commands discards env.
                        output_f.write(package[cmd]['body'] + '\n')
                    output_f.write('\n')

            # Skip writing files if nonexistent/empty
            if type(package.get('files')) is not list:
                continue

            # Package files
            if package['name'] == name:
                # BUGFIX: was '%%files' with no %-formatting, emitting a
                # literal '%%' and breaking the main %files section.
                output_f.write('%files\n')
            else:
                output_f.write('%%files -n %s\n' % package['name'])
            for filename in package['files']:
                output_f.write(filename + "\n")
            output_f.write('\n')

    return True


def extract_defines(dn):
    """Build the ' --define "key value"' argument string for `dn`.

    Returns the concatenated --define arguments from dn's
    'rpm-metadata' mapping, or an empty string when the metadata is
    missing, uses the old list format, or carries no 'defines'.
    """
    meta = dn.get('rpm-metadata')
    if meta is None or isinstance(meta, list):
        return ''

    defines = meta.get('defines')
    if defines is None:
        return ''

    return ''.join(' --define "{} {}"'.format(d.get('key'), d.get('value'))
                   for d in defines)


def package_one_rpm(dn, userdata):
    """Stage one chunk in the system sandbox and build its rpm(s).

    Intended as a foreach_def() callback.  `userdata` is a dict with
    'system' (the staged system definition) and 'time' (the Version
    stamp shared by every generated package).

    Returns True on success (including when the chunk has no
    'rpm-metadata' and a spec could not be generated), False for
    non-chunk definitions or a failed rpmbuild run.
    """
    system = userdata['system']
    # Renamed from `time`: that local shadowed the imported time module.
    build_time = userdata['time']
    kind = dn.get('kind')
    name = dn.get('name')

    if kind == 'chunk' or kind is None:

        with timer(name):
            subdir = '%s.inst' % name
            fulldir = os.path.join(system['sandbox'], subdir)
            metadir = os.path.join(system['sandbox'], '%s.meta' % name)
            baserockdir = os.path.join(fulldir, 'baserock')

            # Install the chunk we're gonna package under subdir
            sandbox.install(system, dn, subdir)

            # Move the baserock directory out of the way,
            # we don't package the metadata
            shutil.move(baserockdir, metadir)

            # Generate the specfile in the metadir, note that we use
            # the metadata for the given package from the system metadata
            # directory, not the metadata for the specific chunk artifact

            # XXX Right now the chunk's individual metadata is richer, it
            # includes the description, change this to use the system
            # metadata for that chunk later !!!
            metafile = os.path.join(metadir, '%s.meta' % name)
            specfile = os.path.join(metadir, '%s.spec' % name)
            success = True

            if generate_spec(dn, fulldir, metafile, specfile,
                             name, build_time, system):
                defines = extract_defines(dn)

                # XXX Now we gonna run rpmbuild in the sandbox !!!
                command = ('rpmbuild ' + common_rpm_args +
                           ' --buildroot=/%s.inst' % name +
                           ' --define "_rpmdir /RPMS"' + defines +
                           ' --target %s' % app.config['cpu'] +
                           ' -bb /%s.meta/%s.spec' % (name, name))
                env_vars = sandbox.env_vars_for_build(system)

                # Keep building all the rpms we can even if one fails,
                # we'd rather see everything that failed in a log at once.
                success, _, _ = sandbox.run_sandboxed(
                    system, command, env_vars, exit_on_error=False)
            else:
                app.log(dn, "Failed to generate spec, not generating rpm")

            if success:
                app.log(dn, "Removing sandbox dir", fulldir, verbose=True)
                shutil.rmtree(fulldir)
                shutil.rmtree(metadir)
            else:
                app.log(dn, "Failed rpm build sandbox dir:", fulldir)

            return success
    return False


def package_rpms(system, time, whitelist=None):
    """Stage @system and package each of its chunks as rpms.

    @system: The system (definition or name) to package rpms for; it
             must contain an rpm installation.
    @time: The number of whole seconds since the epoch, used as the
           Version of every generated package.  Passed in (rather than
           sampled here) because package_rpms may be called for several
           systems when a cluster was specified on the command line.
    @whitelist: Optional whitelist of chunk names to package rpms for.

    The system is staged in a sandbox, then each chunk is staged and
    packaged in a chroot in order of build dependency — this should let
    rpm infer package dependencies correctly.  The resulting RPMS
    directory and the generated rpm database are moved into the
    deployment area.  Exits the process if any rpm fails to build.
    """
    if type(system) is not dict:
        system = app.defs.get(system)

    with sandbox.setup(system):
        install_contents(system)

        # Fail now if missing `rpm` or `rpmbuild`
        env_vars = sandbox.env_vars_for_build(system)
        sandbox.run_sandboxed(system, 'rpm --version', env_vars)
        sandbox.run_sandboxed(system, 'rpmbuild --version', env_vars)

        # First initialize the db
        rpmdb_path = os.path.join(system['sandbox'], 'var', 'lib', 'rpm')
        rpmdir = os.path.join(system['sandbox'], 'RPMS')
        command = 'rpm ' + common_rpm_args + ' --initdb'
        if not os.path.exists(rpmdb_path):
            os.makedirs(rpmdb_path)
        if not os.path.exists(rpmdir):
            os.makedirs(rpmdir)
        sandbox.run_sandboxed(system, command, env_vars)

        # Package each rpm in order of build dependency
        package_results = foreach_def(
            system, package_one_rpm, {'system': system, 'time': time},
            whitelist=whitelist)
        errors = any(not t[1] for t in package_results)
        if errors:
            log(system, 'ERROR: Failed to successfully generate all rpms!')
            sys.exit(1)

        # Move the resulting RPMS directory into the deployment area
        rpm_destdir = os.path.join(app.config['deployment'], 'RPMs',
                                   cache_key(system))
        if not os.path.exists(rpm_destdir):
            os.makedirs(rpm_destdir)
        for entry in os.listdir(rpmdir):
            # rename() replaces an existing destination, which is fine
            # since the RPMs have already been produced at this point.
            # (srcfile/dstfile were previously computed but left unused.)
            srcfile = os.path.join(rpmdir, entry)
            dstfile = os.path.join(rpm_destdir, entry)
            os.rename(srcfile, dstfile)

        # Move the generated RPM database into the deployment area
        rpm_dbsrc = os.path.join(system['sandbox'], 'var', 'lib', 'rpm')
        rpm_dbdest = os.path.join(rpm_destdir, "db")
        if os.path.exists(rpm_dbdest):
            shutil.rmtree(rpm_dbdest)
        shutil.move(rpm_dbsrc, rpm_dbdest)


#
# XXX Stuff taken from assembly.py in ybd
#
def install_contents(dn, contents=None):
    ''' Install contents (recursively) into dn['sandbox'] '''

    if contents is None:
        contents = dn.get('contents', [])

    log(dn, 'Installing contents\n', contents, verbose=True)

    for it in contents:
        item = app.defs.get(it)
        meta_path = os.path.join(dn['sandbox'], 'baserock',
                                 item['name'] + '.meta')

        # A meta file in the sandbox means this content was installed
        # on an earlier pass.
        if os.path.exists(meta_path):
            log(dn, 'Already installed', item['name'], verbose=True)
            continue

        # Install this item's own contents first, one at a time.
        for sub_item in item.get('contents', []):
            install_contents(dn, [sub_item])

        # Bootstrap-mode chunks are never staged into the system.
        if item.get('build-mode', 'staging') == 'bootstrap':
            continue

        if not get_cache(item):
            log('RPM',
                "%s isn't cached, can't stage the system!" % item['name'],
                exit=True)
        sandbox.install(dn, item)