summaryrefslogtreecommitdiff
path: root/morphlib/remoteartifactcache.py
blob: 45933d1015eadaa4b0282369c94922d37aaeccf4 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
# Copyright (C) 2012-2015  Codethink Limited
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.


import cliapp
import logging
import shutil
import urllib
import urllib2
import urlparse


class HeadRequest(urllib2.Request):  # pragma: no cover

    '''A urllib2 request that performs an HTTP HEAD instead of a GET.

    urllib2 asks the request object for the HTTP verb via get_method(),
    so overriding it is all that is needed to probe for a file's
    existence without downloading its body.

    '''

    def get_method(self):
        return 'HEAD'


class GetError(cliapp.AppException):

    '''Raised when fetching a file from a remote artifact cache fails.'''

    def __init__(self, cache, filename, error):
        message = (
            'Failed to get the file %s from the artifact cache %s: %s'
            % (filename, cache, error))
        cliapp.AppException.__init__(self, message)


class RemoteArtifactCache(object):

    '''Read-only HTTP client for a remote Morph artifact cache server.

    Files are looked up with HEAD requests and downloaded into a local
    artifact cache (`lac`), which provides the writable file handles.

    '''

    def __init__(self, server_url):
        self.server_url = server_url

    def has(self, artifact):
        '''Return True if the remote cache holds the artifact itself.'''
        return self._has_file(artifact.basename())

    def has_artifact_metadata(self, artifact, name):
        '''Return True if the remote cache holds metadata for artifact.'''
        return self._has_file(artifact.metadata_basename(name))

    def has_source_metadata(self, source, cachekey, name):
        '''Return True if the remote cache holds metadata for a source.'''
        filename = '%s.%s' % (cachekey, name)
        return self._has_file(filename)

    def _has_file(self, filename):  # pragma: no cover
        '''Probe for a remote file's existence with an HTTP HEAD request.'''
        url = self._request_url(filename)
        logging.debug('RemoteArtifactCache._has_file: url=%s' % url)
        request = HeadRequest(url)
        try:
            urllib2.urlopen(request)
            return True
        except (urllib2.HTTPError, urllib2.URLError):
            # Any HTTP error status or connection failure is treated as
            # "not present".
            return False

    def _request_url(self, filename):  # pragma: no cover
        '''Build the URL used to fetch filename from the cache server.'''
        server_url = self.server_url
        if not server_url.endswith('/'):
            server_url += '/'
        return urlparse.urljoin(
            server_url, '/1.0/artifacts?filename=%s' %
            urllib.quote(filename))

    def __str__(self):  # pragma: no cover
        return self.server_url

    def _fetch_file(self, remote_filename, local_file):
        '''Download one remote file into an open local file object.

        Raises GetError if the transfer fails.

        '''
        remote_url = self._request_url(remote_filename)
        logging.debug('RemoteArtifactCache._fetch_file: url=%s' % remote_url)

        try:
            remote_file = urllib2.urlopen(remote_url)
            shutil.copyfileobj(remote_file, local_file)
        except (urllib2.HTTPError, urllib2.URLError) as e:
            # Bug fix: HTTPError and URLError live in urllib2, not urllib;
            # naming urllib.HTTPError here raised AttributeError instead of
            # catching the download failure.
            logging.debug(str(e))
            raise GetError(self, remote_filename, e)

    def _fetch_files(self, to_fetch):
        '''Fetch a set of files atomically.

        If an error occurs during the transfer of any files, all downloaded
        data is deleted, to reduce the chances of having artifacts in the local
        cache that are missing their metadata, and so on.

        This assumes that the morphlib.savefile module is used so the file
        handles passed in to_fetch have a .abort() method.

        '''
        try:
            for remote_filename, local_file in to_fetch:
                self._fetch_file(remote_filename, local_file)
        except BaseException:
            # Roll back every partially-written file so the local cache
            # never ends up with only part of the set.
            for _, local_file in to_fetch:
                local_file.abort()
            raise
        else:
            for _, local_file in to_fetch:
                local_file.close()

    def get_artifact(self, artifact, lac, status_cb=None):
        '''Ensure an artifact is available in the local artifact cache.

        `lac` is the local artifact cache; `status_cb`, if given, is called
        with progress keywords before any downloading starts.

        '''
        to_fetch = []
        if not lac.has(artifact):
            to_fetch.append(
                (artifact.basename(), lac.put(artifact)))

        if artifact.source.morphology.needs_artifact_metadata_cached:
            if not lac.has_artifact_metadata(artifact, 'meta'):
                # Bug fix: metadata_basename() takes only the metadata
                # name (cf. has_artifact_metadata above); the artifact was
                # erroneously passed as an extra positional argument.
                to_fetch.append((
                    artifact.metadata_basename('meta'),
                    lac.put_artifact_metadata(artifact, 'meta')))

        if len(to_fetch) > 0:
            if status_cb:
                status_cb(
                    msg='Fetching to local cache: artifact %(name)s',
                    name=artifact.name)

            self._fetch_files(to_fetch)

    def get_artifacts(self, artifacts, lac, status_cb=None):
        '''Ensure multiple artifacts are available in the local cache.'''

        # FIXME: Running the downloads in parallel may give a speed boost, as
        # many of these are small files.

        for artifact in artifacts:
            self.get_artifact(artifact, lac, status_cb=status_cb)