author     Richard Ipsum <richard.ipsum@codethink.co.uk>  2014-10-09 16:06:08 +0100
committer  Richard Ipsum <richard.ipsum@codethink.co.uk>  2014-11-04 12:40:54 +0000
commit     88a3d0787058fdb8820547d55c63bc68eee2db51 (patch)
tree       e7d1c21c9944b954b5b4c5690088362ab01a475b
parent     8f5c2a182583072ffd43e508ec1af9d5e179e78e (diff)
download   import-88a3d0787058fdb8820547d55c63bc68eee2db51.tar.gz
Catch exceptions for anything network related
-rwxr-xr-x  import/pip.to_lorry  23
1 file changed, 18 insertions, 5 deletions
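
The pattern is the same at every call site touched below: the PyPI request runs inside a try block, any exception is reported on stderr, and the tool exits non-zero instead of dying with a traceback. As a minimal standalone sketch of that pattern (the helper name and PYPI_URL are taken from the diff; the print_function import and the __main__ wrapper are assumptions added only to make the snippet runnable on its own under Python 2):

    from __future__ import print_function  # assumption: pip.to_lorry targets Python 2

    import sys

    import requests

    PYPI_URL = 'http://pypi.python.org/pypi'

    def fetch_package_metadata(package_name):
        # Any network failure (DNS error, refused connection, timeout, or a
        # body that is not JSON) surfaces as an exception; report it on
        # stderr and exit non-zero instead of letting the traceback escape.
        try:
            return requests.get('%s/%s/json' % (PYPI_URL, package_name)).json()
        except Exception as e:
            print("Couldn't fetch package metadata: ", e, file=sys.stderr)
            sys.exit(1)

    if __name__ == '__main__':
        metadata = fetch_package_metadata(sys.argv[1])
        print(sorted(metadata.keys()))
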
diff --git a/import/pip.to_lorry b/import/pip.to_lorry
index d91c06c..7dd1678 100755
--- a/import/pip.to_lorry
+++ b/import/pip.to_lorry
@@ -31,8 +31,11 @@ import xmlrpclib
 PYPI_URL = 'http://pypi.python.org/pypi'
 
 def fetch_package_metadata(package_name):
-    return requests.get('%s/%s/json'
-                        % (PYPI_URL, package_name)).json()
+    try:
+        return requests.get('%s/%s/json' % (PYPI_URL, package_name)).json()
+    except Exception as e:
+        print("Couldn't fetch package metadata: ", e, file=sys.stderr)
+        sys.exit(1)
 
 def find_repo_type(url):
     vcss = [('git', 'clone'), ('hg', 'clone'),
@@ -106,8 +109,12 @@ def filter_urls(urls):
     return filter(allowed_extension, urls)
 
 def str_tarball_lorry(package_name):
-    client = xmlrpclib.ServerProxy(PYPI_URL)
-    releases = client.package_releases(package_name)
+    try:
+        client = xmlrpclib.ServerProxy(PYPI_URL)
+        releases = client.package_releases(package_name)
+    except Exception as e:
+        print("Couldn't fetch release data:", e, file=sys.stderr)
+        sys.exit(1)
 
     if len(releases) == 0:
         print("Couldn't find any releases for packge %s, exiting" % package_name)
@@ -122,7 +129,13 @@ def str_tarball_lorry(package_name):
     print('Fetching urls for package %s with version %s'
           % (package_name, release_version))
 
-    urls = client.release_urls(package_name, release_version)
+
+    try:
+        urls = client.release_urls(package_name, release_version)
+    except Exception as e:
+        print("Couldn't fetch release urls:", e, file=sys.stderr)
+        sys.exit(1)
+
     tarball_urls = filter_urls(urls)
 
     if len(tarball_urls) > 0:
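
The xmlrpclib calls get the same guard. A sketch of the release lookup in isolation (fetch_package_releases is a hypothetical helper name; only the ServerProxy construction and the package_releases call are taken from the diff):

    from __future__ import print_function

    import sys
    import xmlrpclib

    PYPI_URL = 'http://pypi.python.org/pypi'

    def fetch_package_releases(package_name):
        # Both the proxy construction and the RPC call talk to the network,
        # so both sit inside the guard; any failure ends the run cleanly.
        try:
            client = xmlrpclib.ServerProxy(PYPI_URL)
            return client.package_releases(package_name)
        except Exception as e:
            print("Couldn't fetch release data:", e, file=sys.stderr)
            sys.exit(1)

Catching bare Exception is deliberately broad; for a one-shot import tool that is arguably fine, since the only sensible response to any network failure here is to report it and stop, but a narrower version could catch requests.exceptions.RequestException and xmlrpclib.ProtocolError / socket.error instead.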