summaryrefslogtreecommitdiff
path: root/lib/ansible/galaxy
diff options
context:
space:
mode:
authorJordan Borean <jborean93@gmail.com>2019-08-21 07:49:05 +1000
committerGitHub <noreply@github.com>2019-08-21 07:49:05 +1000
commite747487720ce682798cfdbbfecb9b7299f9a6a2b (patch)
tree609f69adc2d5c3c34c3a76fb55a42a134ff29093 /lib/ansible/galaxy
parent4d424d0830b5c184fe63c931931fd36437a37cb7 (diff)
downloadansible-e747487720ce682798cfdbbfecb9b7299f9a6a2b.tar.gz
ansible-galaxy - define multiple galaxy instances in ansible.cfg (#60553)
* ansible-galaxy: support multiple servers on install * Added docs for the server configuration file * Fix up doc string for requirements file format * Fix bugs after testing * Fix kwarg doc and added version * Fix typo and doc improvement * Fix base64 encoding and allow --server to override list
Diffstat (limited to 'lib/ansible/galaxy')
-rw-r--r--lib/ansible/galaxy/__init__.py3
-rw-r--r--lib/ansible/galaxy/api.py113
-rw-r--r--lib/ansible/galaxy/collection.py161
-rw-r--r--lib/ansible/galaxy/role.py3
-rw-r--r--lib/ansible/galaxy/token.py40
5 files changed, 137 insertions, 183 deletions
diff --git a/lib/ansible/galaxy/__init__.py b/lib/ansible/galaxy/__init__.py
index dcfdb9049c..2be986c54c 100644
--- a/lib/ansible/galaxy/__init__.py
+++ b/lib/ansible/galaxy/__init__.py
@@ -26,6 +26,7 @@ __metaclass__ = type
import os
import yaml
+import ansible.constants as C
from ansible import context
from ansible.module_utils._text import to_bytes
@@ -45,7 +46,7 @@ class Galaxy(object):
def __init__(self):
# roles_path needs to be a list and will be by default
- roles_path = context.CLIARGS.get('roles_path', tuple())
+ roles_path = context.CLIARGS.get('roles_path', C.DEFAULT_ROLES_PATH)
# cli option handling is responsible for splitting roles_path
self.roles_paths = roles_path
diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py
index 5845ba6b45..7df9d3b1af 100644
--- a/lib/ansible/galaxy/api.py
+++ b/lib/ansible/galaxy/api.py
@@ -22,14 +22,11 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import base64
import json
-from functools import wraps
-
from ansible import context
-import ansible.constants as C
from ansible.errors import AnsibleError
-from ansible.galaxy.token import GalaxyToken
from ansible.module_utils.six import string_types
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible.module_utils.six.moves.urllib.parse import quote as urlquote, urlencode
@@ -40,26 +37,16 @@ from ansible.utils.display import Display
display = Display()
-def requires_token(func):
- ''' wrapper to laziliy initialize token file '''
- @wraps(func)
- def wrapped(self, *args, **kwargs):
- if self.token is None:
- self.token = GalaxyToken()
- return func(self, *args, **kwargs)
- return wrapped
-
-
def g_connect(method):
''' wrapper to lazily initialize connection info to galaxy '''
def wrapped(self, *args, **kwargs):
if not self.initialized:
- display.vvvv("Initial connection to galaxy_server: %s" % self._api_server)
+ display.vvvv("Initial connection to galaxy_server: %s" % self.api_server)
server_version = self._get_server_api_version()
if server_version not in self.SUPPORTED_VERSIONS:
raise AnsibleError("Unsupported Galaxy server API version: %s" % server_version)
- self.baseurl = '%s/api/%s' % (self._api_server, server_version)
+ self.baseurl = _urljoin(self.api_server, "api", server_version)
self.version = server_version # for future use
display.vvvv("Base API: %s" % self.baseurl)
self.initialized = True
@@ -67,40 +54,52 @@ def g_connect(method):
return wrapped
+def _urljoin(*args):
+ return '/'.join(to_native(a, errors='surrogate_or_strict').rstrip('/') for a in args + ('',))
+
+
class GalaxyAPI(object):
''' This class is meant to be used as a API client for an Ansible Galaxy server '''
SUPPORTED_VERSIONS = ['v1']
- def __init__(self, galaxy):
+ def __init__(self, galaxy, name, url, username=None, password=None, token=None):
self.galaxy = galaxy
- self.token = None
- self._api_server = C.GALAXY_SERVER
- self._validate_certs = not context.CLIARGS['ignore_certs']
+ self.name = name
+ self.username = username
+ self.password = password
+ self.token = token
+ self.api_server = url
+ self.validate_certs = not context.CLIARGS['ignore_certs']
self.baseurl = None
self.version = None
self.initialized = False
- display.debug('Validate TLS certificates: %s' % self._validate_certs)
-
- # set the API server
- if context.CLIARGS['api_server'] != C.GALAXY_SERVER:
- self._api_server = context.CLIARGS['api_server']
-
- @requires_token
- def __auth_header(self):
- token = self.token.get()
- if token is None:
- raise AnsibleError("No access token. You must first use login to authenticate and obtain an access token.")
- return {'Authorization': 'Token ' + token}
+ display.debug('Validate TLS certificates for %s: %s' % (self.api_server, self.validate_certs))
+
+ def _auth_header(self, required=True):
+ token = self.token.get() if self.token else None
+
+ if token:
+ return {'Authorization': "Token %s" % token}
+ elif self.username:
+ token = "%s:%s" % (to_text(self.username, errors='surrogate_or_strict'),
+ to_text(self.password, errors='surrogate_or_strict', nonstring='passthru') or '')
+ b64_val = base64.b64encode(to_bytes(token, encoding='utf-8', errors='surrogate_or_strict'))
+ return {'Authorization': "Basic %s" % to_text(b64_val)}
+ elif required:
+ raise AnsibleError("No access token or username set. A token can be set with --api-key, with "
+ "'ansible-galaxy login', or set in ansible.cfg.")
+ else:
+ return {}
@g_connect
def __call_galaxy(self, url, args=None, headers=None, method=None):
if args and not headers:
- headers = self.__auth_header()
+ headers = self._auth_header()
try:
display.vvv(url)
- resp = open_url(url, data=args, validate_certs=self._validate_certs, headers=headers, method=method,
+ resp = open_url(url, data=args, validate_certs=self.validate_certs, headers=headers, method=method,
timeout=20)
data = json.loads(to_text(resp.read(), errors='surrogate_or_strict'))
except HTTPError as e:
@@ -108,22 +107,14 @@ class GalaxyAPI(object):
raise AnsibleError(res['detail'])
return data
- @property
- def api_server(self):
- return self._api_server
-
- @property
- def validate_certs(self):
- return self._validate_certs
-
def _get_server_api_version(self):
"""
Fetches the Galaxy API current version to ensure
the API server is up and reachable.
"""
- url = '%s/api/' % self._api_server
+ url = _urljoin(self.api_server, "api")
try:
- return_data = open_url(url, validate_certs=self._validate_certs)
+ return_data = open_url(url, validate_certs=self.validate_certs)
except Exception as e:
raise AnsibleError("Failed to get data from the API server (%s): %s " % (url, to_native(e)))
@@ -142,9 +133,9 @@ class GalaxyAPI(object):
"""
Retrieve an authentication token
"""
- url = '%s/tokens/' % self.baseurl
+ url = _urljoin(self.baseurl, "tokens")
args = urlencode({"github_token": github_token})
- resp = open_url(url, data=args, validate_certs=self._validate_certs, method="POST")
+ resp = open_url(url, data=args, validate_certs=self.validate_certs, method="POST")
data = json.loads(to_text(resp.read(), errors='surrogate_or_strict'))
return data
@@ -153,7 +144,7 @@ class GalaxyAPI(object):
"""
Post an import request
"""
- url = '%s/imports/' % self.baseurl
+ url = _urljoin(self.baseurl, "imports")
args = {
"github_user": github_user,
"github_repo": github_repo,
@@ -173,7 +164,7 @@ class GalaxyAPI(object):
"""
Check the status of an import task.
"""
- url = '%s/imports/' % self.baseurl
+ url = _urljoin(self.baseurl, "imports")
if task_id is not None:
url = "%s?id=%d" % (url, task_id)
elif github_user is not None and github_repo is not None:
@@ -200,7 +191,7 @@ class GalaxyAPI(object):
except Exception:
raise AnsibleError("Invalid role name (%s). Specify role as format: username.rolename" % role_name)
- url = '%s/roles/?owner__username=%s&name=%s' % (self.baseurl, user_name, role_name)
+ url = _urljoin(self.baseurl, "roles", "?owner__username=%s&name=%s" % (user_name, role_name))[:-1]
data = self.__call_galaxy(url)
if len(data["results"]) != 0:
return data["results"][0]
@@ -215,12 +206,12 @@ class GalaxyAPI(object):
results = []
try:
- url = '%s/roles/%s/%s/?page_size=50' % (self.baseurl, role_id, related)
+ url = _urljoin(self.baseurl, "roles", role_id, related, "?page_size=50")[:-1]
data = self.__call_galaxy(url)
results = data['results']
done = (data.get('next_link', None) is None)
while not done:
- url = '%s%s' % (self._api_server, data['next_link'])
+ url = _urljoin(self.api_server, data['next_link'])
data = self.__call_galaxy(url)
results += data['results']
done = (data.get('next_link', None) is None)
@@ -234,7 +225,7 @@ class GalaxyAPI(object):
Fetch the list of items specified.
"""
try:
- url = '%s/%s/?page_size' % (self.baseurl, what)
+ url = _urljoin(self.baseurl, what, "?page_size")[:-1]
data = self.__call_galaxy(url)
if "results" in data:
results = data['results']
@@ -244,7 +235,7 @@ class GalaxyAPI(object):
if "next" in data:
done = (data.get('next_link', None) is None)
while not done:
- url = '%s%s' % (self._api_server, data['next_link'])
+ url = _urljoin(self.api_server, data['next_link'])
data = self.__call_galaxy(url)
results += data['results']
done = (data.get('next_link', None) is None)
@@ -255,7 +246,7 @@ class GalaxyAPI(object):
@g_connect
def search_roles(self, search, **kwargs):
- search_url = self.baseurl + '/search/roles/?'
+ search_url = _urljoin(self.baseurl, "search", "roles", "?")[:-1]
if search:
search_url += '&autocomplete=' + to_text(urlquote(to_bytes(search)))
@@ -284,7 +275,7 @@ class GalaxyAPI(object):
@g_connect
def add_secret(self, source, github_user, github_repo, secret):
- url = "%s/notification_secrets/" % self.baseurl
+ url = _urljoin(self.baseurl, "notification_secrets")
args = urlencode({
"source": source,
"github_user": github_user,
@@ -296,18 +287,18 @@ class GalaxyAPI(object):
@g_connect
def list_secrets(self):
- url = "%s/notification_secrets" % self.baseurl
- data = self.__call_galaxy(url, headers=self.__auth_header())
+ url = _urljoin(self.baseurl, "notification_secrets")
+ data = self.__call_galaxy(url, headers=self._auth_header())
return data
@g_connect
def remove_secret(self, secret_id):
- url = "%s/notification_secrets/%s/" % (self.baseurl, secret_id)
- data = self.__call_galaxy(url, headers=self.__auth_header(), method='DELETE')
+ url = _urljoin(self.baseurl, "notification_secrets", secret_id)
+ data = self.__call_galaxy(url, headers=self._auth_header(), method='DELETE')
return data
@g_connect
def delete_role(self, github_user, github_repo):
- url = "%s/removerole/?github_user=%s&github_repo=%s" % (self.baseurl, github_user, github_repo)
- data = self.__call_galaxy(url, headers=self.__auth_header(), method='DELETE')
+ url = _urljoin(self.baseurl, "removerole", "?github_user=%s&github_repo=%s" % (github_user, github_repo))[:-1]
+ data = self.__call_galaxy(url, headers=self._auth_header(), method='DELETE')
return data
diff --git a/lib/ansible/galaxy/collection.py b/lib/ansible/galaxy/collection.py
index e2a7de53f2..7c19ee82a1 100644
--- a/lib/ansible/galaxy/collection.py
+++ b/lib/ansible/galaxy/collection.py
@@ -24,6 +24,7 @@ from yaml.error import YAMLError
import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.galaxy import get_collections_galaxy_meta_info
+from ansible.galaxy.api import _urljoin
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils import six
from ansible.utils.collection_loader import is_collection_ref
@@ -44,8 +45,8 @@ class CollectionRequirement:
_FILE_MAPPING = [(b'MANIFEST.json', 'manifest_file'), (b'FILES.json', 'files_file')]
- def __init__(self, namespace, name, b_path, source, versions, requirement, force, parent=None, validate_certs=True,
- metadata=None, files=None, skip=False):
+ def __init__(self, namespace, name, b_path, api, versions, requirement, force, parent=None, metadata=None,
+ files=None, skip=False):
"""
Represents a collection requirement, the versions that are available to be installed as well as any
dependencies the collection has.
@@ -53,12 +54,11 @@ class CollectionRequirement:
:param namespace: The collection namespace.
:param name: The collection name.
:param b_path: Byte str of the path to the collection tarball if it has already been downloaded.
- :param source: The Galaxy server URL to download if the collection is from Galaxy.
+ :param api: The GalaxyAPI to use if the collection is from Galaxy.
:param versions: A list of versions of the collection that are available.
:param requirement: The version requirement string used to verify the list of versions fit the requirements.
:param force: Whether the force flag applied to the collection.
:param parent: The name of the parent the collection is a dependency of.
- :param validate_certs: Whether to validate the Galaxy server certificate.
:param metadata: The collection metadata dict if it has already been retrieved.
:param files: The files that exist inside the collection. This is based on the FILES.json file inside the
collection artifact.
@@ -68,12 +68,11 @@ class CollectionRequirement:
self.namespace = namespace
self.name = name
self.b_path = b_path
- self.source = source
+ self.api = api
self.versions = set(versions)
self.force = force
self.skip = skip
self.required_by = []
- self._validate_certs = validate_certs
self._metadata = metadata
self._files = files
@@ -120,7 +119,7 @@ class CollectionRequirement:
msg = "Cannot meet dependency requirement '%s:%s' for collection %s" \
% (to_text(self), requirement, parent)
- collection_source = to_text(self.b_path, nonstring='passthru') or self.source
+ collection_source = to_text(self.b_path, nonstring='passthru') or self.api.api_server
req_by = "\n".join(
"\t%s - '%s:%s'" % (to_text(p) if p else 'base', to_text(self), r)
for p, r in self.required_by
@@ -147,7 +146,9 @@ class CollectionRequirement:
if self.b_path is None:
download_url = self._galaxy_info['download_url']
artifact_hash = self._galaxy_info['artifact']['sha256']
- self.b_path = _download_file(download_url, b_temp_path, artifact_hash, self._validate_certs)
+ headers = self.api._auth_header(required=False)
+ self.b_path = _download_file(download_url, b_temp_path, artifact_hash, self.api.validate_certs,
+ headers=headers)
if os.path.exists(b_collection_path):
shutil.rmtree(b_collection_path)
@@ -180,9 +181,10 @@ class CollectionRequirement:
if self._metadata:
return
- n_collection_url = _urljoin(self.source, 'api', 'v2', 'collections', self.namespace, self.name, 'versions',
- self.latest_version)
- details = json.load(open_url(n_collection_url, validate_certs=self._validate_certs))
+ n_collection_url = _urljoin(self.api.api_server, 'api', 'v2', 'collections', self.namespace, self.name,
+ 'versions', self.latest_version)
+ details = json.load(open_url(n_collection_url, validate_certs=self.api.validate_certs,
+ headers=self.api._auth_header(required=False)))
self._galaxy_info = details
self._metadata = details['metadata']
@@ -225,7 +227,7 @@ class CollectionRequirement:
return False
@staticmethod
- def from_tar(b_path, validate_certs, force, parent=None):
+ def from_tar(b_path, force, parent=None):
if not tarfile.is_tarfile(b_path):
raise AnsibleError("Collection artifact at '%s' is not a valid tar file." % to_native(b_path))
@@ -254,10 +256,10 @@ class CollectionRequirement:
version = meta['version']
return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
- validate_certs=validate_certs, metadata=meta, files=files)
+ metadata=meta, files=files)
@staticmethod
- def from_path(b_path, validate_certs, force, parent=None):
+ def from_path(b_path, force, parent=None):
info = {}
for b_file_name, property_name in CollectionRequirement._FILE_MAPPING:
b_file_path = os.path.join(b_path, b_file_name)
@@ -292,16 +294,17 @@ class CollectionRequirement:
files = info.get('files_file', {}).get('files', {})
return CollectionRequirement(namespace, name, b_path, None, [version], version, force, parent=parent,
- validate_certs=validate_certs, metadata=meta, files=files, skip=True)
+ metadata=meta, files=files, skip=True)
@staticmethod
- def from_name(collection, servers, requirement, validate_certs, force, parent=None):
+ def from_name(collection, apis, requirement, force, parent=None):
namespace, name = collection.split('.', 1)
galaxy_info = None
galaxy_meta = None
- for server in servers:
- collection_url_paths = [server, 'api', 'v2', 'collections', namespace, name, 'versions']
+ for api in apis:
+ collection_url_paths = [api.api_server, 'api', 'v2', 'collections', namespace, name, 'versions']
+ headers = api._auth_header(required=False)
is_single = False
if not (requirement == '*' or requirement.startswith('<') or requirement.startswith('>') or
@@ -314,7 +317,7 @@ class CollectionRequirement:
n_collection_url = _urljoin(*collection_url_paths)
try:
- resp = json.load(open_url(n_collection_url, validate_certs=validate_certs))
+ resp = json.load(open_url(n_collection_url, validate_certs=api.validate_certs, headers=headers))
except urllib_error.HTTPError as err:
if err.code == 404:
continue
@@ -333,14 +336,14 @@ class CollectionRequirement:
if resp['next'] is None:
break
resp = json.load(open_url(to_native(resp['next'], errors='surrogate_or_strict'),
- validate_certs=validate_certs))
+ validate_certs=api.validate_certs, headers=headers))
break
else:
raise AnsibleError("Failed to find collection %s:%s" % (collection, requirement))
- req = CollectionRequirement(namespace, name, None, server, versions, requirement, force, parent=parent,
- validate_certs=validate_certs, metadata=galaxy_meta)
+ req = CollectionRequirement(namespace, name, None, api, versions, requirement, force, parent=parent,
+ metadata=galaxy_meta)
req._galaxy_info = galaxy_info
return req
@@ -380,14 +383,13 @@ def build_collection(collection_path, output_path, force):
_build_collection_tar(b_collection_path, b_collection_output, collection_manifest, file_manifest)
-def publish_collection(collection_path, server, key, ignore_certs, wait):
+def publish_collection(collection_path, api, wait):
"""
Publish an Ansible collection tarball into an Ansible Galaxy server.
:param collection_path: The path to the collection tarball to publish.
- :param server: A native string of the Ansible Galaxy server to publish to.
- :param key: The API key to use for authorization.
- :param ignore_certs: Whether to ignore certificate validation when interacting with the server.
+ :param api: A GalaxyAPI to publish the collection to.
+ :param wait: Whether to wait until the import process is complete.
"""
b_collection_path = to_bytes(collection_path, errors='surrogate_or_strict')
if not os.path.exists(b_collection_path):
@@ -396,21 +398,19 @@ def publish_collection(collection_path, server, key, ignore_certs, wait):
raise AnsibleError("The collection path specified '%s' is not a tarball, use 'ansible-galaxy collection "
"build' to create a proper release artifact." % to_native(collection_path))
- display.display("Publishing collection artifact '%s' to %s" % (collection_path, server))
+ display.display("Publishing collection artifact '%s' to %s %s" % (collection_path, api.name, api.api_server))
- n_url = _urljoin(server, 'api', 'v2', 'collections')
+ n_url = _urljoin(api.api_server, 'api', 'v2', 'collections')
data, content_type = _get_mime_data(b_collection_path)
headers = {
'Content-type': content_type,
'Content-length': len(data),
}
- if key:
- headers['Authorization'] = "Token %s" % key
- validate_certs = not ignore_certs
+ headers.update(api._auth_header())
try:
- resp = json.load(open_url(n_url, data=data, headers=headers, method='POST', validate_certs=validate_certs))
+ resp = json.load(open_url(n_url, data=data, headers=headers, method='POST', validate_certs=api.validate_certs))
except urllib_error.HTTPError as err:
try:
err_info = json.load(err)
@@ -423,24 +423,24 @@ def publish_collection(collection_path, server, key, ignore_certs, wait):
raise AnsibleError("Error when publishing collection (HTTP Code: %d, Message: %s Code: %s)"
% (err.code, message, code))
- display.vvv("Collection has been pushed to the Galaxy server %s" % server)
+ display.vvv("Collection has been pushed to the Galaxy server %s %s" % (api.name, api.api_server))
import_uri = resp['task']
if wait:
- _wait_import(import_uri, key, validate_certs)
+ _wait_import(import_uri, api)
display.display("Collection has been successfully published to the Galaxy server")
else:
display.display("Collection has been pushed to the Galaxy server, not waiting until import has completed "
"due to --no-wait being set. Import task results can be found at %s" % import_uri)
-def install_collections(collections, output_path, servers, validate_certs, ignore_errors, no_deps, force, force_deps):
+def install_collections(collections, output_path, apis, validate_certs, ignore_errors, no_deps, force, force_deps):
"""
Install Ansible collections to the path specified.
:param collections: The collections to install, should be a list of tuples with (name, requirement, Galaxy server).
:param output_path: The path to install the collections to.
- :param servers: A list of Galaxy servers to query when searching for a collection.
- :param validate_certs: Whether to validate the Galaxy server certificates.
+ :param apis: A list of GalaxyAPIs to query when searching for a collection.
+ :param validate_certs: Whether to validate the certificates if downloading a tarball.
:param ignore_errors: Whether to ignore any errors when installing the collection.
:param no_deps: Ignore any collection dependencies and only install the base requirements.
:param force: Re-install a collection if it has already been installed.
@@ -449,7 +449,7 @@ def install_collections(collections, output_path, servers, validate_certs, ignor
existing_collections = _find_existing_collections(output_path)
with _tempdir() as b_temp_path:
- dependency_map = _build_dependency_map(collections, existing_collections, b_temp_path, servers, validate_certs,
+ dependency_map = _build_dependency_map(collections, existing_collections, b_temp_path, apis, validate_certs,
force, force_deps, no_deps)
for collection in dependency_map.values():
@@ -463,56 +463,6 @@ def install_collections(collections, output_path, servers, validate_certs, ignor
raise
-def parse_collections_requirements_file(requirements_file):
- """
- Parses an Ansible requirement.yml file and returns all the collections defined in it. This value ca be used with
- install_collection(). The requirements file is in the form:
-
- ---
- collections:
- - namespace.collection
- - name: namespace.collection
- version: version identifier, multiple identifiers are separated by ','
- source: the URL or prededefined source name in ~/.ansible_galaxy to pull the collection from
-
- :param requirements_file: The path to the requirements file.
- :return: A list of tuples (name, version, source).
- """
- collection_info = []
-
- b_requirements_file = to_bytes(requirements_file, errors='surrogate_or_strict')
- if not os.path.exists(b_requirements_file):
- raise AnsibleError("The requirements file '%s' does not exist." % to_native(requirements_file))
-
- display.vvv("Reading collection requirement file at '%s'" % requirements_file)
- with open(b_requirements_file, 'rb') as req_obj:
- try:
- requirements = yaml.safe_load(req_obj)
- except YAMLError as err:
- raise AnsibleError("Failed to parse the collection requirements yml at '%s' with the following error:\n%s"
- % (to_native(requirements_file), to_native(err)))
-
- if not isinstance(requirements, dict) or 'collections' not in requirements:
- # TODO: Link to documentation page that documents the requirements.yml format for collections.
- raise AnsibleError("Expecting collections requirements file to be a dict with the key "
- "collections that contains a list of collections to install.")
-
- for collection_req in requirements['collections']:
- if isinstance(collection_req, dict):
- req_name = collection_req.get('name', None)
- if req_name is None:
- raise AnsibleError("Collections requirement entry should contain the key name.")
-
- req_version = collection_req.get('version', '*')
- req_source = collection_req.get('source', None)
-
- collection_info.append((req_name, req_version, req_source))
- else:
- collection_info.append((collection_req, '*', None))
-
- return collection_info
-
-
def validate_collection_name(name):
"""
Validates the collection name as an input from the user or a requirements file fit the requirements.
@@ -779,17 +729,15 @@ def _get_mime_data(b_collection_path):
return b"\r\n".join(form), content_type
-def _wait_import(task_url, key, validate_certs):
- headers = {}
- if key:
- headers['Authorization'] = "Token %s" % key
+def _wait_import(task_url, api):
+ headers = api._auth_header()
display.vvv('Waiting until galaxy import task %s has completed' % task_url)
wait = 2
while True:
resp = json.load(open_url(to_native(task_url, errors='surrogate_or_strict'), headers=headers, method='GET',
- validate_certs=validate_certs))
+ validate_certs=api.validate_certs))
if resp.get('finished_at', None):
break
@@ -830,7 +778,7 @@ def _find_existing_collections(path):
for b_collection in os.listdir(b_namespace_path):
b_collection_path = os.path.join(b_namespace_path, b_collection)
if os.path.isdir(b_collection_path):
- req = CollectionRequirement.from_path(b_collection_path, True, False)
+ req = CollectionRequirement.from_path(b_collection_path, False)
display.vvv("Found installed collection %s:%s at '%s'" % (to_text(req), req.latest_version,
to_text(b_collection_path)))
collections.append(req)
@@ -838,13 +786,13 @@ def _find_existing_collections(path):
return collections
-def _build_dependency_map(collections, existing_collections, b_temp_path, servers, validate_certs, force, force_deps,
+def _build_dependency_map(collections, existing_collections, b_temp_path, apis, validate_certs, force, force_deps,
no_deps):
dependency_map = {}
# First build the dependency map on the actual requirements
for name, version, source in collections:
- _get_collection_info(dependency_map, existing_collections, name, version, source, b_temp_path, servers,
+ _get_collection_info(dependency_map, existing_collections, name, version, source, b_temp_path, apis,
validate_certs, (force or force_deps))
checked_parents = set([to_text(c) for c in dependency_map.values() if c.skip])
@@ -860,7 +808,7 @@ def _build_dependency_map(collections, existing_collections, b_temp_path, server
deps_exhausted = False
for dep_name, dep_requirement in parent_info.dependencies.items():
_get_collection_info(dependency_map, existing_collections, dep_name, dep_requirement,
- parent_info.source, b_temp_path, servers, validate_certs, force_deps,
+ parent_info.api, b_temp_path, apis, validate_certs, force_deps,
parent=parent)
checked_parents.add(parent)
@@ -880,7 +828,7 @@ def _build_dependency_map(collections, existing_collections, b_temp_path, server
return dependency_map
-def _get_collection_info(dep_map, existing_collections, collection, requirement, source, b_temp_path, server_list,
+def _get_collection_info(dep_map, existing_collections, collection, requirement, source, b_temp_path, apis,
validate_certs, force, parent=None):
dep_msg = ""
if parent:
@@ -896,7 +844,7 @@ def _get_collection_info(dep_map, existing_collections, collection, requirement,
b_tar_path = _download_file(collection, b_temp_path, None, validate_certs)
if b_tar_path:
- req = CollectionRequirement.from_tar(b_tar_path, validate_certs, force, parent=parent)
+ req = CollectionRequirement.from_tar(b_tar_path, force, parent=parent)
collection_name = to_text(req)
if collection_name in dep_map:
@@ -912,9 +860,8 @@ def _get_collection_info(dep_map, existing_collections, collection, requirement,
collection_info = dep_map[collection]
collection_info.add_requirement(parent, requirement)
else:
- servers = [source] if source else server_list
- collection_info = CollectionRequirement.from_name(collection, servers, requirement, validate_certs, force,
- parent=parent)
+ apis = [source] if source else apis
+ collection_info = CollectionRequirement.from_name(collection, apis, requirement, force, parent=parent)
existing = [c for c in existing_collections if to_text(c) == to_text(collection_info)]
if existing and not collection_info.force:
@@ -925,11 +872,7 @@ def _get_collection_info(dep_map, existing_collections, collection, requirement,
dep_map[to_text(collection_info)] = collection_info
-def _urljoin(*args):
- return '/'.join(to_native(a, errors='surrogate_or_strict').rstrip('/') for a in args + ('',))
-
-
-def _download_file(url, b_path, expected_hash, validate_certs):
+def _download_file(url, b_path, expected_hash, validate_certs, headers=None):
bufsize = 65536
digest = sha256()
@@ -939,7 +882,9 @@ def _download_file(url, b_path, expected_hash, validate_certs):
b_file_path = tempfile.NamedTemporaryFile(dir=b_path, prefix=b_file_name, suffix=b_file_ext, delete=False).name
display.vvv("Downloading %s to %s" % (url, to_text(b_path)))
- resp = open_url(to_native(url, errors='surrogate_or_strict'), validate_certs=validate_certs)
+    # Galaxy redirects downloads to S3, which rejects the request if an Authorization header is attached, so do not forward that header on redirect
+ resp = open_url(to_native(url, errors='surrogate_or_strict'), validate_certs=validate_certs, headers=headers,
+ unredirected_headers=['Authorization'])
with open(b_file_path, 'wb') as download_file:
data = resp.read(bufsize)
diff --git a/lib/ansible/galaxy/role.py b/lib/ansible/galaxy/role.py
index 2956655dd7..5cf188eef9 100644
--- a/lib/ansible/galaxy/role.py
+++ b/lib/ansible/galaxy/role.py
@@ -31,6 +31,7 @@ import yaml
from distutils.version import LooseVersion
from shutil import rmtree
+import ansible.constants as C
from ansible import context
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_native, to_text
@@ -204,7 +205,7 @@ class GalaxyRole(object):
role_data = self.src
tmp_file = self.fetch(role_data)
else:
- api = GalaxyAPI(self.galaxy)
+ api = GalaxyAPI(self.galaxy, 'role_default', C.GALAXY_SERVER)
role_data = api.lookup_role_by_name(self.src)
if not role_data:
raise AnsibleError("- sorry, %s was not found on %s." % (self.src, api.api_server))
diff --git a/lib/ansible/galaxy/token.py b/lib/ansible/galaxy/token.py
index 6c235ed652..bd9a72a8c9 100644
--- a/lib/ansible/galaxy/token.py
+++ b/lib/ansible/galaxy/token.py
@@ -33,33 +33,49 @@ from ansible.utils.display import Display
display = Display()
+class NoTokenSentinel(object):
+    """ Represents an ansible.cfg server with no token defined (will ignore cmdline and GALAXY_TOKEN_PATH). """
+ def __new__(cls, *args, **kwargs):
+ return cls
+
+
class GalaxyToken(object):
''' Class to storing and retrieving local galaxy token '''
- def __init__(self):
- self.b_file = to_bytes(C.GALAXY_TOKEN_PATH)
- self.config = yaml.safe_load(self.__open_config_for_read())
- if not self.config:
- self.config = {}
+ def __init__(self, token=None):
+ self.b_file = to_bytes(C.GALAXY_TOKEN_PATH, errors='surrogate_or_strict')
+ # Done so the config file is only opened when set/get/save is called
+ self._config = None
+ self._token = token
+
+ @property
+ def config(self):
+ if not self._config:
+ self._config = self._read()
- def __open_config_for_read(self):
+ # Prioritise the token passed into the constructor
+ if self._token:
+ self._config['token'] = None if self._token is NoTokenSentinel else self._token
- f = None
+ return self._config
+
+ def _read(self):
action = 'Opened'
if not os.path.isfile(self.b_file):
# token file not found, create and chomd u+rw
- f = open(self.b_file, 'w')
- f.close()
+ open(self.b_file, 'w').close()
os.chmod(self.b_file, S_IRUSR | S_IWUSR) # owner has +rw
action = 'Created'
- f = open(self.b_file, 'r')
+ with open(self.b_file, 'r') as f:
+ config = yaml.safe_load(f)
+
display.vvv('%s %s' % (action, to_text(self.b_file)))
- return f
+ return config or {}
def set(self, token):
- self.config['token'] = token
+ self._token = token
self.save()
def get(self):