summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rw-r--r--lib/ansible/cli/__init__.py9
-rw-r--r--lib/ansible/cli/doc.py9
-rw-r--r--lib/ansible/cli/playbook.py9
-rw-r--r--lib/ansible/collections/list.py4
-rw-r--r--lib/ansible/config/ansible_builtin_runtime.yml (renamed from lib/ansible/config/routing.yml)54
-rw-r--r--lib/ansible/config/base.yml18
-rw-r--r--lib/ansible/errors/__init__.py15
-rw-r--r--lib/ansible/executor/module_common.py87
-rw-r--r--lib/ansible/executor/powershell/module_manifest.py2
-rw-r--r--lib/ansible/executor/task_executor.py4
-rw-r--r--lib/ansible/playbook/collectionsearch.py4
-rw-r--r--lib/ansible/playbook/helpers.py1
-rw-r--r--lib/ansible/playbook/role/__init__.py4
-rw-r--r--lib/ansible/playbook/role/definition.py5
-rw-r--r--lib/ansible/playbook/task.py6
-rw-r--r--lib/ansible/plugins/inventory/__init__.py5
-rw-r--r--lib/ansible/plugins/loader.py341
-rw-r--r--lib/ansible/plugins/strategy/linear.py34
-rw-r--r--lib/ansible/template/__init__.py89
-rw-r--r--lib/ansible/utils/collection_loader.py603
-rw-r--r--lib/ansible/utils/collection_loader/__init__.py23
-rw-r--r--lib/ansible/utils/collection_loader/_collection_config.py101
-rw-r--r--lib/ansible/utils/collection_loader/_collection_finder.py953
-rw-r--r--lib/ansible/utils/collection_loader/_collection_meta.py17
24 files changed, 1645 insertions, 752 deletions
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 3adf5eb5a4..59b19c636f 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -26,7 +26,8 @@ from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import PromptVaultSecret, get_file_vault_secret
from ansible.plugins.loader import add_all_plugin_dirs
from ansible.release import __version__
-from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path, set_collection_playbook_paths
+from ansible.utils.collection_loader import AnsibleCollectionConfig
+from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
from ansible.utils.display import Display
from ansible.utils.path import unfrackpath
from ansible.utils.unsafe_proxy import to_unsafe_text
@@ -455,11 +456,11 @@ class CLI(with_metaclass(ABCMeta, object)):
if basedir:
loader.set_basedir(basedir)
add_all_plugin_dirs(basedir)
- set_collection_playbook_paths(basedir)
- default_collection = get_collection_name_from_path(basedir)
+ AnsibleCollectionConfig.playbook_paths = basedir
+ default_collection = _get_collection_name_from_path(basedir)
if default_collection:
display.warning(u'running with default collection {0}'.format(default_collection))
- AnsibleCollectionLoader().set_default_collection(default_collection)
+ AnsibleCollectionConfig.default_collection = default_collection
vault_ids = list(options['vault_ids'])
default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST
diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py
index 8e96376ab6..bfb67e66ff 100644
--- a/lib/ansible/cli/doc.py
+++ b/lib/ansible/cli/doc.py
@@ -28,7 +28,8 @@ from ansible.parsing.metadata import extract_metadata
from ansible.parsing.plugin_docs import read_docstub
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.plugins.loader import action_loader, fragment_loader
-from ansible.utils.collection_loader import set_collection_playbook_paths, get_collection_name_from_path
+from ansible.utils.collection_loader import AnsibleCollectionConfig
+from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
from ansible.utils.display import Display
from ansible.utils.plugin_docs import BLACKLIST, get_docstring, get_versioned_doclink
@@ -44,11 +45,11 @@ def jdump(text):
def add_collection_plugins(plugin_list, plugin_type, coll_filter=None):
- # TODO: take into account routing.yml once implemented
+ # TODO: take into account runtime.yml once implemented
b_colldirs = list_collection_dirs(coll_filter=coll_filter)
for b_path in b_colldirs:
path = to_text(b_path, errors='surrogate_or_strict')
- collname = get_collection_name_from_path(b_path)
+ collname = _get_collection_name_from_path(b_path)
ptype = C.COLLECTION_PTYPE_COMPAT.get(plugin_type, plugin_type)
plugin_list.update(DocCLI.find_plugins(os.path.join(path, 'plugins', ptype), plugin_type, collection=collname))
@@ -127,7 +128,7 @@ class DocCLI(CLI):
# add to plugin paths from command line
basedir = context.CLIARGS['basedir']
if basedir:
- set_collection_playbook_paths(basedir)
+ AnsibleCollectionConfig.playbook_paths = basedir
loader.add_directory(basedir, with_subdir=True)
if context.CLIARGS['module_path']:
for path in context.CLIARGS['module_path']:
diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
index 300d913349..3429a28396 100644
--- a/lib/ansible/cli/playbook.py
+++ b/lib/ansible/cli/playbook.py
@@ -16,7 +16,8 @@ from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.module_utils._text import to_bytes
from ansible.playbook.block import Block
from ansible.utils.display import Display
-from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path, set_collection_playbook_paths
+from ansible.utils.collection_loader import AnsibleCollectionConfig
+from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
from ansible.plugins.loader import add_all_plugin_dirs
@@ -90,13 +91,13 @@ class PlaybookCLI(CLI):
b_playbook_dirs.append(b_playbook_dir)
- set_collection_playbook_paths(b_playbook_dirs)
+ AnsibleCollectionConfig.playbook_paths = b_playbook_dirs
- playbook_collection = get_collection_name_from_path(b_playbook_dirs[0])
+ playbook_collection = _get_collection_name_from_path(b_playbook_dirs[0])
if playbook_collection:
display.warning("running playbook inside collection {0}".format(playbook_collection))
- AnsibleCollectionLoader().set_default_collection(playbook_collection)
+ AnsibleCollectionConfig.default_collection = playbook_collection
# don't deal with privilege escalation or passwords when we don't need to
if not (context.CLIARGS['listhosts'] or context.CLIARGS['listtasks'] or
diff --git a/lib/ansible/collections/list.py b/lib/ansible/collections/list.py
index cd207dafdf..8e8bc66490 100644
--- a/lib/ansible/collections/list.py
+++ b/lib/ansible/collections/list.py
@@ -10,7 +10,7 @@ from collections import defaultdict
from ansible.collections import is_collection_path
from ansible.module_utils._text import to_bytes
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.display import Display
display = Display()
@@ -27,7 +27,7 @@ def list_valid_collection_paths(search_paths=None, warn=False):
if search_paths is None:
search_paths = []
- search_paths.extend(AnsibleCollectionLoader().n_collection_paths)
+ search_paths.extend(AnsibleCollectionConfig.collection_paths)
for path in search_paths:
diff --git a/lib/ansible/config/routing.yml b/lib/ansible/config/ansible_builtin_runtime.yml
index cf8f2e49d1..8064babf80 100644
--- a/lib/ansible/config/routing.yml
+++ b/lib/ansible/config/ansible_builtin_runtime.yml
@@ -1,8 +1,10 @@
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-# This file is only for 2.9 backwards compatiblity, expect it to go away in future ansible versions.
plugin_routing:
connection:
+ # test entry
+ redirected_local:
+ redirect: ansible.builtin.local
buildah:
redirect: containers.podman.buildah
podman:
@@ -48,6 +50,12 @@ plugin_routing:
persistent:
redirect: ansible.netcommon.persistent
modules:
+ # test entry
+ formerly_core_ping:
+ redirect: testns.testcoll.ping
+ # test entry
+ uses_redirected_action:
+ redirect: ansible.builtin.ping
podman_container_info:
redirect: containers.podman.podman_container_info
podman_image_info:
@@ -4100,8 +4108,8 @@ plugin_routing:
redirect: ansible.posix.sysctl
async_status:
redirect: ansible.windows.async_status
- setup:
- redirect: ansible.windows.setup
+ setup.ps1:
+ redirect: ansible.windows.setup.ps1
slurp:
redirect: ansible.windows.slurp
win_acl:
@@ -7553,6 +7561,10 @@ plugin_routing:
cpm_user:
redirect: wti.remote.cpm_user
module_utils:
+ formerly_core:
+ redirect: ansible_collections.testns.testcoll.plugins.module_utils.base
+ sub1.sub2.formerly_core:
+ redirect: ansible_collections.testns.testcoll.plugins.module_utils.base
common:
redirect: f5networks.f5_modules.common
frr:
@@ -8052,6 +8064,9 @@ plugin_routing:
vyos:
redirect: vyos.vyos.vyos
action:
+ # test entry, overloaded with module of same name to use a different base action (ie not "normal.py")
+ uses_redirected_action:
+ redirect: testns.testcoll.subclassed_norm
aireos:
redirect: community.general.aireos
aruba:
@@ -8493,6 +8508,11 @@ plugin_routing:
vyos:
redirect: vyos.vyos.vyos
filter:
+ # test entries
+ formerly_core_filter:
+ redirect: ansible.builtin.bool
+ formerly_core_masked_filter:
+ redirect: ansible.builtin.bool
gcp_kms_encrypt:
redirect: google.cloud.gcp_kms_encrypt
gcp_kms_decrypt:
@@ -8575,6 +8595,9 @@ plugin_routing:
qradar:
redirect: ibm.qradar.qradar
inventory:
+ # test entry
+ formerly_core_inventory:
+ redirect: testns.content_adj.statichost
cloudscale:
redirect: community.general.cloudscale
docker_machine:
@@ -8622,6 +8645,9 @@ plugin_routing:
azure_rm:
redirect: azure.azcollection.azure_rm
lookup:
+ # test entry
+ formerly_core_lookup:
+ redirect: testns.testcoll.mylookup
avi:
redirect: community.general.avi
cartesian:
@@ -8714,7 +8740,29 @@ plugin_routing:
junos:
redirect: junipernetworks.junos.junos
shell:
+ # test entry
+ formerly_core_powershell:
+ redirect: ansible.builtin.powershell
csh:
redirect: ansible.posix.csh
fish:
redirect: ansible.posix.fish
+ test:
+ # test entries
+ formerly_core_test:
+ redirect: ansible.builtin.search
+ formerly_core_masked_test:
+ redirect: ansible.builtin.search
+import_redirection:
+ # test entry
+ ansible.module_utils.formerly_core:
+ redirect: ansible_collections.testns.testcoll.plugins.module_utils.base
+ ansible.module_utils.known_hosts:
+ redirect: ansible_collections.community.general.plugins.module_utils.known_hosts
+ # ansible.builtin synthetic collection redirection hackery
+ ansible_collections.ansible.builtin.plugins.modules:
+ redirect: ansible.modules
+ ansible_collections.ansible.builtin.plugins.module_utils:
+ redirect: ansible.module_utils
+ ansible_collections.ansible.builtin.plugins:
+ redirect: ansible.plugins
diff --git a/lib/ansible/config/base.yml b/lib/ansible/config/base.yml
index 9c500b7c7d..7029c65189 100644
--- a/lib/ansible/config/base.yml
+++ b/lib/ansible/config/base.yml
@@ -215,6 +215,14 @@ CACHE_PLUGIN_TIMEOUT:
- {key: fact_caching_timeout, section: defaults}
type: integer
yaml: {key: facts.cache.timeout}
+COLLECTIONS_SCAN_SYS_PATH:
+ name: enable/disable scanning sys.path for installed collections
+ default: true
+ type: boolean
+ env:
+ - {name: ANSIBLE_COLLECTIONS_SCAN_SYS_PATH}
+ ini:
+ - {key: collections_scan_sys_path, section: defaults}
COLLECTIONS_PATHS:
name: ordered list of root paths for loading installed Ansible collections content
description: Colon separated paths in which Ansible will search for collections content.
@@ -224,6 +232,16 @@ COLLECTIONS_PATHS:
- {name: ANSIBLE_COLLECTIONS_PATHS}
ini:
- {key: collections_paths, section: defaults}
+COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH:
+ name: Defines behavior when loading a collection that does not support the current Ansible version
+ description:
+ - When a collection is loaded that does not support the running Ansible version (via the collection metadata key
+ `requires_ansible`), the default behavior is to issue a warning and continue anyway. Setting this value to `ignore`
+    skips the warning entirely, while setting it to `error` will immediately halt Ansible execution.
+ env: [{name: ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH}]
+ ini: [{key: collections_on_ansible_version_mismatch, section: defaults}]
+ choices: [error, warning, ignore]
+ default: warning
COLOR_CHANGED:
name: Color for 'changed' task status
default: yellow
diff --git a/lib/ansible/errors/__init__.py b/lib/ansible/errors/__init__.py
index 243ce9955a..93871b9e9a 100644
--- a/lib/ansible/errors/__init__.py
+++ b/lib/ansible/errors/__init__.py
@@ -276,6 +276,21 @@ class AnsibleFileNotFound(AnsibleRuntimeError):
suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)
+class AnsiblePluginRemoved(AnsibleRuntimeError):
+ ''' a requested plugin has been removed '''
+ pass
+
+
+class AnsiblePluginCircularRedirect(AnsibleRuntimeError):
+ '''a cycle was detected in plugin redirection'''
+ pass
+
+
+class AnsibleCollectionUnsupportedVersionError(AnsibleRuntimeError):
+ '''a collection is not supported by this version of Ansible'''
+ pass
+
+
# These Exceptions are temporary, using them as flow control until we can get a better solution.
# DO NOT USE as they will probably be removed soon.
# We will port the action modules in our tree to use a context manager instead.
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index a29502e533..fea097cf90 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -36,9 +36,10 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredError
from ansible.executor.powershell import module_manifest as ps_manifest
-from ansible.module_utils._text import to_bytes, to_text, to_native
-from ansible.module_utils.compat.importlib import import_module
+from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
from ansible.plugins.loader import module_utils_loader
+from ansible.utils.collection_loader._collection_finder import _get_collection_metadata
+
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
@@ -601,7 +602,8 @@ class ModuleInfo:
path = None
if imp is None:
- self._info = info = importlib.machinery.PathFinder.find_spec(name, paths)
+ # don't pretend this is a top-level module, prefix the rest of the namespace
+ self._info = info = importlib.machinery.PathFinder.find_spec('ansible.module_utils.' + name, paths)
if info is not None:
self.py_src = os.path.splitext(info.origin)[1] in importlib.machinery.SOURCE_SUFFIXES
self.pkg_dir = info.origin.endswith('/__init__.py')
@@ -632,33 +634,61 @@ class ModuleInfo:
class CollectionModuleInfo(ModuleInfo):
- def __init__(self, name, paths):
+ def __init__(self, name, pkg):
self._mod_name = name
self.py_src = True
- # FIXME: Implement pkg_dir so that we can place __init__.py files
self.pkg_dir = False
- for path in paths:
- self._package_name = '.'.join(path.split('/'))
- try:
- self.get_source()
- except FileNotFoundError:
- pass
- else:
- self.path = os.path.join(path, self._mod_name) + '.py'
- break
- else:
- # FIXME (nitz): implement package fallback code
+ split_name = pkg.split('.')
+ split_name.append(name)
+ if len(split_name) < 5 or split_name[0] != 'ansible_collections' or split_name[3] != 'plugins' or split_name[4] != 'module_utils':
+ raise ValueError('must search for something beneath a collection module_utils, not {0}.{1}'.format(to_native(pkg), to_native(name)))
+
+ # NB: we can't use pkgutil.get_data safely here, since we don't want to import/execute package/module code on
+ # the controller while analyzing/assembling the module, so we'll have to manually import the collection's
+ # Python package to locate it (import root collection, reassemble resource path beneath, fetch source)
+
+ # FIXME: handle MU redirection logic here
+
+ collection_pkg_name = '.'.join(split_name[0:3])
+ resource_base_path = os.path.join(*split_name[3:])
+ # look for package_dir first, then module
+
+ self._src = pkgutil.get_data(collection_pkg_name, to_native(os.path.join(resource_base_path, '__init__.py')))
+
+ if self._src is not None: # empty string is OK
+ return
+
+ self._src = pkgutil.get_data(collection_pkg_name, to_native(resource_base_path + '.py'))
+
+ if not self._src:
raise ImportError('unable to load collection-hosted module_util'
- ' {0}.{1}'.format(to_native(self._package_name),
- to_native(name)))
+ ' {0}.{1}'.format(to_native(pkg), to_native(name)))
def get_source(self):
- # FIXME (nitz): need this in py2 for some reason TBD, but we shouldn't (get_data delegates
- # to wrong loader without it)
- pkg = import_module(self._package_name)
- data = pkgutil.get_data(to_native(self._package_name), to_native(self._mod_name + '.py'))
- return data
+ return self._src
+
+
+class InternalRedirectModuleInfo(ModuleInfo):
+ def __init__(self, name, full_name):
+ self.pkg_dir = None
+ self._original_name = full_name
+ self.path = full_name.replace('.', '/') + '.py'
+ collection_meta = _get_collection_metadata('ansible.builtin')
+ redirect = collection_meta.get('plugin_routing', {}).get('module_utils', {}).get(name, {}).get('redirect', None)
+ if not redirect:
+ raise ImportError('no redirect found for {0}'.format(name))
+ self._redirect = redirect
+ self.py_src = True
+ self._shim_src = """
+import sys
+import {1} as mod
+
+sys.modules['{0}'] = mod
+""".format(self._original_name, self._redirect)
+
+ def get_source(self):
+ return self._shim_src
def recursive_finder(name, module_fqn, data, py_module_names, py_module_cache, zf):
@@ -721,8 +751,7 @@ def recursive_finder(name, module_fqn, data, py_module_names, py_module_cache, z
break
try:
# this is a collection-hosted MU; look it up with pkgutil.get_data()
- module_info = CollectionModuleInfo(py_module_name[-idx],
- [os.path.join(*py_module_name[:-idx])])
+ module_info = CollectionModuleInfo(py_module_name[-idx], '.'.join(py_module_name[:-idx]))
break
except ImportError:
continue
@@ -740,7 +769,13 @@ def recursive_finder(name, module_fqn, data, py_module_names, py_module_cache, z
[os.path.join(p, *relative_module_utils_dir[:-idx]) for p in module_utils_paths])
break
except ImportError:
- continue
+ # check metadata for redirect, generate stub if present
+ try:
+ module_info = InternalRedirectModuleInfo(py_module_name[-idx],
+ '.'.join(py_module_name[:(None if idx == 1 else -1)]))
+ break
+ except ImportError:
+ continue
else:
# If we get here, it's because of a bug in ModuleDepFinder. If we get a reproducer we
# should then fix ModuleDepFinder
diff --git a/lib/ansible/executor/powershell/module_manifest.py b/lib/ansible/executor/powershell/module_manifest.py
index 321ff83455..83a1c3a739 100644
--- a/lib/ansible/executor/powershell/module_manifest.py
+++ b/lib/ansible/executor/powershell/module_manifest.py
@@ -142,7 +142,7 @@ class PSModuleDepFinder(object):
if name in self.exec_scripts.keys():
return
- data = pkgutil.get_data("ansible.executor.powershell", name + ".ps1")
+ data = pkgutil.get_data("ansible.executor.powershell", to_native(name + ".ps1"))
if data is None:
raise AnsibleError("Could not find executor powershell script "
"for '%s'" % name)
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index 3a5c98dd58..f4a5fd834f 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -27,7 +27,7 @@ from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader
from ansible.template import Templar
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var
from ansible.vars.clean import namespace_facts, clean_facts
@@ -1101,7 +1101,7 @@ def start_connection(play_context, variables, task_uuid):
# can.
'ANSIBLE_BECOME_PLUGINS': become_loader.print_paths(),
'ANSIBLE_CLICONF_PLUGINS': cliconf_loader.print_paths(),
- 'ANSIBLE_COLLECTIONS_PATHS': os.pathsep.join(AnsibleCollectionLoader().n_collection_paths),
+ 'ANSIBLE_COLLECTIONS_PATHS': to_native(os.pathsep.join(AnsibleCollectionConfig.collection_paths)),
'ANSIBLE_CONNECTION_PLUGINS': connection_loader.print_paths(),
'ANSIBLE_HTTPAPI_PLUGINS': httpapi_loader.print_paths(),
'ANSIBLE_NETCONF_PLUGINS': netconf_loader.print_paths(),
diff --git a/lib/ansible/playbook/collectionsearch.py b/lib/ansible/playbook/collectionsearch.py
index d80b6a1c6a..fb69519b40 100644
--- a/lib/ansible/playbook/collectionsearch.py
+++ b/lib/ansible/playbook/collectionsearch.py
@@ -6,7 +6,7 @@ __metaclass__ = type
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.template import is_template, Environment
from ansible.utils.display import Display
@@ -14,7 +14,7 @@ display = Display()
def _ensure_default_collection(collection_list=None):
- default_collection = AnsibleCollectionLoader().default_collection
+ default_collection = AnsibleCollectionConfig.default_collection
# Will be None when used as the default
if collection_list is None:
diff --git a/lib/ansible/playbook/helpers.py b/lib/ansible/playbook/helpers.py
index 4eff569943..2a8c5aecbe 100644
--- a/lib/ansible/playbook/helpers.py
+++ b/lib/ansible/playbook/helpers.py
@@ -25,7 +25,6 @@ from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, Ansible
from ansible.module_utils._text import to_native
from ansible.module_utils.six import string_types
from ansible.parsing.mod_args import ModuleArgsParser
-from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.display import Display
display = Display()
diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py
index 4b43b3eb64..b7456afcf7 100644
--- a/lib/ansible/playbook/role/__init__.py
+++ b/lib/ansible/playbook/role/__init__.py
@@ -32,7 +32,7 @@ from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
from ansible.plugins.loader import add_all_plugin_dirs
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.vars import combine_vars
@@ -234,7 +234,7 @@ class Role(Base, Conditional, Taggable, CollectionSearch):
if self._role_collection: # this is a collection-hosted role
self.collections.insert(0, self._role_collection)
else: # this is a legacy role, but set the default collection if there is one
- default_collection = AnsibleCollectionLoader().default_collection
+ default_collection = AnsibleCollectionConfig.default_collection
if default_collection:
self.collections.insert(0, default_collection)
# legacy role, ensure all plugin dirs under the role are added to plugin search path
diff --git a/lib/ansible/playbook/role/definition.py b/lib/ansible/playbook/role/definition.py
index b859f63a95..20d69ebd6b 100644
--- a/lib/ansible/playbook/role/definition.py
+++ b/lib/ansible/playbook/role/definition.py
@@ -31,7 +31,8 @@ from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.template import Templar
-from ansible.utils.collection_loader import get_collection_role_path, AnsibleCollectionRef
+from ansible.utils.collection_loader import AnsibleCollectionRef
+from ansible.utils.collection_loader._collection_finder import _get_collection_role_path
from ansible.utils.path import unfrackpath
from ansible.utils.display import Display
@@ -155,7 +156,7 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
# try to load as a collection-based role first
if self._collection_list or AnsibleCollectionRef.is_valid_fqcr(role_name):
- role_tuple = get_collection_role_path(role_name, self._collection_list)
+ role_tuple = _get_collection_role_path(role_name, self._collection_list)
if role_tuple:
# we found it, stash collection data and return the name/path tuple
diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py
index 972707f5cf..e9e5d876ef 100644
--- a/lib/ansible/playbook/task.py
+++ b/lib/ansible/playbook/task.py
@@ -36,7 +36,7 @@ from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
@@ -182,7 +182,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
new_ds.ansible_pos = ds.ansible_pos
# since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
- default_collection = AnsibleCollectionLoader().default_collection
+ default_collection = AnsibleCollectionConfig.default_collection
collections_list = ds.get('collections')
if collections_list is None:
@@ -293,7 +293,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
if self._parent:
self._parent.post_validate(templar)
- if AnsibleCollectionLoader().default_collection:
+ if AnsibleCollectionConfig.default_collection:
pass
super(Task, self).post_validate(templar)
diff --git a/lib/ansible/plugins/inventory/__init__.py b/lib/ansible/plugins/inventory/__init__.py
index fc6046c628..c0ecb4c1cd 100644
--- a/lib/ansible/plugins/inventory/__init__.py
+++ b/lib/ansible/plugins/inventory/__init__.py
@@ -216,10 +216,13 @@ class BaseInventoryPlugin(AnsiblePlugin):
except Exception as e:
raise AnsibleParserError(to_native(e))
+ # a plugin can be loaded via many different names with redirection- if so, we want to accept any of those names
+ valid_names = getattr(self, '_redirected_names') or [self.NAME]
+
if not config:
# no data
raise AnsibleParserError("%s is empty" % (to_native(path)))
- elif config.get('plugin') != self.NAME:
+ elif config.get('plugin') not in valid_names:
# this is not my config file
raise AnsibleParserError("Incorrect plugin name in file: %s" % config.get('plugin', 'none found'))
elif not isinstance(config, Mapping):
diff --git a/lib/ansible/plugins/loader.py b/lib/ansible/plugins/loader.py
index fb61fd2084..e52eebc49a 100644
--- a/lib/ansible/plugins/loader.py
+++ b/lib/ansible/plugins/loader.py
@@ -16,25 +16,45 @@ import warnings
from collections import defaultdict
from ansible import constants as C
-from ansible.errors import AnsibleError
+from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemoved, AnsibleCollectionUnsupportedVersionError
from ansible.module_utils._text import to_bytes, to_text, to_native
from ansible.module_utils.compat.importlib import import_module
from ansible.module_utils.six import string_types
from ansible.parsing.utils.yaml import from_yaml
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
-from ansible.utils.collection_loader import AnsibleCollectionLoader, AnsibleFlatMapLoader, AnsibleCollectionRef
+from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
+from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder, _get_collection_metadata
from ansible.utils.display import Display
from ansible.utils.plugin_docs import add_fragments
+from ansible import __version__ as ansible_version
+
+# TODO: take the packaging dep, or vendor SpecifierSet?
+
+try:
+ from packaging.specifiers import SpecifierSet
+ from packaging.version import Version
+except ImportError:
+ SpecifierSet = None
+ Version = None
+
+try:
+ # use C version if possible for speedup
+ from yaml import CSafeLoader as SafeLoader
+except ImportError:
+ from yaml import SafeLoader
try:
import importlib.util
imp = None
except ImportError:
import imp
+ ModuleNotFoundError = None
display = Display()
+_tombstones = None
+
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in globals().items() if isinstance(obj, PluginLoader)]
@@ -93,6 +113,61 @@ def add_dirs_to_loader(which_loader, paths):
loader.add_directory(path, with_subdir=True)
+class PluginLoadContext(object):
+ def __init__(self):
+ self.original_name = None
+ self.redirect_list = []
+ self.error_list = []
+ self.import_error_list = []
+ self.load_attempts = []
+ self.pending_redirect = None
+ self.exit_reason = None
+ self.plugin_resolved_path = None
+ self.plugin_resolved_name = None
+ self.deprecated = False
+ self.removal_date = None
+ self.deprecation_warnings = []
+ self.resolved = False
+
+ def record_deprecation(self, name, deprecation):
+ if not deprecation:
+ return self
+
+ warning_text = deprecation.get('warning_text', None)
+ removal_date = deprecation.get('removal_date', None)
+ if not warning_text:
+ if removal_date:
+ warning_text = '{0} has been deprecated and will be removed in a release after {1}'.format(name, removal_date)
+ else:
+ warning_text = '{0} has been deprecated and will be removed in a future release'.format(name)
+
+ self.deprecated = True
+ if removal_date:
+ self.removal_date = removal_date
+ self.deprecation_warnings.append(warning_text)
+ return self
+
+ def resolve(self, resolved_name, resolved_path, exit_reason):
+ self.pending_redirect = None
+ self.plugin_resolved_name = resolved_name
+ self.plugin_resolved_path = resolved_path
+ self.exit_reason = exit_reason
+ self.resolved = True
+ return self
+
+ def redirect(self, redirect_name):
+ self.pending_redirect = redirect_name
+ self.exit_reason = 'pending redirect resolution from {0} to {1}'.format(self.original_name, redirect_name)
+ self.resolved = False
+ return self
+
+ def nope(self, exit_reason):
+ self.pending_redirect = None
+ self.exit_reason = exit_reason
+ self.resolved = False
+ return self
+
+
class PluginLoader:
'''
PluginLoader loads plugins from the configured plugin directories.
@@ -136,6 +211,9 @@ class PluginLoader:
self._searched_paths = set()
+ def __repr__(self):
+ return 'PluginLoader(type={0})'.format(AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(self.subdir))
+
def _clear_caches(self):
if C.OLD_PLUGIN_CACHE_CLEARING:
@@ -307,15 +385,80 @@ class PluginLoader:
self._clear_caches()
display.debug('Added %s to loader search path' % (directory))
- def _find_fq_plugin(self, fq_name, extension):
+ def _query_collection_routing_meta(self, acr, plugin_type, extension=None):
+ collection_pkg = import_module(acr.n_python_collection_package_name)
+ if not collection_pkg:
+ return None
+
+ # FIXME: shouldn't need this...
+ try:
+ # force any type-specific metadata postprocessing to occur
+ import_module(acr.n_python_collection_package_name + '.plugins.{0}'.format(plugin_type))
+ except ImportError:
+ pass
+
+ # this will be created by the collection PEP302 loader
+ collection_meta = getattr(collection_pkg, '_collection_meta', None)
+
+ if not collection_meta:
+ return None
+
+ # TODO: add subdirs support
+ # check for extension-specific entry first (eg 'setup.ps1')
+ # TODO: str/bytes on extension/name munging
+ if acr.subdirs:
+ subdir_qualified_resource = '.'.join([acr.subdirs, acr.resource])
+ else:
+ subdir_qualified_resource = acr.resource
+ entry = collection_meta.get('plugin_routing', {}).get(plugin_type, {}).get(subdir_qualified_resource + extension, None)
+ if not entry:
+ # try for extension-agnostic entry
+ entry = collection_meta.get('plugin_routing', {}).get(plugin_type, {}).get(subdir_qualified_resource, None)
+ return entry
+
+ def _find_fq_plugin(self, fq_name, extension, plugin_load_context):
"""Search builtin paths to find a plugin. No external paths are searched,
meaning plugins inside roles inside collections will be ignored.
"""
+ plugin_load_context.resolved = False
+
plugin_type = AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(self.subdir)
acr = AnsibleCollectionRef.from_fqcr(fq_name, plugin_type)
+ # check collection metadata to see if any special handling is required for this plugin
+ routing_metadata = self._query_collection_routing_meta(acr, plugin_type, extension=extension)
+
+ # TODO: factor this into a wrapper method
+ if routing_metadata:
+ deprecation = routing_metadata.get('deprecation', None)
+
+ # this will no-op if there's no deprecation metadata for this plugin
+ plugin_load_context.record_deprecation(fq_name, deprecation)
+
+ tombstone = routing_metadata.get('tombstone', None)
+
+ if tombstone:
+ redirect = tombstone.get('redirect', None)
+ removal_date = tombstone.get('removal_date')
+ if removal_date:
+ removed_msg = '{0} was removed on {1}'.format(fq_name, removal_date)
+ else:
+ removed_msg = '{0} was removed in a previous release'.format(fq_name)
+ plugin_load_context.removal_date = removal_date
+ plugin_load_context.resolved = True
+ plugin_load_context.exit_reason = removed_msg
+ return plugin_load_context
+
+ redirect = routing_metadata.get('redirect', None)
+
+ if redirect:
+ # FIXME: remove once this is covered in debug or whatever
+ display.vv("redirecting (type: {0}) {1} to {2}".format(plugin_type, fq_name, redirect))
+ return plugin_load_context.redirect(redirect)
+ # TODO: non-FQCN case, do we support `.` prefix for current collection, assume it with no dots, require it for subdirs in current, or ?
+
n_resource = to_native(acr.resource, errors='strict')
# we want this before the extension is added
full_name = '{0}.{1}'.format(acr.n_python_package_name, n_resource)
@@ -326,17 +469,10 @@ class PluginLoader:
pkg = sys.modules.get(acr.n_python_package_name)
if not pkg:
# FIXME: there must be cheaper/safer way to do this
- pkg = import_module(acr.n_python_package_name)
-
- # if the package is one of our flatmaps, we need to consult its loader to find the path, since the file could be
- # anywhere in the tree
- if hasattr(pkg, '__loader__') and isinstance(pkg.__loader__, AnsibleFlatMapLoader):
try:
- file_path = pkg.__loader__.find_file(n_resource)
- return full_name, to_text(file_path)
- except IOError:
- # this loader already takes care of extensionless files, so if we didn't find it, just bail
- return None, None
+ pkg = import_module(acr.n_python_package_name)
+ except (ImportError, ModuleNotFoundError):
+ return plugin_load_context.nope('Python package {0} not found'.format(acr.n_python_package_name))
pkg_path = os.path.dirname(pkg.__file__)
@@ -344,7 +480,11 @@ class PluginLoader:
# FIXME: and is file or file link or ...
if os.path.exists(n_resource_path):
- return full_name, to_text(n_resource_path)
+ return plugin_load_context.resolve(full_name, to_text(n_resource_path), 'found exact match for {0} in {1}'.format(full_name, acr.collection))
+
+ if extension:
+ # the request was extension-specific, don't try for an extensionless match
+ return plugin_load_context.nope('no match for {0} in {1}'.format(to_text(n_resource), acr.collection))
# look for any matching extension in the package location (sans filter)
found_files = [f
@@ -352,24 +492,63 @@ class PluginLoader:
if os.path.isfile(f) and not f.endswith(C.MODULE_IGNORE_EXTS)]
if not found_files:
- return None, None
+ return plugin_load_context.nope('failed fuzzy extension match for {0} in {1}'.format(full_name, acr.collection))
if len(found_files) > 1:
# TODO: warn?
pass
- return full_name, to_text(found_files[0])
+ return plugin_load_context.resolve(full_name, to_text(found_files[0]), 'found fuzzy extension match for {0} in {1}'.format(full_name, acr.collection))
def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
''' Find a plugin named name '''
- return self.find_plugin_with_name(name, mod_type, ignore_deprecated, check_aliases, collection_list)[1]
+ result = self.find_plugin_with_context(name, mod_type, ignore_deprecated, check_aliases, collection_list)
+ if result.resolved and result.plugin_resolved_path:
+ return result.plugin_resolved_path
- def find_plugin_with_name(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
- ''' Find a plugin named name '''
+ return None
+
+ def find_plugin_with_context(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
+ ''' Find a plugin named name, returning contextual info about the load, recursively resolving redirection '''
+ plugin_load_context = PluginLoadContext()
+ plugin_load_context.original_name = name
+ while True:
+ result = self._resolve_plugin_step(name, mod_type, ignore_deprecated, check_aliases, collection_list, plugin_load_context=plugin_load_context)
+ if result.pending_redirect:
+ if result.pending_redirect in result.redirect_list:
+ raise AnsiblePluginCircularRedirect('plugin redirect loop resolving {0} (path: {1})'.format(result.original_name, result.redirect_list))
+ name = result.pending_redirect
+ result.pending_redirect = None
+ plugin_load_context = result
+ else:
+ break
+
+ # TODO: smuggle these to the controller when we're in a worker, reduce noise from normal things like missing plugin packages during collection search
+ if plugin_load_context.error_list:
+ display.warning("errors were encountered during the plugin load for {0}:\n{1}".format(name, plugin_load_context.error_list))
+
+ # TODO: display/return import_error_list? Only useful for forensics...
+
+ if plugin_load_context.deprecated and C.config.get_config_value('DEPRECATION_WARNINGS'):
+ for dw in plugin_load_context.deprecation_warnings:
+ # TODO: need to smuggle these to the controller if we're in a worker context
+ display.warning('[DEPRECATION WARNING] ' + dw)
+
+ return plugin_load_context
+
+ # FIXME: name bikeshed
+ def _resolve_plugin_step(self, name, mod_type='', ignore_deprecated=False,
+ check_aliases=False, collection_list=None, plugin_load_context=PluginLoadContext()):
+ if not plugin_load_context:
+ raise ValueError('A PluginLoadContext is required')
+
+ plugin_load_context.redirect_list.append(name)
+ plugin_load_context.resolved = False
global _PLUGIN_FILTERS
if name in _PLUGIN_FILTERS[self.package]:
- return None, None
+ plugin_load_context.exit_reason = '{0} matched a defined plugin filter'.format(name)
+ return plugin_load_context
if mod_type:
suffix = mod_type
@@ -387,37 +566,46 @@ class PluginLoader:
candidates = [name]
else:
candidates = ['{0}.{1}'.format(c, name) for c in collection_list]
- # TODO: keep actual errors, not just assembled messages
- errors = []
+
for candidate_name in candidates:
try:
+ plugin_load_context.load_attempts.append(candidate_name)
# HACK: refactor this properly
if candidate_name.startswith('ansible.legacy'):
# 'ansible.legacy' refers to the plugin finding behavior used before collections existed.
# They need to search 'library' and the various '*_plugins' directories in order to find the file.
- full_name = name
- p = self._find_plugin_legacy(name.replace('ansible.legacy.', '', 1), ignore_deprecated, check_aliases, suffix)
+ plugin_load_context = self._find_plugin_legacy(name.replace('ansible.legacy.', '', 1),
+ plugin_load_context, ignore_deprecated, check_aliases, suffix)
else:
# 'ansible.builtin' should be handled here. This means only internal, or builtin, paths are searched.
- full_name, p = self._find_fq_plugin(candidate_name, suffix)
- if p:
- return full_name, p
+ plugin_load_context = self._find_fq_plugin(candidate_name, suffix, plugin_load_context=plugin_load_context)
+ if plugin_load_context.resolved or plugin_load_context.pending_redirect: # if we got an answer or need to chase down a redirect, return
+ return plugin_load_context
+ except (AnsiblePluginRemoved, AnsiblePluginCircularRedirect, AnsibleCollectionUnsupportedVersionError):
+ # these are generally fatal, let them fly
+ raise
+ except ImportError as ie:
+ plugin_load_context.import_error_list.append(ie)
except Exception as ex:
- errors.append(to_native(ex))
+ # FIXME: keep actual errors, not just assembled messages
+ plugin_load_context.error_list.append(to_native(ex))
+
+ if plugin_load_context.error_list:
+ display.debug(msg='plugin lookup for {0} failed; errors: {1}'.format(name, '; '.join(plugin_load_context.error_list)))
- if errors:
- display.debug(msg='plugin lookup for {0} failed; errors: {1}'.format(name, '; '.join(errors)))
+ plugin_load_context.exit_reason = 'no matches found for {0}'.format(name)
- return None, None
+ return plugin_load_context
# if we got here, there's no collection list and it's not an FQ name, so do legacy lookup
- return name, self._find_plugin_legacy(name, ignore_deprecated, check_aliases, suffix)
+ return self._find_plugin_legacy(name, plugin_load_context, ignore_deprecated, check_aliases, suffix)
- def _find_plugin_legacy(self, name, ignore_deprecated=False, check_aliases=False, suffix=None):
+ def _find_plugin_legacy(self, name, plugin_load_context, ignore_deprecated=False, check_aliases=False, suffix=None):
"""Search library and various *_plugins paths in order to find the file.
This was behavior prior to the existence of collections.
"""
+ plugin_load_context.resolved = False
if check_aliases:
name = self.aliases.get(name, name)
@@ -426,7 +614,10 @@ class PluginLoader:
# requested mod_type
pull_cache = self._plugin_path_cache[suffix]
try:
- return pull_cache[name]
+ plugin_load_context.plugin_resolved_path = pull_cache[name]
+ plugin_load_context.plugin_resolved_name = name
+ plugin_load_context.resolved = True
+ return plugin_load_context
except KeyError:
# Cache miss. Now let's find the plugin
pass
@@ -438,6 +629,7 @@ class PluginLoader:
# We can use _get_paths() since add_directory() forces a cache refresh.
for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
display.debug('trying %s' % path)
+ plugin_load_context.load_attempts.append(path)
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
@@ -474,7 +666,10 @@ class PluginLoader:
self._searched_paths.add(path)
try:
- return pull_cache[name]
+ plugin_load_context.plugin_resolved_path = pull_cache[name]
+ plugin_load_context.plugin_resolved_name = name
+ plugin_load_context.resolved = True
+ return plugin_load_context
except KeyError:
# Didn't find the plugin in this directory. Load modules from the next one
pass
@@ -488,9 +683,17 @@ class PluginLoader:
# FIXME: this is not always the case, some are just aliases
display.deprecated('%s is kept for backwards compatibility but usage is discouraged. ' # pylint: disable=ansible-deprecated-no-version
'The module documentation details page may explain more about this rationale.' % name.lstrip('_'))
- return pull_cache[alias_name]
+ plugin_load_context.plugin_resolved_path = pull_cache[alias_name]
+ plugin_load_context.plugin_resolved_name = alias_name
+ plugin_load_context.resolved = True
+ return plugin_load_context
- return None
+ # last ditch, if it's something that can be redirected, look for a builtin redirect before giving up
+ candidate_fqcr = 'ansible.builtin.{0}'.format(name)
+ if '.' not in name and AnsibleCollectionRef.is_valid_fqcr(candidate_fqcr):
+ return self._find_fq_plugin(fq_name=candidate_fqcr, extension=suffix, plugin_load_context=plugin_load_context)
+
+ return plugin_load_context.nope('{0} is not eligible for last-chance resolution'.format(name))
def has_plugin(self, name, collection_list=None):
''' Checks if a plugin named name exists '''
@@ -530,11 +733,12 @@ class PluginLoader:
module = imp.load_source(to_native(full_name), to_native(path), module_file)
return module
- def _update_object(self, obj, name, path):
+ def _update_object(self, obj, name, path, redirected_names=None):
# set extra info on the module, in case we want it later
setattr(obj, '_original_path', path)
setattr(obj, '_load_name', name)
+ setattr(obj, '_redirected_names', redirected_names or [])
def get(self, name, *args, **kwargs):
''' instantiates a plugin of the given name using arguments '''
@@ -544,10 +748,15 @@ class PluginLoader:
collection_list = kwargs.pop('collection_list', None)
if name in self.aliases:
name = self.aliases[name]
- name, path = self.find_plugin_with_name(name, collection_list=collection_list)
- if path is None:
+ plugin_load_context = self.find_plugin_with_context(name, collection_list=collection_list)
+ if not plugin_load_context.resolved or not plugin_load_context.plugin_resolved_path:
+ # FIXME: this is probably an error (eg removed plugin)
return None
+ name = plugin_load_context.plugin_resolved_name
+ path = plugin_load_context.plugin_resolved_path
+ redirected_names = plugin_load_context.redirect_list or []
+
if path not in self._module_cache:
self._module_cache[path] = self._load_module_source(name, path)
self._load_config_defs(name, self._module_cache[path], path)
@@ -566,6 +775,7 @@ class PluginLoader:
if not issubclass(obj, plugin_class):
return None
+ # FIXME: update this to use the load context
self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
if not class_only:
@@ -573,7 +783,7 @@ class PluginLoader:
# A plugin may need to use its _load_name in __init__ (for example, to set
# or get options from config), so update the object before using the constructor
instance = object.__new__(obj)
- self._update_object(instance, name, path)
+ self._update_object(instance, name, path, redirected_names)
obj.__init__(instance, *args, **kwargs)
obj = instance
except TypeError as e:
@@ -583,7 +793,7 @@ class PluginLoader:
return None
raise
- self._update_object(obj, name, path)
+ self._update_object(obj, name, path, redirected_names)
return obj
def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
@@ -818,9 +1028,52 @@ def _load_plugin_filter():
return filters
+# since we don't want the actual collection loader understanding metadata, we'll do it in an event handler
+def _on_collection_load_handler(collection_name, collection_path):
+ display.vvvv(to_text('Loading collection {0} from {1}'.format(collection_name, collection_path)))
+
+ collection_meta = _get_collection_metadata(collection_name)
+
+ try:
+ if not _does_collection_support_ansible_version(collection_meta.get('requires_ansible', ''), ansible_version):
+ mismatch_behavior = C.config.get_config_value('COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH')
+ message = 'Collection {0} does not support Ansible version {1}'.format(collection_name, ansible_version)
+ if mismatch_behavior == 'warning':
+ display.warning(message)
+ elif mismatch_behavior == 'error':
+ raise AnsibleCollectionUnsupportedVersionError(message)
+ except AnsibleError:
+ raise
+ except Exception as ex:
+ display.warning('Error parsing collection metadata requires_ansible value from collection {0}: {1}'.format(collection_name, ex))
+
+
+def _does_collection_support_ansible_version(requirement_string, ansible_version):
+ if not requirement_string:
+ return True
+
+ if not SpecifierSet:
+ display.warning('packaging Python module unavailable; unable to validate collection Ansible version requirements')
+ return True
+
+ ss = SpecifierSet(requirement_string)
+
+ # ignore prerelease/postrelease/beta/dev flags for simplicity
+ base_ansible_version = Version(ansible_version).base_version
+
+ return ss.contains(base_ansible_version)
+
+
def _configure_collection_loader():
- if not any((isinstance(l, AnsibleCollectionLoader) for l in sys.meta_path)):
- sys.meta_path.insert(0, AnsibleCollectionLoader(C.config))
+ if AnsibleCollectionConfig.collection_finder:
+ display.warning('AnsibleCollectionFinder has already been configured')
+ return
+
+ finder = _AnsibleCollectionFinder(C.config.get_config_value('COLLECTIONS_PATHS'), C.config.get_config_value('COLLECTIONS_SCAN_SYS_PATH'))
+ finder._install()
+
+ # this should succeed now
+ AnsibleCollectionConfig.on_collection_load += _on_collection_load_handler
# TODO: All of the following is initialization code It should be moved inside of an initialization
diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py
index 6883c9c1cb..5a7b979714 100644
--- a/lib/ansible/plugins/strategy/linear.py
+++ b/lib/ansible/plugins/strategy/linear.py
@@ -240,10 +240,28 @@ class StrategyModule(StrategyBase):
run_once = False
work_to_do = True
+ # check to see if this task should be skipped, due to it being a member of a
+ # role which has already run (and whether that role allows duplicate execution)
+ if task._role and task._role.has_run(host):
+ # If there is no metadata, the default behavior is to not allow duplicates,
+ # if there is metadata, check to see if the allow_duplicates flag was set to true
+ if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
+ display.debug("'%s' skipped because role has already run" % task)
+ continue
+
+ display.debug("getting variables")
+ task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=task,
+ _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
+ self.add_tqm_variables(task_vars, play=iterator._play)
+ templar = Templar(loader=self._loader, variables=task_vars)
+ display.debug("done getting variables")
+
# test to see if the task across all hosts points to an action plugin which
# sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
# will only send this task to the first host in the list.
+ task.action = templar.template(task.action)
+
try:
action = action_loader.get(task.action, class_only=True)
except KeyError:
@@ -251,15 +269,6 @@ class StrategyModule(StrategyBase):
# corresponding action plugin
action = None
- # check to see if this task should be skipped, due to it being a member of a
- # role which has already run (and whether that role allows duplicate execution)
- if task._role and task._role.has_run(host):
- # If there is no metadata, the default behavior is to not allow duplicates,
- # if there is metadata, check to see if the allow_duplicates flag was set to true
- if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
- display.debug("'%s' skipped because role has already run" % task)
- continue
-
if task.action == 'meta':
# for the linear strategy, we run meta tasks just once and for
# all hosts currently being iterated over rather than one host
@@ -277,13 +286,6 @@ class StrategyModule(StrategyBase):
skip_rest = True
break
- display.debug("getting variables")
- task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=task,
- _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
- self.add_tqm_variables(task_vars, play=iterator._play)
- templar = Templar(loader=self._loader, variables=task_vars)
- display.debug("done getting variables")
-
run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)
if (task.any_errors_fatal or run_once) and not task.ignore_errors:
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 10fa6e128b..aa779e82f3 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -30,6 +30,7 @@ import time
from contextlib import contextmanager
from distutils.version import LooseVersion
from numbers import Number
+from traceback import format_exc
try:
from hashlib import sha1
@@ -53,6 +54,7 @@ from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.collection_loader import AnsibleCollectionRef
from ansible.utils.display import Display
+from ansible.utils.collection_loader._collection_finder import _get_collection_metadata
from ansible.utils.unsafe_proxy import wrap_var
display = Display()
@@ -350,52 +352,75 @@ class JinjaPluginIntercept(MutableMapping):
# FUTURE: we can cache FQ filter/test calls for the entire duration of a run, since a given collection's impl's
# aren't supposed to change during a run
def __getitem__(self, key):
- if not isinstance(key, string_types):
- raise ValueError('key must be a string')
-
- key = to_native(key)
+ try:
+ if not isinstance(key, string_types):
+ raise ValueError('key must be a string')
- if '.' not in key: # might be a built-in value, delegate to base dict
- return self._delegatee.__getitem__(key)
+ key = to_native(key)
- func = self._collection_jinja_func_cache.get(key)
+ if '.' not in key: # might be a built-in or legacy, check the delegatee dict first, then try for a last-chance base redirect
+ func = self._delegatee.get(key)
- if func:
- return func
+ if func:
+ return func
- acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
+ ts = _get_collection_metadata('ansible.builtin')
- if not acr:
- raise KeyError('invalid plugin name: {0}'.format(key))
+ # TODO: implement support for collection-backed redirect (currently only builtin)
+ # TODO: implement cycle detection (unified across collection redir as well)
+ redirect_fqcr = ts.get('plugin_routing', {}).get(self._dirname, {}).get(key, {}).get('redirect', None)
+ if redirect_fqcr:
+ acr = AnsibleCollectionRef.from_fqcr(ref=redirect_fqcr, ref_type=self._dirname)
+ display.vvv('redirecting {0} {1} to {2}.{3}'.format(self._dirname, key, acr.collection, acr.resource))
+ key = redirect_fqcr
+ # TODO: handle recursive forwarding (not necessary for builtin, but definitely for further collection redirs)
- try:
- pkg = import_module(acr.n_python_package_name)
- except ImportError:
- raise KeyError()
+ func = self._collection_jinja_func_cache.get(key)
- parent_prefix = acr.collection
+ if func:
+ return func
- if acr.subdirs:
- parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)
+ acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
- for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
- if ispkg:
- continue
+ if not acr:
+ raise KeyError('invalid plugin name: {0}'.format(key))
try:
- plugin_impl = self._pluginloader.get(module_name)
- except Exception as e:
- raise TemplateSyntaxError(to_native(e), 0)
+ pkg = import_module(acr.n_python_package_name)
+ except ImportError:
+ raise KeyError()
- method_map = getattr(plugin_impl, self._method_map_name)
+ parent_prefix = acr.collection
- for f in iteritems(method_map()):
- fq_name = '.'.join((parent_prefix, f[0]))
- # FIXME: detect/warn on intra-collection function name collisions
- self._collection_jinja_func_cache[fq_name] = f[1]
+ if acr.subdirs:
+ parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)
+
+ # TODO: implement collection-level redirect
+
+ for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
+ if ispkg:
+ continue
- function_impl = self._collection_jinja_func_cache[key]
- return function_impl
+ try:
+ plugin_impl = self._pluginloader.get(module_name)
+ except Exception as e:
+ raise TemplateSyntaxError(to_native(e), 0)
+
+ method_map = getattr(plugin_impl, self._method_map_name)
+
+ for f in iteritems(method_map()):
+ fq_name = '.'.join((parent_prefix, f[0]))
+ # FIXME: detect/warn on intra-collection function name collisions
+ self._collection_jinja_func_cache[fq_name] = f[1]
+
+ function_impl = self._collection_jinja_func_cache[key]
+ return function_impl
+ except KeyError:
+ raise
+ except Exception as ex:
+ display.warning('an unexpected error occurred during Jinja2 environment setup: {0}'.format(to_native(ex)))
+ display.vvv('exception during Jinja2 environment setup: {0}'.format(format_exc()))
+ raise
def __setitem__(self, key, value):
return self._delegatee.__setitem__(key, value)
diff --git a/lib/ansible/utils/collection_loader.py b/lib/ansible/utils/collection_loader.py
deleted file mode 100644
index e38d4137f0..0000000000
--- a/lib/ansible/utils/collection_loader.py
+++ /dev/null
@@ -1,603 +0,0 @@
-# (c) 2019 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import os.path
-import re
-import sys
-
-from types import ModuleType
-
-from ansible.module_utils._text import to_bytes, to_native, to_text
-from ansible.module_utils.compat.importlib import import_module
-from ansible.module_utils.six import iteritems, string_types, with_metaclass
-from ansible.utils.singleton import Singleton
-
-_SYNTHETIC_PACKAGES = {
- # these provide fallback package definitions when there are no on-disk paths
- 'ansible_collections': dict(type='pkg_only', allow_external_subpackages=True),
- 'ansible_collections.ansible': dict(type='pkg_only', allow_external_subpackages=True),
- # these implement the ansible.builtin synthetic collection mapped to the packages inside the ansible distribution
- 'ansible_collections.ansible.builtin': dict(type='pkg_only'),
- 'ansible_collections.ansible.builtin.plugins': dict(type='map', map='ansible.plugins'),
- 'ansible_collections.ansible.builtin.plugins.module_utils': dict(type='map', map='ansible.module_utils', graft=True),
- 'ansible_collections.ansible.builtin.plugins.modules': dict(type='flatmap', flatmap='ansible.modules', graft=True),
-}
-
-
-# FIXME: exception handling/error logging
-class AnsibleCollectionLoader(with_metaclass(Singleton, object)):
- def __init__(self, config=None):
- if config:
- paths = config.get_config_value('COLLECTIONS_PATHS')
- else:
- paths = os.environ.get('ANSIBLE_COLLECTIONS_PATHS', '').split(os.pathsep)
-
- if isinstance(paths, string_types):
- paths = [paths]
- elif paths is None:
- paths = []
-
- # expand any placeholders in configured paths
- paths = [
- to_native(os.path.expanduser(p), errors='surrogate_or_strict')
- for p in paths
- ]
-
- # Append all ``ansible_collections`` dirs from sys.path to the end
- for path in sys.path:
- if (
- path not in paths and
- os.path.isdir(to_bytes(
- os.path.join(path, 'ansible_collections'),
- errors='surrogate_or_strict',
- ))
- ):
- paths.append(path)
-
- self._n_configured_paths = paths
-
- self._n_playbook_paths = []
- self._default_collection = None
- # pre-inject grafted package maps so we can force them to use the right loader instead of potentially delegating to a "normal" loader
- for syn_pkg_def in (p for p in iteritems(_SYNTHETIC_PACKAGES) if p[1].get('graft')):
- pkg_name = syn_pkg_def[0]
- pkg_def = syn_pkg_def[1]
-
- newmod = ModuleType(pkg_name)
- newmod.__package__ = pkg_name
- newmod.__file__ = '<ansible_synthetic_collection_package>'
- pkg_type = pkg_def.get('type')
-
- # TODO: need to rethink map style so we can just delegate all the loading
-
- if pkg_type == 'flatmap':
- newmod.__loader__ = AnsibleFlatMapLoader(import_module(pkg_def['flatmap']))
- newmod.__path__ = []
-
- sys.modules[pkg_name] = newmod
-
- @property
- def n_collection_paths(self):
- return self._n_playbook_paths + self._n_configured_paths
-
- def get_collection_path(self, collection_name):
- if not AnsibleCollectionRef.is_valid_collection_name(collection_name):
- raise ValueError('{0} is not a valid collection name'.format(to_native(collection_name)))
-
- m = import_module('ansible_collections.{0}'.format(collection_name))
-
- return m.__file__
-
- def set_playbook_paths(self, b_playbook_paths):
- if isinstance(b_playbook_paths, string_types):
- b_playbook_paths = [b_playbook_paths]
-
- # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
- added_paths = set()
-
- # de-dupe and ensure the paths are native strings (Python seems to do this for package paths etc, so assume it's safe)
- self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in b_playbook_paths if not (p in added_paths or added_paths.add(p))]
- # FIXME: only allow setting this once, or handle any necessary cache/package path invalidations internally?
-
- # FIXME: is there a better place to store this?
- # FIXME: only allow setting this once
- def set_default_collection(self, collection_name):
- self._default_collection = collection_name
-
- @property
- def default_collection(self):
- return self._default_collection
-
- def find_module(self, fullname, path=None):
- if self._find_module(fullname, path, load=False)[0]:
- return self
-
- return None
-
- def load_module(self, fullname):
- mod = self._find_module(fullname, None, load=True)[1]
-
- if not mod:
- raise ImportError('module {0} not found'.format(fullname))
-
- return mod
-
- def _find_module(self, fullname, path, load):
- # this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
- if not fullname.startswith('ansible_collections.') and fullname != 'ansible_collections':
- return False, None
-
- if sys.modules.get(fullname):
- if not load:
- return True, None
-
- return True, sys.modules[fullname]
-
- newmod = None
-
- # this loader implements key functionality for Ansible collections
- # * implicit distributed namespace packages for the root Ansible namespace (no pkgutil.extend_path hackery reqd)
- # * implicit package support for Python 2.7 (no need for __init__.py in collections, except to use standard Py2.7 tooling)
- # * preventing controller-side code injection during collection loading
- # * (default loader would execute arbitrary package code from all __init__.py's)
-
- parent_pkg_name = '.'.join(fullname.split('.')[:-1])
-
- parent_pkg = sys.modules.get(parent_pkg_name)
-
- if parent_pkg_name and not parent_pkg:
- raise ImportError('parent package {0} not found'.format(parent_pkg_name))
-
- # are we at or below the collection level? eg a.mynamespace.mycollection.something.else
- # if so, we don't want distributed namespace behavior; first mynamespace.mycollection on the path is where
- # we'll load everything from (ie, don't fall back to another mynamespace.mycollection lower on the path)
- sub_collection = fullname.count('.') > 1
-
- synpkg_def = _SYNTHETIC_PACKAGES.get(fullname)
- synpkg_remainder = ''
-
- if not synpkg_def:
- # if the parent is a grafted package, we have some special work to do, otherwise just look for stuff on disk
- parent_synpkg_def = _SYNTHETIC_PACKAGES.get(parent_pkg_name)
- if parent_synpkg_def and parent_synpkg_def.get('graft'):
- synpkg_def = parent_synpkg_def
- synpkg_remainder = '.' + fullname.rpartition('.')[2]
-
- # FUTURE: collapse as much of this back to on-demand as possible (maybe stub packages that get replaced when actually loaded?)
- if synpkg_def:
- pkg_type = synpkg_def.get('type')
- if not pkg_type:
- raise KeyError('invalid synthetic package type (no package "type" specified)')
- if pkg_type == 'map':
- map_package = synpkg_def.get('map')
-
- if not map_package:
- raise KeyError('invalid synthetic map package definition (no target "map" defined)')
-
- if not load:
- return True, None
-
- mod = import_module(map_package + synpkg_remainder)
-
- sys.modules[fullname] = mod
-
- return True, mod
- elif pkg_type == 'flatmap':
- raise NotImplementedError()
- elif pkg_type == 'pkg_only':
- if not load:
- return True, None
-
- newmod = ModuleType(fullname)
- newmod.__package__ = fullname
- newmod.__file__ = '<ansible_synthetic_collection_package>'
- newmod.__loader__ = self
- newmod.__path__ = []
-
- if not synpkg_def.get('allow_external_subpackages'):
- # if external subpackages are NOT allowed, we're done
- sys.modules[fullname] = newmod
- return True, newmod
-
- # if external subpackages ARE allowed, check for on-disk implementations and return a normal
- # package if we find one, otherwise return the one we created here
-
- if not parent_pkg: # top-level package, look for NS subpackages on all collection paths
- package_paths = [self._extend_path_with_ns(p, fullname) for p in self.n_collection_paths]
- else: # subpackage; search in all subpaths (we'll limit later inside a collection)
- package_paths = [self._extend_path_with_ns(p, fullname) for p in parent_pkg.__path__]
-
- for candidate_child_path in package_paths:
- code_object = None
- is_package = True
- location = None
- # check for implicit sub-package first
- if os.path.isdir(to_bytes(candidate_child_path)):
- # Py3.x implicit namespace packages don't have a file location, so they don't support get_data
- # (which assumes the parent dir or that the loader has an internal mapping); so we have to provide
- # a bogus leaf file on the __file__ attribute for pkgutil.get_data to strip off
- location = os.path.join(candidate_child_path, '__synthetic__')
- else:
- for source_path in [os.path.join(candidate_child_path, '__init__.py'),
- candidate_child_path + '.py']:
- if not os.path.isfile(to_bytes(source_path)):
- continue
-
- if not load:
- return True, None
-
- with open(to_bytes(source_path), 'rb') as fd:
- source = fd.read()
-
- code_object = compile(source=source, filename=source_path, mode='exec', flags=0, dont_inherit=True)
- location = source_path
- is_package = source_path.endswith('__init__.py')
- break
-
- if not location:
- continue
-
- newmod = ModuleType(fullname)
- newmod.__file__ = location
- newmod.__loader__ = self
-
- if is_package:
- if sub_collection: # we never want to search multiple instances of the same collection; use first found
- newmod.__path__ = [candidate_child_path]
- else:
- newmod.__path__ = package_paths
-
- newmod.__package__ = fullname
- else:
- newmod.__package__ = parent_pkg_name
-
- sys.modules[fullname] = newmod
-
- if code_object:
- # FIXME: decide cases where we don't actually want to exec the code?
- exec(code_object, newmod.__dict__)
-
- return True, newmod
-
- # even if we didn't find one on disk, fall back to a synthetic package if we have one...
- if newmod:
- sys.modules[fullname] = newmod
- return True, newmod
-
- # FIXME: need to handle the "no dirs present" case for at least the root and synthetic internal collections like ansible.builtin
-
- return False, None
-
- @staticmethod
- def _extend_path_with_ns(path, ns):
- ns_path_add = ns.rsplit('.', 1)[-1]
-
- return os.path.join(path, ns_path_add)
-
- def get_data(self, filename):
- with open(filename, 'rb') as fd:
- return fd.read()
-
-
-class AnsibleFlatMapLoader(object):
- _extension_blacklist = ['.pyc', '.pyo']
-
- def __init__(self, root_package):
- self._root_package = root_package
- self._dirtree = None
-
- def _init_dirtree(self):
- # FIXME: thread safety
- root_path = os.path.dirname(self._root_package.__file__)
- flat_files = []
- # FIXME: make this a dict of filename->dir for faster direct lookup?
- # FIXME: deal with _ prefixed deprecated files (or require another method for collections?)
- # FIXME: fix overloaded filenames (eg, rename Windows setup to win_setup)
- for root, dirs, files in os.walk(root_path):
- # add all files in this dir that don't have a blacklisted extension
- flat_files.extend(((root, f) for f in files if not any((f.endswith(ext) for ext in self._extension_blacklist))))
-
- # HACK: Put Windows modules at the end of the list. This makes collection_loader behave
- # the same way as plugin loader, preventing '.ps1' from modules being selected before '.py'
- # modules simply because '.ps1' files may be above '.py' files in the flat_files list.
- #
- # The expected sort order is paths in the order they were in 'flat_files'
- # with paths ending in '/windows' at the end, also in the original order they were
- # in 'flat_files'. The .sort() method is guaranteed to be stable, so original order is preserved.
- flat_files.sort(key=lambda p: p[0].endswith('/windows'))
- self._dirtree = flat_files
-
- def find_file(self, filename):
- # FIXME: thread safety
- if not self._dirtree:
- self._init_dirtree()
-
- if '.' not in filename: # no extension specified, use extension regex to filter
- extensionless_re = re.compile(r'^{0}(\..+)?$'.format(re.escape(filename)))
- # why doesn't Python have first()?
- try:
- # FIXME: store extensionless in a separate direct lookup?
- filepath = next(os.path.join(r, f) for r, f in self._dirtree if extensionless_re.match(f))
- except StopIteration:
- raise IOError("couldn't find {0}".format(filename))
- else: # actual filename, just look it up
- # FIXME: this case sucks; make it a lookup
- try:
- filepath = next(os.path.join(r, f) for r, f in self._dirtree if f == filename)
- except StopIteration:
- raise IOError("couldn't find {0}".format(filename))
-
- return filepath
-
- def get_data(self, filename):
- found_file = self.find_file(filename)
-
- with open(found_file, 'rb') as fd:
- return fd.read()
-
-
-# TODO: implement these for easier inline debugging?
-# def get_source(self, fullname):
-# def get_code(self, fullname):
-# def is_package(self, fullname):
-
-
-class AnsibleCollectionRef:
- # FUTURE: introspect plugin loaders to get these dynamically?
- VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
- 'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
- 'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
- 'terminal', 'test', 'vars'])
-
- # FIXME: tighten this up to match Python identifier reqs, etc
- VALID_COLLECTION_NAME_RE = re.compile(to_text(r'^(\w+)\.(\w+)$'))
- VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
- VALID_FQCR_RE = re.compile(to_text(r'^\w+\.\w+\.\w+(\.\w+)*$')) # can have 0-N included subdirs as well
-
- def __init__(self, collection_name, subdirs, resource, ref_type):
- """
- Create an AnsibleCollectionRef from components
- :param collection_name: a collection name of the form 'namespace.collectionname'
- :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
-        :param resource: the name of the resource being referenced (eg, 'mymodule', 'someaction', 'a_role')
- :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
- """
- collection_name = to_text(collection_name, errors='strict')
- if subdirs is not None:
- subdirs = to_text(subdirs, errors='strict')
- resource = to_text(resource, errors='strict')
- ref_type = to_text(ref_type, errors='strict')
-
- if not self.is_valid_collection_name(collection_name):
- raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))
-
- if ref_type not in self.VALID_REF_TYPES:
- raise ValueError('invalid collection ref_type: {0}'.format(ref_type))
-
- self.collection = collection_name
- if subdirs:
- if not re.match(self.VALID_SUBDIRS_RE, subdirs):
- raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
- self.subdirs = subdirs
- else:
- self.subdirs = u''
-
- self.resource = resource
- self.ref_type = ref_type
-
- package_components = [u'ansible_collections', self.collection]
-
- if self.ref_type == u'role':
- package_components.append(u'roles')
- else:
- # we assume it's a plugin
- package_components += [u'plugins', self.ref_type]
-
- if self.subdirs:
- package_components.append(self.subdirs)
-
- if self.ref_type == u'role':
- # roles are their own resource
- package_components.append(self.resource)
-
- self.n_python_package_name = to_native('.'.join(package_components))
-
- @staticmethod
- def from_fqcr(ref, ref_type):
- """
- Parse a string as a fully-qualified collection reference, raises ValueError if invalid
- :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
- :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
- :return: a populated AnsibleCollectionRef object
- """
- # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
- # we split the resource name off the right, split ns and coll off the left, and we're left with any optional
- # subdirs that need to be added back below the plugin-specific subdir we'll add. So:
- # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
- # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource
- # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
- if not AnsibleCollectionRef.is_valid_fqcr(ref):
- raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))
-
- ref = to_text(ref, errors='strict')
- ref_type = to_text(ref_type, errors='strict')
-
- resource_splitname = ref.rsplit(u'.', 1)
- package_remnant = resource_splitname[0]
- resource = resource_splitname[1]
-
- # split the left two components of the collection package name off, anything remaining is plugin-type
- # specific subdirs to be added back on below the plugin type
- package_splitname = package_remnant.split(u'.', 2)
- if len(package_splitname) == 3:
- subdirs = package_splitname[2]
- else:
- subdirs = u''
-
- collection_name = u'.'.join(package_splitname[0:2])
-
- return AnsibleCollectionRef(collection_name, subdirs, resource, ref_type)
-
- @staticmethod
- def try_parse_fqcr(ref, ref_type):
- """
- Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
- :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
- :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
- :return: a populated AnsibleCollectionRef object on successful parsing, else None
- """
- try:
- return AnsibleCollectionRef.from_fqcr(ref, ref_type)
- except ValueError:
- pass
-
- @staticmethod
- def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
- """
- Utility method to convert from a PluginLoader dir name to a plugin ref_type
- :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
- :return: the corresponding plugin ref_type (eg, 'action', 'role')
- """
- legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)
-
- plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')
-
- if plugin_type == u'library':
- plugin_type = u'modules'
-
- if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
- raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))
-
- return plugin_type
-
- @staticmethod
- def is_valid_fqcr(ref, ref_type=None):
- """
-        Validates if a string is a well-formed fully-qualified collection reference (does not look up the collection itself)
- :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
- :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
- :return: True if the collection ref passed is well-formed, False otherwise
- """
-
- ref = to_text(ref)
-
- if not ref_type:
- return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))
-
- return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))
-
- @staticmethod
- def is_valid_collection_name(collection_name):
- """
- Validates if the given string is a well-formed collection name (does not look up the collection itself)
- :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
- :return: True if the collection name passed is well-formed, False otherwise
- """
-
- collection_name = to_text(collection_name)
-
- return bool(re.match(AnsibleCollectionRef.VALID_COLLECTION_NAME_RE, collection_name))
-
-
-def get_collection_role_path(role_name, collection_list=None):
- acr = AnsibleCollectionRef.try_parse_fqcr(role_name, 'role')
-
- if acr:
- # looks like a valid qualified collection ref; skip the collection_list
- collection_list = [acr.collection]
- subdirs = acr.subdirs
- resource = acr.resource
- elif not collection_list:
- return None # not a FQ role and no collection search list spec'd, nothing to do
- else:
- resource = role_name # treat as unqualified, loop through the collection search list to try and resolve
- subdirs = ''
-
- for collection_name in collection_list:
- try:
- acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type='role')
- # FIXME: error handling/logging; need to catch any import failures and move along
-
- # FIXME: this line shouldn't be necessary, but py2 pkgutil.get_data is delegating back to built-in loader when it shouldn't
- pkg = import_module(acr.n_python_package_name)
-
- if pkg is not None:
- # the package is now loaded, get the collection's package and ask where it lives
- path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
- return resource, to_text(path, errors='surrogate_or_strict'), collection_name
-
- except IOError:
- continue
- except Exception as ex:
- # FIXME: pick out typical import errors first, then error logging
- continue
-
- return None
-
-
-_N_COLLECTION_PATH_RE = re.compile(r'/ansible_collections/([^/]+)/([^/]+)')
-
-
-def get_collection_name_from_path(path):
- """
- Return the containing collection name for a given path, or None if the path is not below a configured collection, or
- the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
- collection roots).
-    :param path: native-string path to evaluate for collection containment
- :return: collection name or None
- """
- n_collection_paths = [to_native(os.path.abspath(to_bytes(p))) for p in AnsibleCollectionLoader().n_collection_paths]
-
- b_path = os.path.abspath(to_bytes(path))
- n_path = to_native(b_path)
-
- for coll_path in n_collection_paths:
- common_prefix = to_native(os.path.commonprefix([b_path, to_bytes(coll_path)]))
- if common_prefix == coll_path:
- # strip off the common prefix (handle weird testing cases of nested collection roots, eg)
- collection_remnant = n_path[len(coll_path):]
- # commonprefix may include the trailing /, prepend to the remnant if necessary (eg trailing / on root)
- if collection_remnant and collection_remnant[0] != '/':
- collection_remnant = '/' + collection_remnant
- # the path lives under this collection root, see if it maps to a collection
- found_collection = _N_COLLECTION_PATH_RE.search(collection_remnant)
- if not found_collection:
- continue
- n_collection_name = '{0}.{1}'.format(*found_collection.groups())
-
- loaded_collection_path = AnsibleCollectionLoader().get_collection_path(n_collection_name)
-
- if not loaded_collection_path:
- return None
-
- # ensure we're using the canonical real path, with the bogus __synthetic__ stripped off
- b_loaded_collection_path = os.path.dirname(os.path.abspath(to_bytes(loaded_collection_path)))
-
- # if the collection path prefix matches the path prefix we were passed, it's the same collection that's loaded
- if os.path.commonprefix([b_path, b_loaded_collection_path]) == b_loaded_collection_path:
- return n_collection_name
-
- return None # if not, it's a collection, but not the same collection the loader sees, so ignore it
-
-
-def set_collection_playbook_paths(b_playbook_paths):
- AnsibleCollectionLoader().set_playbook_paths(b_playbook_paths)
-
-
-def resource_from_fqcr(ref):
- """
- Return resource from a fully-qualified collection reference,
- or from a simple resource name.
-
- For fully-qualified collection references, this is equivalent to
- ``AnsibleCollectionRef.from_fqcr(ref).resource``.
-
- :param ref: collection reference to parse
- :return: the resource as a unicode string
- """
- ref = to_text(ref, errors='strict')
- return ref.split(u'.')[-1]
diff --git a/lib/ansible/utils/collection_loader/__init__.py b/lib/ansible/utils/collection_loader/__init__.py
new file mode 100644
index 0000000000..a81f503966
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/__init__.py
@@ -0,0 +1,23 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# FIXME: decide what of this we want to actually be public/toplevel, put other stuff on a utility class?
+from ._collection_config import AnsibleCollectionConfig
+from ._collection_finder import AnsibleCollectionRef
+from ansible.module_utils.common.text.converters import to_text
+
+
+def resource_from_fqcr(ref):
+ """
+ Return resource from a fully-qualified collection reference,
+ or from a simple resource name.
+ For fully-qualified collection references, this is equivalent to
+ ``AnsibleCollectionRef.from_fqcr(ref).resource``.
+ :param ref: collection reference to parse
+ :return: the resource as a unicode string
+ """
+ ref = to_text(ref, errors='strict')
+ return ref.split(u'.')[-1]
diff --git a/lib/ansible/utils/collection_loader/_collection_config.py b/lib/ansible/utils/collection_loader/_collection_config.py
new file mode 100644
index 0000000000..e717cde957
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/_collection_config.py
@@ -0,0 +1,101 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils.six import with_metaclass
+
+
+class _EventSource:
+ def __init__(self):
+ self._handlers = set()
+
+ def __iadd__(self, handler):
+ if not callable(handler):
+ raise ValueError('handler must be callable')
+ self._handlers.add(handler)
+ return self
+
+ def __isub__(self, handler):
+ try:
+ self._handlers.remove(handler)
+ except KeyError:
+ pass
+
+ return self
+
+ def _on_exception(self, handler, exc, *args, **kwargs):
+ # if we return True, we want the caller to re-raise
+ return True
+
+ def fire(self, *args, **kwargs):
+ for h in self._handlers:
+ try:
+ h(*args, **kwargs)
+ except Exception as ex:
+ if self._on_exception(h, ex, *args, **kwargs):
+ raise
+
+
+class _AnsibleCollectionConfig(type):
+ def __init__(cls, meta, name, bases):
+ cls._collection_finder = None
+ cls._default_collection = None
+ cls._on_collection_load = _EventSource()
+
+ @property
+ def collection_finder(cls):
+ return cls._collection_finder
+
+ @collection_finder.setter
+ def collection_finder(cls, value):
+ if cls._collection_finder:
+ raise ValueError('an AnsibleCollectionFinder has already been configured')
+
+ cls._collection_finder = value
+
+ @property
+ def collection_paths(cls):
+ cls._require_finder()
+ return [to_text(p) for p in cls._collection_finder._n_collection_paths]
+
+ @property
+ def default_collection(cls):
+ return cls._default_collection
+
+ @default_collection.setter
+ def default_collection(cls, value):
+ if cls._default_collection:
+ raise ValueError('default collection {0} has already been configured'.format(value))
+
+ cls._default_collection = value
+
+ @property
+ def on_collection_load(cls):
+ return cls._on_collection_load
+
+ @on_collection_load.setter
+ def on_collection_load(cls, value):
+ if value is not cls._on_collection_load:
+ raise ValueError('on_collection_load is not directly settable (use +=)')
+
+ @property
+ def playbook_paths(cls):
+ cls._require_finder()
+ return [to_text(p) for p in cls._collection_finder._n_playbook_paths]
+
+ @playbook_paths.setter
+ def playbook_paths(cls, value):
+ cls._require_finder()
+ cls._collection_finder.set_playbook_paths(value)
+
+ def _require_finder(cls):
+ if not cls._collection_finder:
+ raise NotImplementedError('an AnsibleCollectionFinder has not been installed in this process')
+
+
+# concrete class of our metaclass type that defines the class properties we want
+class AnsibleCollectionConfig(with_metaclass(_AnsibleCollectionConfig)):
+ pass
diff --git a/lib/ansible/utils/collection_loader/_collection_finder.py b/lib/ansible/utils/collection_loader/_collection_finder.py
new file mode 100644
index 0000000000..c2366680ac
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/_collection_finder.py
@@ -0,0 +1,953 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import os.path
+import pkgutil
+import re
+import sys
+
+
+# DO NOT add new non-stdlib import deps here, this loader is used by external tools (eg ansible-test import sanity)
+# that only allow stdlib and module_utils
+from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
+from ansible.module_utils.six import string_types, PY3
+from ._collection_config import AnsibleCollectionConfig
+
+from contextlib import contextmanager
+from types import ModuleType
+
+try:
+ from importlib import import_module
+except ImportError:
+ def import_module(name):
+ __import__(name)
+ return sys.modules[name]
+
+try:
+ from importlib import reload as reload_module
+except ImportError:
+ # 2.7 has a global reload function instead...
+ reload_module = reload # pylint:disable=undefined-variable
+
+# NB: this supports import sanity test providing a different impl
+try:
+ from ._collection_meta import _meta_yml_to_dict
+except ImportError:
+ _meta_yml_to_dict = None
+
+
+class _AnsibleCollectionFinder:
+ def __init__(self, paths=None, scan_sys_paths=True):
+ # TODO: accept metadata loader override
+ self._ansible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['ansible'].__file__)))
+
+ if isinstance(paths, string_types):
+ paths = [paths]
+ elif paths is None:
+ paths = []
+
+ # expand any placeholders in configured paths
+ paths = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in paths]
+
+ if scan_sys_paths:
+ # append all sys.path entries with an ansible_collections package
+ for path in sys.path:
+ if (
+ path not in paths and
+ os.path.isdir(to_bytes(
+ os.path.join(path, 'ansible_collections'),
+ errors='surrogate_or_strict',
+ ))
+ ):
+ paths.append(path)
+
+ self._n_configured_paths = paths
+ self._n_cached_collection_paths = None
+ self._n_cached_collection_qualified_paths = None
+
+ self._n_playbook_paths = []
+
+ @classmethod
+ def _remove(cls):
+ for mps in sys.meta_path:
+ if isinstance(mps, _AnsibleCollectionFinder):
+ sys.meta_path.remove(mps)
+
+ # remove any path hooks that look like ours
+ for ph in sys.path_hooks:
+ if hasattr(ph, '__self__') and isinstance(ph.__self__, _AnsibleCollectionFinder):
+ sys.path_hooks.remove(ph)
+
+ # zap any cached path importer cache entries that might refer to us
+ sys.path_importer_cache.clear()
+
+ AnsibleCollectionConfig._collection_finder = None
+
+ # validate via the public property that we really killed it
+ if AnsibleCollectionConfig.collection_finder is not None:
+ raise AssertionError('_AnsibleCollectionFinder remove did not reset AnsibleCollectionConfig.collection_finder')
+
+ def _install(self):
+ self._remove()
+ sys.meta_path.insert(0, self)
+
+ sys.path_hooks.insert(0, self._ansible_collection_path_hook)
+
+ AnsibleCollectionConfig.collection_finder = self
+
+ def _ansible_collection_path_hook(self, path):
+ path = to_native(path)
+ interesting_paths = self._n_cached_collection_qualified_paths
+ if not interesting_paths:
+ interesting_paths = [os.path.join(p, 'ansible_collections') for p in
+ self._n_collection_paths]
+ interesting_paths.insert(0, self._ansible_pkg_path)
+ self._n_cached_collection_qualified_paths = interesting_paths
+
+ if any(path.startswith(p) for p in interesting_paths):
+ return _AnsiblePathHookFinder(self, path)
+
+ raise ImportError('not interested')
+
+ @property
+ def _n_collection_paths(self):
+ paths = self._n_cached_collection_paths
+ if not paths:
+ self._n_cached_collection_paths = paths = self._n_playbook_paths + self._n_configured_paths
+ return paths
+
+ def set_playbook_paths(self, playbook_paths):
+ if isinstance(playbook_paths, string_types):
+ playbook_paths = [playbook_paths]
+
+ # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
+ added_paths = set()
+
+ # de-dupe
+ self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in playbook_paths if not (p in added_paths or added_paths.add(p))]
+ self._n_cached_collection_paths = None
+ # HACK: playbook CLI sets this relatively late, so we've already loaded some packages whose paths might depend on this. Fix those up.
+ # NB: this should NOT be used for late additions; ideally we'd fix the playbook dir setup earlier in Ansible init
+ # to prevent this from occurring
+ for pkg in ['ansible_collections', 'ansible_collections.ansible']:
+ self._reload_hack(pkg)
+
+ def _reload_hack(self, fullname):
+ m = sys.modules.get(fullname)
+ if not m:
+ return
+ reload_module(m)
+
+ def find_module(self, fullname, path=None):
+ # Figure out what's being asked for, and delegate to a special-purpose loader
+
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+ module_to_find = split_name[-1]
+ part_count = len(split_name)
+
+ if toplevel_pkg not in ['ansible', 'ansible_collections']:
+ # not interested in anything other than ansible_collections (and limited cases under ansible)
+ return None
+
+ # sanity check what we're getting from import, canonicalize path values
+ if part_count == 1:
+ if path:
+ raise ValueError('path should not be specified for top-level packages (trying to find {0})'.format(fullname))
+ else:
+ # seed the path to the configured collection roots
+ path = self._n_collection_paths
+
+ if part_count > 1 and path is None:
+ raise ValueError('path must be specified for subpackages (trying to find {0})'.format(fullname))
+
+ # NB: actual "find"ing is delegated to the constructors on the various loaders; they'll ImportError if not found
+ try:
+ if toplevel_pkg == 'ansible':
+ # something under the ansible package, delegate to our internal loader in case of redirections
+ return _AnsibleInternalRedirectLoader(fullname=fullname, path_list=path)
+ if part_count == 1:
+ return _AnsibleCollectionRootPkgLoader(fullname=fullname, path_list=path)
+ if part_count == 2: # ns pkg eg, ansible_collections, ansible_collections.somens
+ return _AnsibleCollectionNSPkgLoader(fullname=fullname, path_list=path)
+ elif part_count == 3: # collection pkg eg, ansible_collections.somens.somecoll
+ return _AnsibleCollectionPkgLoader(fullname=fullname, path_list=path)
+ # anything below the collection
+ return _AnsibleCollectionLoader(fullname=fullname, path_list=path)
+ except ImportError:
+ # TODO: log attempt to load context
+ return None
+
+
+# Implements a path_hook finder for iter_modules (since it's only path based). This finder does not need to actually
+# function as a finder in most cases, since our meta_path finder is consulted first for *almost* everything, except
+# pkgutil.iter_modules, and under py2, pkgutil.get_data if the parent package passed has not been loaded yet.
+class _AnsiblePathHookFinder:
+ def __init__(self, collection_finder, pathctx):
+ # when called from a path_hook, find_module doesn't usually get the path arg, so this provides our context
+ self._pathctx = to_native(pathctx)
+ self._collection_finder = collection_finder
+ if PY3:
+ # cache the native FileFinder (take advantage of its filesystem cache for future find/load requests)
+ self._file_finder = None
+
+ # class init is fun- this method has a self arg that won't get used
+ def _get_filefinder_path_hook(self=None):
+ _file_finder_hook = None
+ if PY3:
+ # try to find the FileFinder hook to call for fallback path-based imports in Py3
+ _file_finder_hook = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)]
+ if len(_file_finder_hook) != 1:
+ raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(_file_finder_hook)))
+ _file_finder_hook = _file_finder_hook[0]
+
+ return _file_finder_hook
+
+ _filefinder_path_hook = _get_filefinder_path_hook()
+
+ def find_module(self, fullname, path=None):
+ # we ignore the passed in path here- use what we got from the path hook init
+ split_name = fullname.split('.')
+ toplevel_pkg = split_name[0]
+
+ if toplevel_pkg == 'ansible_collections':
+ # collections content? delegate to the collection finder
+ return self._collection_finder.find_module(fullname, path=[self._pathctx])
+ else:
+ # Something else; we'd normally restrict this to `ansible` descendent modules so that any weird loader
+ # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test
+ # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and
+ # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure
+ # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the
+ # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's
+ # built-in FS caching and byte-compilation for most things.
+ if PY3:
+ # create or consult our cached file finder for this path
+ if not self._file_finder:
+ self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx)
+
+ spec = self._file_finder.find_spec(fullname)
+ if not spec:
+ return None
+ return spec.loader
+ else:
+ # call py2's internal loader
+ return pkgutil.ImpImporter(self._pathctx).find_module(fullname)
+
+ def iter_modules(self, prefix):
+ # NB: this currently represents only what's on disk, and does not handle package redirection
+ return _iter_modules_impl([self._pathctx], prefix)
+
+ def __repr__(self):
+ return "{0}(path='{1}')".format(self.__class__.__name__, self._pathctx)
+
+
class _AnsibleCollectionPkgLoaderBase:
    """Shared base for the PEP302-style loaders that service the ansible_collections package tree.

    Handles dotted-name splitting/validation, candidate path resolution, module/package
    creation and execution, and source/code/data access. Subclasses specialize behavior
    per package level via the ``_validate_*`` and ``_get_*`` hooks.
    """
    # subclasses set True to allow executing on-disk package init code
    _allows_package_code = False

    def __init__(self, fullname, path_list=None):
        # the dotted module name this loader instance is bound to (one loader per name)
        self._fullname = fullname
        # set by subclasses when the requested import is redirected to another module
        self._redirect_module = None
        self._split_name = fullname.split('.')
        self._rpart_name = fullname.rpartition('.')
        self._parent_package_name = self._rpart_name[0]  # eg ansible_collections for ansible_collections.somens, '' for toplevel
        self._package_to_load = self._rpart_name[2]  # eg somens for ansible_collections.somens

        # lazily-populated source/code state (see get_source/get_code)
        self._source_code_path = None
        self._decoded_source = None
        self._compiled_code = None

        self._validate_args()

        self._candidate_paths = self._get_candidate_paths([to_native(p) for p in path_list])
        self._subpackage_search_paths = self._get_subpackage_search_paths(self._candidate_paths)

        self._validate_final()

    # allow subclasses to validate args and sniff split values before we start digging around
    def _validate_args(self):
        if self._split_name[0] != 'ansible_collections':
            raise ImportError('this loader can only load packages from the ansible_collections package, not {0}'.format(self._fullname))

    # allow subclasses to customize candidate path filtering
    def _get_candidate_paths(self, path_list):
        return [os.path.join(p, self._package_to_load) for p in path_list]

    # allow subclasses to customize finding paths
    def _get_subpackage_search_paths(self, candidate_paths):
        # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules)
        return [p for p in candidate_paths if os.path.isdir(to_bytes(p))]

    # allow subclasses to customize state validation/manipulation before we return the loader instance
    def _validate_final(self):
        return

    @staticmethod
    @contextmanager
    def _new_or_existing_module(name, **kwargs):
        """Yield the (new or pre-existing) sys.modules entry for ``name`` with ``kwargs`` set as attributes.

        If this call created the entry and the body raises, the entry is removed again
        (all-or-nothing semantics for module creation).
        """
        # handle all-or-nothing sys.modules creation/use-existing/delete-on-exception-if-created behavior
        created_module = False
        module = sys.modules.get(name)
        try:
            if not module:
                module = ModuleType(name)
                created_module = True
                sys.modules[name] = module
            # always override the values passed, except name (allow reference aliasing)
            for attr, value in kwargs.items():
                setattr(module, attr, value)
            yield module
        except Exception:
            # roll back the sys.modules entry, but only if we created it
            if created_module:
                if sys.modules.get(name):
                    sys.modules.pop(name)
            raise

    # basic module/package location support
    # NB: this does not support distributed packages!
    @staticmethod
    def _module_file_from_path(leaf_name, path):
        """Locate ``leaf_name`` under ``path``; return (module_path, has_code, package_path).

        ``has_code`` is False for package dirs with no ``__init__.py`` (a synthetic init
        path is returned instead); ``package_path`` is None for plain modules.
        Raises ImportError if nothing is found.
        """
        has_code = True
        package_path = os.path.join(to_native(path), to_native(leaf_name))
        module_path = None

        # if the submodule is a package, assemble valid submodule paths, but stop looking for a module
        if os.path.isdir(to_bytes(package_path)):
            # is there a package init?
            module_path = os.path.join(package_path, '__init__.py')
            if not os.path.isfile(to_bytes(module_path)):
                module_path = os.path.join(package_path, '__synthetic__')
                has_code = False
        else:
            module_path = package_path + '.py'
            package_path = None
            if not os.path.isfile(to_bytes(module_path)):
                raise ImportError('{0} not found at {1}'.format(leaf_name, path))

        return module_path, has_code, package_path

    def load_module(self, fullname):
        """Create (or update) and execute the module for ``fullname``; return it (PEP302 loader API)."""
        # short-circuit redirect; we've already imported the redirected module, so just alias it and return it
        if self._redirect_module:
            sys.modules[self._fullname] = self._redirect_module
            return self._redirect_module

        # we're actually loading a module/package
        module_attrs = dict(
            __loader__=self,
            __file__=self.get_filename(fullname),
            __package__=self._parent_package_name  # sane default for non-packages
        )

        # eg, I am a package
        if self._subpackage_search_paths is not None:  # empty is legal
            module_attrs['__path__'] = self._subpackage_search_paths
            module_attrs['__package__'] = fullname  # per PEP366

        with self._new_or_existing_module(fullname, **module_attrs) as module:
            # execute the module's code in its namespace
            exec(self.get_code(fullname), module.__dict__)

        return module

    def is_package(self, fullname):
        # a None subpackage search path list means "not a package"; empty list is still a package
        if fullname != self._fullname:
            raise ValueError('this loader cannot answer is_package for {0}, only {1}'.format(fullname, self._fullname))
        return self._subpackage_search_paths is not None

    def get_source(self, fullname):
        """Return the raw source for this loader's module (cached after first read), or None."""
        if self._decoded_source:
            return self._decoded_source
        if fullname != self._fullname:
            raise ValueError('this loader cannot load source for {0}, only {1}'.format(fullname, self._fullname))
        if not self._source_code_path:
            return None
        # FIXME: what do we want encoding/newline requirements to be?
        self._decoded_source = self.get_data(self._source_code_path)
        return self._decoded_source

    def get_data(self, path):
        """Return the bytes at an absolute ``path``, or None if no file exists there."""
        if not path:
            raise ValueError('a path must be specified')

        # TODO: ensure we're being asked for a path below something we own
        # TODO: try to handle redirects internally?

        if not path[0] == '/':
            # relative to current package, search package paths if possible (this may not be necessary)
            # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths]
            raise ValueError('relative resource paths not supported')
        else:
            candidate_paths = [path]

        for p in candidate_paths:
            b_path = to_bytes(p)
            if os.path.isfile(b_path):
                with open(b_path, 'rb') as fd:
                    return fd.read()

        return None

    def _synthetic_filename(self, fullname):
        # placeholder __file__ value for packages without a single on-disk location
        return '<ansible_synthetic_collection_package>'

    def get_filename(self, fullname):
        """Return the value used for the module's ``__file__`` (may be synthetic), or None."""
        if fullname != self._fullname:
            raise ValueError('this loader cannot find files for {0}, only {1}'.format(fullname, self._fullname))

        filename = self._source_code_path

        # packages without real init code get a synthetic filename
        if not filename and self.is_package(fullname):
            if len(self._subpackage_search_paths) == 1:
                filename = os.path.join(self._subpackage_search_paths[0], '__synthetic__')
            else:
                filename = self._synthetic_filename(fullname)

        return filename

    def get_code(self, fullname):
        """Compile (once) and return the code object for this loader's module; empty source compiles to a no-op."""
        if self._compiled_code:
            return self._compiled_code

        # this may or may not be an actual filename, but it's the value we'll use for __file__
        filename = self.get_filename(fullname)
        if not filename:
            filename = '<string>'

        source_code = self.get_source(fullname)
        if not source_code:
            source_code = ''

        self._compiled_code = compile(source=source_code, filename=filename, mode='exec', flags=0, dont_inherit=True)

        return self._compiled_code

    def iter_modules(self, prefix):
        # on-disk contents only; redirected modules are not included
        return _iter_modules_impl(self._subpackage_search_paths, prefix)

    def __repr__(self):
        return '{0}(path={1})'.format(self.__class__.__name__, self._subpackage_search_paths or self._source_code_path)
+
+
class _AnsibleCollectionRootPkgLoader(_AnsibleCollectionPkgLoaderBase):
    """Loader for the single top-level ``ansible_collections`` package."""

    def _validate_args(self):
        super(_AnsibleCollectionRootPkgLoader, self)._validate_args()
        # only the bare one-component root name is acceptable at this level
        name_depth = len(self._split_name)
        if name_depth != 1:
            raise ImportError('this loader can only load the ansible_collections toplevel package, not {0}'.format(self._fullname))
+
+
# Custom namespace-package support for Ansible collections. The ansible_collections package and the
# namespace packages one level below it search across every configured collection root; from the
# collection package (two levels down) only the first match on the configured root path wins, and no
# namespace aggregation happens at or below the collection. Implicit (dir-only) packages work on both
# Py2 and Py3; any package init code is ignored by this loader.
class _AnsibleCollectionNSPkgLoader(_AnsibleCollectionPkgLoaderBase):
    """Loader for collection namespace packages (``ansible_collections.<namespace>``)."""

    def _validate_args(self):
        super(_AnsibleCollectionNSPkgLoader, self)._validate_args()
        name_depth = len(self._split_name)
        if name_depth != 2:
            raise ImportError('this loader can only load collections namespace packages, not {0}'.format(self._fullname))

    def _validate_final(self):
        # the `ansible` namespace is special-cased: ansible.builtin is synthetic, so this
        # namespace must resolve even when nothing exists for it on disk
        if self._package_to_load != 'ansible' and not self._subpackage_search_paths:
            raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
+
+
# handles locating the actual collection package and associated metadata
class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase):
    """Loader for collection packages themselves (``ansible_collections.<ns>.<coll>``)."""
    def _validate_args(self):
        super(_AnsibleCollectionPkgLoader, self)._validate_args()
        if len(self._split_name) != 3:
            raise ImportError('this loader can only load collection packages, not {0}'.format(self._fullname))

    def _validate_final(self):
        if self._split_name[1:3] == ['ansible', 'builtin']:
            # we don't want to allow this one to have on-disk search capability
            self._subpackage_search_paths = []
        elif not self._subpackage_search_paths:
            raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
        else:
            # only search within the first collection we found
            self._subpackage_search_paths = [self._subpackage_search_paths[0]]

    def load_module(self, fullname):
        """Load the collection package, then parse and attach its routing metadata as ``_collection_meta``."""
        # the module-level _meta_yml_to_dict callback must be set by the host before loading; fail fast otherwise
        if not _meta_yml_to_dict:
            raise ValueError('ansible.utils.collection_loader._meta_yml_to_dict is not set')

        module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname)

        module._collection_meta = {}
        # TODO: load collection metadata, cache in __loader__ state

        collection_name = '.'.join(self._split_name[1:3])

        if collection_name == 'ansible.builtin':
            # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro
            raw_routing = pkgutil.get_data('ansible.config', 'ansible_builtin_runtime.yml')
        else:
            # real collections carry their routing config in meta/runtime.yml (optional)
            b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml'))
            if os.path.isfile(b_routing_meta_path):
                with open(b_routing_meta_path, 'rb') as fd:
                    raw_routing = fd.read()
            else:
                raw_routing = ''
        try:
            if raw_routing:
                routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml'))
                module._collection_meta = self._canonicalize_meta(routing_dict)
        except Exception as ex:
            raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex)))

        # notify any registered listeners that a collection was loaded and where it lives
        AnsibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__))

        return module

    def _canonicalize_meta(self, meta_dict):
        # currently a pass-through; relative-redirect rewriting may be implemented here later
        # TODO: rewrite import keys and all redirect targets that start with .. (current namespace) and . (current collection)
        # OR we could do it all on the fly?
        # if not meta_dict:
        #     return {}
        #
        # ns_name = '.'.join(self._split_name[0:2])
        # collection_name = '.'.join(self._split_name[0:3])
        #
        # #
        # for routing_type, routing_type_dict in iteritems(meta_dict.get('plugin_routing', {})):
        #     for plugin_key, plugin_dict in iteritems(routing_type_dict):
        #         redirect = plugin_dict.get('redirect', '')
        #         if redirect.startswith('..'):
        #             redirect = redirect[2:]

        return meta_dict
+
+
# loads everything under a collection, including handling redirections defined by the collection
class _AnsibleCollectionLoader(_AnsibleCollectionPkgLoaderBase):
    """Loader for content inside a collection (four or more name components deep)."""
    # HACK: stash this in a better place
    _redirected_package_map = {}  # NOTE: class-level attribute, shared by every instance
    _allows_package_code = True

    def _validate_args(self):
        super(_AnsibleCollectionLoader, self)._validate_args()
        if len(self._split_name) < 4:
            raise ValueError('this loader is only for sub-collection modules/packages, not {0}'.format(self._fullname))

    def _get_candidate_paths(self, path_list):
        # the owning collection was already pinned to a single path; ansible.builtin has none
        if len(path_list) != 1 and self._split_name[1:3] != ['ansible', 'builtin']:
            raise ValueError('this loader requires exactly one path to search')

        return path_list

    def _get_subpackage_search_paths(self, candidate_paths):
        """Resolve redirects for this name, or find the on-disk module/package to load."""
        collection_meta = _get_collection_metadata('.'.join(self._split_name[1:3]))

        # check for explicit redirection, as well as ancestor package-level redirection (only load the actual code once!)
        redirect = None
        explicit_redirect = False

        routing_entry = _nested_dict_get(collection_meta, ['import_redirection', self._fullname])
        if routing_entry:
            redirect = routing_entry.get('redirect')

        if redirect:
            explicit_redirect = True
        else:
            redirect = _get_ancestor_redirect(self._redirected_package_map, self._fullname)

        # NB: package level redirection requires hooking all future imports beneath the redirected source package
        # in order to ensure sanity on future relative imports. We always import everything under its "real" name,
        # then add a sys.modules entry with the redirected name using the same module instance. If we naively imported
        # the source for each redirection, most submodules would import OK, but we'd have N runtime copies of the module
        # (one for each name), and relative imports that ascend above the redirected package would break (since they'd
        # see the redirected ancestor package contents instead of the package where they actually live).
        if redirect:
            # FIXME: wrap this so we can be explicit about a failed redirection
            self._redirect_module = import_module(redirect)
            if explicit_redirect and hasattr(self._redirect_module, '__path__') and self._redirect_module.__path__:
                # if the import target looks like a package, store its name so we can rewrite future descendent loads
                # FIXME: shouldn't this be in a shared location? This is currently per loader instance, so
                self._redirected_package_map[self._fullname] = redirect

            # if we redirected, don't do any further custom package logic
            return None

        # we're not doing a redirect- try to find what we need to actually load a module/package

        # this will raise ImportError if we can't find the requested module/package at all
        if not candidate_paths:
            # noplace to look, just ImportError
            raise ImportError('package has no paths')

        found_path, has_code, package_path = self._module_file_from_path(self._package_to_load, candidate_paths[0])

        # still here? we found something to load...
        if has_code:
            self._source_code_path = found_path

        if package_path:
            return [package_path]  # always needs to be a list

        return None
+
+
# Only services intercepted core `ansible.*` imports that ansible.builtin metadata redirects elsewhere.
# Anything else raises ImportError here so the normal path_hook importer (which proxies the built-in
# import machinery, keeping its caching behavior) picks the import up instead.
class _AnsibleInternalRedirectLoader:
    """Loader that aliases redirected core Ansible modules to their new locations."""

    def __init__(self, fullname, path_list):
        self._redirect = None

        # only core `ansible.*` names are candidates for builtin redirection
        if fullname.split('.')[0] != 'ansible':
            raise ImportError('not interested')

        builtin_meta = _get_collection_metadata('ansible.builtin')

        entry = _nested_dict_get(builtin_meta, ['import_redirection', fullname])
        if entry:
            self._redirect = entry.get('redirect')

        # no redirect recorded -> decline, so the path_hook importer handles this name
        if not self._redirect:
            raise ImportError('not redirected, go ask path_hook')

    def load_module(self, fullname):
        """Import the redirect target and alias it into sys.modules under the requested name."""
        # construction guarantees a redirect exists; this is a defensive check only
        if not self._redirect:
            raise ValueError('no redirect found for {0}'.format(fullname))

        # FIXME: smuggle redirection context, provide warning/error that we tried and failed to redirect
        target_module = import_module(self._redirect)
        sys.modules[fullname] = target_module
        return target_module
+
+
class AnsibleCollectionRef:
    """Parsed, validated reference to a piece of collection content (plugin, module_utils, role, ...).

    Precomputes the normalized FQCR string (``fqcr``) and the Python package names under
    which the referenced content lives (``n_python_package_name`` and
    ``n_python_collection_package_name``).
    """
    # FUTURE: introspect plugin loaders to get these dynamically?
    VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
                                                     'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
                                                     'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
                                                     'terminal', 'test', 'vars'])

    # FIXME: tighten this up to match Python identifier reqs, etc
    VALID_COLLECTION_NAME_RE = re.compile(to_text(r'^(\w+)\.(\w+)$'))
    VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
    VALID_FQCR_RE = re.compile(to_text(r'^\w+\.\w+\.\w+(\.\w+)*$'))  # can have 0-N included subdirs as well

    def __init__(self, collection_name, subdirs, resource, ref_type):
        """
        Create an AnsibleCollectionRef from components
        :param collection_name: a collection name of the form 'namespace.collectionname'
        :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
        :param resource: the name of the resource being references (eg, 'mymodule', 'someaction', 'a_role')
        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
        :raises ValueError: if any component fails validation
        """
        # normalize all components to text up front; invalid encodings raise immediately
        collection_name = to_text(collection_name, errors='strict')
        if subdirs is not None:
            subdirs = to_text(subdirs, errors='strict')
        resource = to_text(resource, errors='strict')
        ref_type = to_text(ref_type, errors='strict')

        if not self.is_valid_collection_name(collection_name):
            raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))

        if ref_type not in self.VALID_REF_TYPES:
            raise ValueError('invalid collection ref_type: {0}'.format(ref_type))

        self.collection = collection_name
        if subdirs:
            if not re.match(self.VALID_SUBDIRS_RE, subdirs):
                raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
            self.subdirs = subdirs
        else:
            self.subdirs = u''

        self.resource = resource
        self.ref_type = ref_type

        # assemble the Python package name components and the FQCR components in parallel
        package_components = [u'ansible_collections', self.collection]
        fqcr_components = [self.collection]

        self.n_python_collection_package_name = to_native('.'.join(package_components))

        if self.ref_type == u'role':
            package_components.append(u'roles')
        else:
            # we assume it's a plugin
            package_components += [u'plugins', self.ref_type]

        if self.subdirs:
            package_components.append(self.subdirs)
            fqcr_components.append(self.subdirs)

        if self.ref_type == u'role':
            # roles are their own resource
            package_components.append(self.resource)

        fqcr_components.append(self.resource)

        self.n_python_package_name = to_native('.'.join(package_components))
        self._fqcr = u'.'.join(fqcr_components)

    def __repr__(self):
        return 'AnsibleCollectionRef(collection={0!r}, subdirs={1!r}, resource={2!r})'.format(self.collection, self.subdirs, self.resource)

    @property
    def fqcr(self):
        # fully-qualified collection reference string, eg 'ns.coll.subdir1.resource'
        return self._fqcr

    @staticmethod
    def from_fqcr(ref, ref_type):
        """
        Parse a string as a fully-qualified collection reference, raises ValueError if invalid
        :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
        :return: a populated AnsibleCollectionRef object
        """
        # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
        # we split the resource name off the right, split ns and coll off the left, and we're left with any optional
        # subdirs that need to be added back below the plugin-specific subdir we'll add. So:
        # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
        # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource
        # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
        if not AnsibleCollectionRef.is_valid_fqcr(ref):
            raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))

        ref = to_text(ref, errors='strict')
        ref_type = to_text(ref_type, errors='strict')

        resource_splitname = ref.rsplit(u'.', 1)
        package_remnant = resource_splitname[0]
        resource = resource_splitname[1]

        # split the left two components of the collection package name off, anything remaining is plugin-type
        # specific subdirs to be added back on below the plugin type
        package_splitname = package_remnant.split(u'.', 2)
        if len(package_splitname) == 3:
            subdirs = package_splitname[2]
        else:
            subdirs = u''

        collection_name = u'.'.join(package_splitname[0:2])

        return AnsibleCollectionRef(collection_name, subdirs, resource, ref_type)

    @staticmethod
    def try_parse_fqcr(ref, ref_type):
        """
        Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
        :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
        :return: a populated AnsibleCollectionRef object on successful parsing, else None
        """
        try:
            return AnsibleCollectionRef.from_fqcr(ref, ref_type)
        except ValueError:
            # swallow the parse failure; falling off the end returns None
            pass

    @staticmethod
    def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
        """
        Utility method to convert from a PluginLoader dir name to a plugin ref_type
        :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
        :return: the corresponding plugin ref_type (eg, 'action', 'role')
        :raises ValueError: if the dir name does not map to a known ref_type
        """
        legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)

        plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')

        # 'library' is the historical name for the modules dir
        if plugin_type == u'library':
            plugin_type = u'modules'

        if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
            raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))

        return plugin_type

    @staticmethod
    def is_valid_fqcr(ref, ref_type=None):
        """
        Validates if is string is a well-formed fully-qualified collection reference (does not look up the collection itself)
        :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
        :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
        :return: True if the collection ref passed is well-formed, False otherwise
        """

        ref = to_text(ref)

        if not ref_type:
            return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))

        return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))

    @staticmethod
    def is_valid_collection_name(collection_name):
        """
        Validates if the given string is a well-formed collection name (does not look up the collection itself)
        :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
        :return: True if the collection name passed is well-formed, False otherwise
        """

        collection_name = to_text(collection_name)

        return bool(re.match(AnsibleCollectionRef.VALID_COLLECTION_NAME_RE, collection_name))
+
+
def _get_collection_role_path(role_name, collection_list=None):
    """Resolve a role name to (resource, role_path, collection_name), or None if not found.

    ``role_name`` may be a fully-qualified collection role ref (in which case
    ``collection_list`` is ignored) or a bare role name searched, best-effort,
    across each collection in ``collection_list``.
    """
    acr = AnsibleCollectionRef.try_parse_fqcr(role_name, 'role')

    if acr:
        # looks like a valid qualified collection ref; skip the collection_list
        collection_list = [acr.collection]
        subdirs = acr.subdirs
        resource = acr.resource
    elif not collection_list:
        return None  # not a FQ role and no collection search list spec'd, nothing to do
    else:
        resource = role_name  # treat as unqualified, loop through the collection search list to try and resolve
        subdirs = ''

    for collection_name in collection_list:
        try:
            acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type='role')
            # FIXME: error handling/logging; need to catch any import failures and move along
            pkg = import_module(acr.n_python_package_name)

            if pkg is not None:
                # the package is now loaded, get the collection's package and ask where it lives
                path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
                return resource, to_text(path, errors='surrogate_or_strict'), collection_name

        except IOError:
            continue
        except Exception as ex:
            # FIXME: pick out typical import errors first, then error logging
            # deliberate best-effort: any failure for this collection just moves on to the next
            continue

    return None
+
+
def _get_collection_name_from_path(path):
    """
    Return the containing collection name for a given path, or None if the path is not below a configured collection, or
    the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
    collection roots).
    :param path: path to evaluate for collection containment
    :return: collection name or None
    """

    # FIXME: mess with realpath canonicalization or not?
    parts = to_native(path).split('/')

    # exactly one ansible_collections component may appear in the path
    if parts.count('ansible_collections') != 1:
        return None

    anchor = parts.index('ansible_collections')

    # require at least a namespace and a collection name after the anchor
    if len(parts) < anchor + 3:
        return None

    candidate = '.'.join(parts[anchor + 1:anchor + 3])

    try:
        # we've got a name for it, now see if the path prefix matches what the loader sees
        loaded_pkg = import_module('ansible_collections.' + candidate)
        imported_pkg_path = to_native(os.path.dirname(to_bytes(loaded_pkg.__file__)))
    except ImportError:
        return None

    # reassemble the original path prefix up to the collection name; it should match what the
    # loader imported. If not, this path is probably under a collection root that isn't configured.
    if os.path.join('/', *parts[0:anchor + 3]) != imported_pkg_path:
        return None

    return candidate
+
+
def _get_import_redirect(collection_meta_dict, fullname):
    """Return the import redirect target recorded for ``fullname`` in a collection's routing metadata, or None."""
    if collection_meta_dict:
        return _nested_dict_get(collection_meta_dict, ['import_redirection', fullname, 'redirect'])
    # no metadata at all means no redirect
    return None
+
+
def _get_ancestor_redirect(redirected_package_map, fullname):
    """Return ``fullname`` rewritten through a previously-redirected ancestor package, or None.

    Walks the proper ancestor packages of ``fullname`` (not ``fullname`` itself); on the
    first ancestor found in ``redirected_package_map``, splices the redirect target in
    place of that ancestor prefix.
    """
    ancestor = fullname
    while ancestor:
        ancestor = ancestor.rpartition('.')[0]
        target = redirected_package_map.get(ancestor)
        if target:
            # rewrite the prefix so the target is imported first, then aliased
            return target + fullname[len(ancestor):]
    return None
+
+
def _nested_dict_get(root_dict, key_list):
    """Walk ``key_list`` into nested dicts; return the value found, or None if any level is missing or falsy."""
    node = root_dict
    for key in key_list:
        node = node.get(key)
        if not node:
            # a missing key or falsy intermediate value terminates the walk
            return None
    return node
+
+
+def _iter_modules_impl(paths, prefix=''):
+ # NB: this currently only iterates what's on disk- redirected modules are not considered
+ if not prefix:
+ prefix = ''
+ else:
+ prefix = to_native(prefix)
+ # yield (module_loader, name, ispkg) for each module/pkg under path
+ # TODO: implement ignore/silent catch for unreadable?
+ for b_path in map(to_bytes, paths):
+ if not os.path.isdir(b_path):
+ continue
+ for b_basename in sorted(os.listdir(b_path)):
+ b_candidate_module_path = os.path.join(b_path, b_basename)
+ if os.path.isdir(b_candidate_module_path):
+ # exclude things that obviously aren't Python package dirs
+ # FIXME: this dir is adjustable in py3.8+, check for it
+ if b'.' in b_basename or b_basename == b'__pycache__':
+ continue
+
+ # TODO: proper string handling?
+ yield prefix + to_native(b_basename), True
+ else:
+ # FIXME: match builtin ordering for package/dir/file, support compiled?
+ if b_basename.endswith(b'.py') and b_basename != b'__init__.py':
+ yield prefix + to_native(os.path.splitext(b_basename)[0]), False
+
+
def _get_collection_metadata(collection_name):
    """Return the routing metadata dict (``_collection_meta``) attached to a loaded collection package.

    :param collection_name: collection name of the form 'namespace.collection'
    :raises ValueError: if the name is malformed, the collection can't be imported,
        or its metadata was never loaded
    """
    collection_name = to_native(collection_name)
    looks_valid = (
        collection_name and isinstance(collection_name, string_types) and len(collection_name.split('.')) == 2
    )
    if not looks_valid:
        raise ValueError('collection_name must be a non-empty string of the form namespace.collection')

    try:
        pkg = import_module('ansible_collections.' + collection_name)
    except ImportError:
        raise ValueError('unable to locate collection {0}'.format(collection_name))

    meta = getattr(pkg, '_collection_meta', None)
    if meta is None:
        raise ValueError('collection metadata was not loaded for collection {0}'.format(collection_name))

    return meta
diff --git a/lib/ansible/utils/collection_loader/_collection_meta.py b/lib/ansible/utils/collection_loader/_collection_meta.py
new file mode 100644
index 0000000000..979a921cab
--- /dev/null
+++ b/lib/ansible/utils/collection_loader/_collection_meta.py
@@ -0,0 +1,17 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from yaml import safe_load
+
+
def _meta_yml_to_dict(yaml_string_data, content_id):
    """Parse collection routing metadata YAML into a dict.

    :param yaml_string_data: raw YAML text/bytes to parse
    :param content_id: identifier of the content being parsed (reserved for error context)
    :raises ValueError: if the parsed document is not a dictionary
    """
    parsed = safe_load(yaml_string_data)
    if not parsed:
        # an empty/None document normalizes to an empty routing table
        parsed = {}
    # TODO: change this to Mapping abc?
    if not isinstance(parsed, dict):
        raise ValueError('collection metadata must be a dictionary')
    return parsed