author      Matt Martz <matt@sivel.net>    2023-01-10 11:09:57 -0600
committer   GitHub <noreply@github.com>    2023-01-10 11:09:57 -0600
commit      56d142350d62fb674e1c6874b3ace2cf5cb933a7 (patch)
tree        1bf7c16181d41763b8a271ac31cf578d3e491d11
parent      e41d2874a67ade813ffaf2ebfff67987291d53c0 (diff)
download    ansible-56d142350d62fb674e1c6874b3ace2cf5cb933a7.tar.gz
Add support for importlib.resources (#78915)
* Add support for importlib.resources
* Remove the importlib.resources imports
* return the correct data
* Some code comments, and re-order for consistency
* Disallow traversing packages below an individual collection
* Add a traversable class for namespaces
* Re-use variable
* Utilize itertools.chain.from_iterable
Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
* Simplify logic to check for packages from ansible loaders
Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
* Just a generator expression, instead of a generator
* docstrings
* Add comment about find_spec for our namespaces
* Add some initial unit tests for importlib.resources
* normalize
* Utilize importlib.resources for listing collections
* collections_path is already in config, just use config
* install uses a different default for collections_path
* Remove unused import
* Remove duplicate __truediv__
* Bring back TraversableResources
* Apply some small suggestions from code review
Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
Co-authored-by: Matt Davis <6775756+nitzmahone@users.noreply.github.com>
* Remove cross contamination between plugin loader code and CLI code
* Remove unused import
Co-authored-by: Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua>
Co-authored-by: Matt Davis <6775756+nitzmahone@users.noreply.github.com>
29 files changed, 491 insertions, 359 deletions
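
Before the file-by-file diff, a minimal sketch of the discovery pattern this commit adopts in find_existing_collections(): ask importlib.resources for the roots behind the ansible_collections namespace and glob two levels deep for <namespace>/<name> directories. This is an illustration only, not the code from the diff; iter_installed_collections is a hypothetical helper name, and the sketch assumes init_plugin_loader() (added by this commit) has installed the collection finder so that files('ansible_collections') returns a glob()-capable traversable.

    # Hedged sketch of the discovery pattern used by the new find_existing_collections().
    # iter_installed_collections is a hypothetical helper name, not part of the diff.
    from ansible.compat.importlib_resources import files  # compat shim added by this commit
    from ansible.plugins.loader import init_plugin_loader  # added by this commit


    def iter_installed_collections():
        """Yield (namespace, name, path) for every installed collection directory."""
        if files is None:
            # The shim sets files to None when importlib_resources is missing on py<3.10.
            raise RuntimeError('importlib_resources is required on Python < 3.10')
        for path in files('ansible_collections').glob('*/*/'):
            if not path.is_dir():
                continue
            # path is <root>/ansible_collections/<namespace>/<name>
            yield path.parent.name, path.name, path


    if __name__ == '__main__':
        # Installs the collection finder; its resource reader is what makes
        # files('ansible_collections') return a traversable that supports glob().
        init_plugin_loader()
        for namespace, name, path in iter_installed_collections():
            print(f'{namespace}.{name} -> {path}')

In the real implementation below, each directory found this way is then handed to Candidate.from_dir_path_as_unknown() to become a requirement object.
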
diff --git a/hacking/test-module.py b/hacking/test-module.py index 54343e07e8..745acdbc7f 100755 --- a/hacking/test-module.py +++ b/hacking/test-module.py @@ -44,6 +44,7 @@ import ansible.utils.vars as utils_vars from ansible.parsing.dataloader import DataLoader from ansible.parsing.utils.jsonify import jsonify from ansible.parsing.splitter import parse_kv +from ansible.plugins.loader import init_plugin_loader from ansible.executor import module_common import ansible.constants as C from ansible.module_utils._text import to_native, to_text @@ -266,6 +267,7 @@ def rundebug(debugger, modfile, argspath, modname, module_style, interpreters): def main(): options, args = parse() + init_plugin_loader() interpreters = get_interpreters(options.interpreter) (modfile, modname, module_style) = boilerplate_module(options.module_path, options.module_args, interpreters, options.check, options.filename) diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py index d59f450944..5f49d6a07f 100644 --- a/lib/ansible/cli/__init__.py +++ b/lib/ansible/cli/__init__.py @@ -98,10 +98,11 @@ from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.inventory.manager import InventoryManager from ansible.module_utils.six import string_types from ansible.module_utils._text import to_bytes, to_text +from ansible.module_utils.common.collections import is_sequence from ansible.module_utils.common.file import is_executable from ansible.parsing.dataloader import DataLoader from ansible.parsing.vault import PromptVaultSecret, get_file_vault_secret -from ansible.plugins.loader import add_all_plugin_dirs +from ansible.plugins.loader import add_all_plugin_dirs, init_plugin_loader from ansible.release import __version__ from ansible.utils.collection_loader import AnsibleCollectionConfig from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path @@ -154,6 +155,13 @@ class CLI(ABC): """ self.parse() + # Initialize plugin loader after parse, so that the init code can utilize parsed arguments + cli_collections_path = context.CLIARGS.get('collections_path') or [] + if not is_sequence(cli_collections_path): + # In some contexts ``collections_path`` is singular + cli_collections_path = [cli_collections_path] + init_plugin_loader(cli_collections_path) + display.vv(to_text(opt_help.version(self.parser.prog))) if C.CONFIG_FILE: @@ -522,6 +530,10 @@ class CLI(ABC): @staticmethod def _play_prereqs(): + # TODO: evaluate moving all of the code that touches ``AnsibleCollectionConfig`` + # into ``init_plugin_loader`` so that we can specifically remove + # ``AnsibleCollectionConfig.playbook_paths`` to make it immutable after instantiation + options = context.CLIARGS # all needs loader diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py index 54f4706dd2..fbebeee9f2 100755 --- a/lib/ansible/cli/galaxy.py +++ b/lib/ansible/cli/galaxy.py @@ -12,6 +12,7 @@ from ansible.cli import CLI import json import os.path +import pathlib import re import shutil import sys @@ -97,7 +98,8 @@ def with_collection_artifacts_manager(wrapped_method): return wrapped_method(*args, **kwargs) # FIXME: use validate_certs context from Galaxy servers when downloading collections - artifacts_manager_kwargs = {'validate_certs': context.CLIARGS['resolved_validate_certs']} + # .get used here for when this is used in a non-CLI context + artifacts_manager_kwargs = {'validate_certs': context.CLIARGS.get('resolved_validate_certs', True)} keyring = context.CLIARGS.get('keyring', None) if 
keyring is not None: @@ -154,8 +156,8 @@ def _get_collection_widths(collections): fqcn_set = {to_text(c.fqcn) for c in collections} version_set = {to_text(c.ver) for c in collections} - fqcn_length = len(max(fqcn_set, key=len)) - version_length = len(max(version_set, key=len)) + fqcn_length = len(max(fqcn_set or [''], key=len)) + version_length = len(max(version_set or [''], key=len)) return fqcn_length, version_length @@ -268,7 +270,6 @@ class GalaxyCLI(CLI): collections_path = opt_help.argparse.ArgumentParser(add_help=False) collections_path.add_argument('-p', '--collections-path', dest='collections_path', type=opt_help.unfrack_path(pathsep=True), - default=AnsibleCollectionConfig.collection_paths, action=opt_help.PrependListAction, help="One or more directories to search for collections in addition " "to the default COLLECTIONS_PATHS. Separate multiple paths " @@ -1250,7 +1251,7 @@ class GalaxyCLI(CLI): def execute_verify(self, artifacts_manager=None): collections = context.CLIARGS['args'] - search_paths = context.CLIARGS['collections_path'] + search_paths = AnsibleCollectionConfig.collection_paths ignore_errors = context.CLIARGS['ignore_errors'] local_verify_only = context.CLIARGS['offline'] requirements_file = context.CLIARGS['requirements'] @@ -1577,7 +1578,9 @@ class GalaxyCLI(CLI): display.warning(w) if not path_found: - raise AnsibleOptionsError("- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])) + raise AnsibleOptionsError( + "- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type']) + ) return 0 @@ -1592,100 +1595,66 @@ class GalaxyCLI(CLI): artifacts_manager.require_build_metadata = False output_format = context.CLIARGS['output_format'] - collections_search_paths = set(context.CLIARGS['collections_path']) collection_name = context.CLIARGS['collection'] - default_collections_path = AnsibleCollectionConfig.collection_paths + default_collections_path = set(C.COLLECTIONS_PATHS) + collections_search_paths = ( + set(context.CLIARGS['collections_path'] or []) | default_collections_path | set(AnsibleCollectionConfig.collection_paths) + ) collections_in_paths = {} warnings = [] path_found = False collection_found = False + + namespace_filter = None + collection_filter = None + if collection_name: + # list a specific collection + + validate_collection_name(collection_name) + namespace_filter, collection_filter = collection_name.split('.') + + collections = list(find_existing_collections( + list(collections_search_paths), + artifacts_manager, + namespace_filter=namespace_filter, + collection_filter=collection_filter, + dedupe=False + )) + + seen = set() + fqcn_width, version_width = _get_collection_widths(collections) + for collection in sorted(collections, key=lambda c: c.src): + collection_found = True + collection_path = pathlib.Path(to_text(collection.src)).parent.parent.as_posix() + + if output_format in {'yaml', 'json'}: + collections_in_paths[collection_path] = { + collection.fqcn: {'version': collection.ver} for collection in collections + } + else: + if collection_path not in seen: + _display_header( + collection_path, + 'Collection', + 'Version', + fqcn_width, + version_width + ) + seen.add(collection_path) + _display_collection(collection, fqcn_width, version_width) + + path_found = False for path in collections_search_paths: - collection_path = GalaxyCLI._resolve_path(path) if not os.path.exists(path): if path in default_collections_path: # don't 
warn for missing default paths continue - warnings.append("- the configured path {0} does not exist.".format(collection_path)) - continue - - if not os.path.isdir(collection_path): - warnings.append("- the configured path {0}, exists, but it is not a directory.".format(collection_path)) - continue - - path_found = True - - if collection_name: - # list a specific collection - - validate_collection_name(collection_name) - namespace, collection = collection_name.split('.') - - collection_path = validate_collection_path(collection_path) - b_collection_path = to_bytes(os.path.join(collection_path, namespace, collection), errors='surrogate_or_strict') - - if not os.path.exists(b_collection_path): - warnings.append("- unable to find {0} in collection paths".format(collection_name)) - continue - - if not os.path.isdir(collection_path): - warnings.append("- the configured path {0}, exists, but it is not a directory.".format(collection_path)) - continue - - collection_found = True - - try: - collection = Requirement.from_dir_path_as_unknown( - b_collection_path, - artifacts_manager, - ) - except ValueError as val_err: - six.raise_from(AnsibleError(val_err), val_err) - - if output_format in {'yaml', 'json'}: - collections_in_paths[collection_path] = { - collection.fqcn: {'version': collection.ver} - } - - continue - - fqcn_width, version_width = _get_collection_widths([collection]) - - _display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width) - _display_collection(collection, fqcn_width, version_width) - + warnings.append("- the configured path {0} does not exist.".format(path)) + elif os.path.exists(path) and not os.path.isdir(path): + warnings.append("- the configured path {0}, exists, but it is not a directory.".format(path)) else: - # list all collections - collection_path = validate_collection_path(path) - if os.path.isdir(collection_path): - display.vvv("Searching {0} for collections".format(collection_path)) - collections = list(find_existing_collections( - collection_path, artifacts_manager, - )) - else: - # There was no 'ansible_collections/' directory in the path, so there - # or no collections here. - display.vvv("No 'ansible_collections' directory found at {0}".format(collection_path)) - continue - - if not collections: - display.vvv("No collections found at {0}".format(collection_path)) - continue - - if output_format in {'yaml', 'json'}: - collections_in_paths[collection_path] = { - collection.fqcn: {'version': collection.ver} for collection in collections - } - - continue - - # Display header - fqcn_width, version_width = _get_collection_widths(collections) - _display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width) - - # Sort collections by the namespace and name - for collection in sorted(collections, key=to_text): - _display_collection(collection, fqcn_width, version_width) + path_found = True # Do not warn if the specific collection was found in any of the search paths if collection_found and collection_name: @@ -1694,8 +1663,10 @@ class GalaxyCLI(CLI): for w in warnings: display.warning(w) - if not path_found: - raise AnsibleOptionsError("- None of the provided paths were usable. Please specify a valid path with --{0}s-path".format(context.CLIARGS['type'])) + if not collections and not path_found: + raise AnsibleOptionsError( + "- None of the provided paths were usable. 
Please specify a valid path with --{0}s-path".format(context.CLIARGS['type']) + ) if output_format == 'json': display.display(json.dumps(collections_in_paths)) diff --git a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py index 9109137e7f..2dcc66c6b2 100755 --- a/lib/ansible/cli/scripts/ansible_connection_cli_stub.py +++ b/lib/ansible/cli/scripts/ansible_connection_cli_stub.py @@ -29,7 +29,7 @@ from ansible.module_utils.connection import Connection, ConnectionError, send_da from ansible.module_utils.service import fork_process from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder from ansible.playbook.play_context import PlayContext -from ansible.plugins.loader import connection_loader +from ansible.plugins.loader import connection_loader, init_plugin_loader from ansible.utils.path import unfrackpath, makedirs_safe from ansible.utils.display import Display from ansible.utils.jsonrpc import JsonRpcServer @@ -230,6 +230,7 @@ def main(args=None): parser.add_argument('playbook_pid') parser.add_argument('task_uuid') args = parser.parse_args(args[1:] if args is not None else args) + init_plugin_loader() # initialize verbosity display.verbosity = args.verbosity diff --git a/lib/ansible/collections/list.py b/lib/ansible/collections/list.py index af3c1cae28..3cb509c7e7 100644 --- a/lib/ansible/collections/list.py +++ b/lib/ansible/collections/list.py @@ -9,7 +9,8 @@ import os from collections import defaultdict from ansible.errors import AnsibleError -from ansible.collections import is_collection_path +from ansible.cli.galaxy import with_collection_artifacts_manager +from ansible.galaxy.collection import find_existing_collections from ansible.module_utils._text import to_bytes from ansible.utils.collection_loader import AnsibleCollectionConfig from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path @@ -18,14 +19,13 @@ from ansible.utils.display import Display display = Display() -def list_collections(coll_filter=None, search_paths=None, dedupe=False): +@with_collection_artifacts_manager +def list_collections(coll_filter=None, search_paths=None, dedupe=True, artifacts_manager=None): collections = {} - for candidate in list_collection_dirs(search_paths=search_paths, coll_filter=coll_filter): - if os.path.exists(candidate): - collection = _get_collection_name_from_path(candidate) - if collection not in collections or not dedupe: - collections[collection] = candidate + for candidate in list_collection_dirs(search_paths=search_paths, coll_filter=coll_filter, artifacts_manager=artifacts_manager, dedupe=dedupe): + collection = _get_collection_name_from_path(candidate) + collections[collection] = candidate return collections @@ -59,7 +59,8 @@ def list_valid_collection_paths(search_paths=None, warn=False): yield path -def list_collection_dirs(search_paths=None, coll_filter=None): +@with_collection_artifacts_manager +def list_collection_dirs(search_paths=None, coll_filter=None, artifacts_manager=None, dedupe=True): """ Return paths for the specific collections found in passed or configured search paths :param search_paths: list of text-string paths, if none load default config @@ -67,48 +68,18 @@ def list_collection_dirs(search_paths=None, coll_filter=None): :return: list of collection directory paths """ - collection = None - namespace = None + namespace_filter = None + collection_filter = None if coll_filter is not None: if '.' 
in coll_filter: try: - (namespace, collection) = coll_filter.split('.') + namespace_filter, collection_filter = coll_filter.split('.') except ValueError: raise AnsibleError("Invalid collection pattern supplied: %s" % coll_filter) else: - namespace = coll_filter + namespace_filter = coll_filter - collections = defaultdict(dict) - for path in list_valid_collection_paths(search_paths): + for req in find_existing_collections(search_paths, artifacts_manager, namespace_filter=namespace_filter, + collection_filter=collection_filter, dedupe=dedupe): - if os.path.basename(path) != 'ansible_collections': - path = os.path.join(path, 'ansible_collections') - - b_coll_root = to_bytes(path, errors='surrogate_or_strict') - - if os.path.exists(b_coll_root) and os.path.isdir(b_coll_root): - - if namespace is None: - namespaces = os.listdir(b_coll_root) - else: - namespaces = [namespace] - - for ns in namespaces: - b_namespace_dir = os.path.join(b_coll_root, to_bytes(ns)) - - if os.path.isdir(b_namespace_dir): - - if collection is None: - colls = os.listdir(b_namespace_dir) - else: - colls = [collection] - - for mycoll in colls: - - # skip dupe collections as they will be masked in execution - if mycoll not in collections[ns]: - b_coll = to_bytes(mycoll) - b_coll_dir = os.path.join(b_namespace_dir, b_coll) - if is_collection_path(b_coll_dir): - collections[ns][mycoll] = b_coll_dir - yield b_coll_dir + yield to_bytes(req.src) diff --git a/lib/ansible/compat/importlib_resources.py b/lib/ansible/compat/importlib_resources.py new file mode 100644 index 0000000000..cbd537f690 --- /dev/null +++ b/lib/ansible/compat/importlib_resources.py @@ -0,0 +1,20 @@ +# Copyright: Contributors to the Ansible project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + +HAS_IMPORTLIB_RESOURCES = False + +if sys.version_info < (3, 10): + try: + from importlib_resources import files # type: ignore[import] + except ImportError: + files = None # type: ignore[assignment] + else: + HAS_IMPORTLIB_RESOURCES = True +else: + from importlib.resources import files + HAS_IMPORTLIB_RESOURCES = True diff --git a/lib/ansible/galaxy/collection/__init__.py b/lib/ansible/galaxy/collection/__init__.py index ffeba77dea..fa9ccd3350 100644 --- a/lib/ansible/galaxy/collection/__init__.py +++ b/lib/ansible/galaxy/collection/__init__.py @@ -11,6 +11,7 @@ import fnmatch import functools import json import os +import pathlib import queue import re import shutil @@ -83,6 +84,7 @@ if t.TYPE_CHECKING: FilesManifestType = t.Dict[t.Literal['files', 'format'], t.Union[t.List[FileManifestEntryType], int]] import ansible.constants as C +from ansible.compat.importlib_resources import files from ansible.errors import AnsibleError from ansible.galaxy.api import GalaxyAPI from ansible.galaxy.collection.concrete_artifact_manager import ( @@ -1402,36 +1404,75 @@ def _build_collection_dir(b_collection_path, b_collection_output, collection_man return collection_output -def find_existing_collections(path, artifacts_manager): +def _normalize_collection_path(path): + str_path = path.as_posix() if isinstance(path, pathlib.Path) else path + return pathlib.Path( + # This is annoying, but GalaxyCLI._resolve_path did it + os.path.expandvars(str_path) + ).expanduser().absolute() + + +def find_existing_collections(path_filter, artifacts_manager, namespace_filter=None, collection_filter=None, dedupe=True): """Locate all collections 
under a given path. :param path: Collection dirs layout search path. :param artifacts_manager: Artifacts manager. """ - b_path = to_bytes(path, errors='surrogate_or_strict') + if files is None: + raise AnsibleError('importlib_resources is not installed and is required') - # FIXME: consider using `glob.glob()` to simplify looping - for b_namespace in os.listdir(b_path): - b_namespace_path = os.path.join(b_path, b_namespace) - if os.path.isfile(b_namespace_path): - continue + if path_filter and not is_sequence(path_filter): + path_filter = [path_filter] - # FIXME: consider feeding b_namespace_path to Candidate.from_dir_path to get subdirs automatically - for b_collection in os.listdir(b_namespace_path): - b_collection_path = os.path.join(b_namespace_path, b_collection) - if not os.path.isdir(b_collection_path): + paths = set() + for path in files('ansible_collections').glob('*/*/'): + path = _normalize_collection_path(path) + if not path.is_dir(): + continue + if path_filter: + for pf in path_filter: + try: + path.relative_to(_normalize_collection_path(pf)) + except ValueError: + continue + break + else: continue + paths.add(path) + + seen = set() + for path in paths: + namespace = path.parent.name + name = path.name + if namespace_filter and namespace != namespace_filter: + continue + if collection_filter and name != collection_filter: + continue + if dedupe: try: - req = Candidate.from_dir_path_as_unknown(b_collection_path, artifacts_manager) - except ValueError as val_err: - raise_from(AnsibleError(val_err), val_err) + collection_path = files(f'ansible_collections.{namespace}.{name}') + except ImportError: + continue + if collection_path in seen: + continue + seen.add(collection_path) + else: + collection_path = path - display.vvv( - u"Found installed collection {coll!s} at '{path!s}'". - format(coll=to_text(req), path=to_text(req.src)) - ) - yield req + b_collection_path = to_bytes(collection_path.as_posix()) + + try: + req = Candidate.from_dir_path_as_unknown(b_collection_path, artifacts_manager) + except ValueError as val_err: + display.warning(f'{val_err}') + continue + + display.vvv( + u"Found installed collection {coll!s} at '{path!s}'". + format(coll=to_text(req), path=to_text(req.src)) + ) + yield req def install(collection, path, artifacts_manager): # FIXME: mv to dataclasses? 
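
The _collection_finder.py hunks that follow implement the commit-message items "Add a traversable class for namespaces" and "Utilize itertools.chain.from_iterable" as _AnsibleNSTraversable. The stripped-down sketch here shows the core idea in isolation; MultiRootDir is a hypothetical name and the two paths are illustrative (they mirror the unit-test fixtures), so this is not Ansible's implementation.

    # Hedged sketch of the multi-root namespace idea behind _AnsibleNSTraversable:
    # one namespace package spread over several directories, read through a single
    # object. MultiRootDir and the example paths are illustrative only.
    import itertools
    import pathlib


    class MultiRootDir:
        """Present several directories as one read-only, directory-like view."""

        def __init__(self, *roots):
            self._roots = [pathlib.Path(r) for r in roots]

        def is_dir(self):
            return any(r.is_dir() for r in self._roots)

        def is_file(self):
            # A namespace spread over several roots is never a single file.
            return False

        def iterdir(self):
            # Chain the children of every existing root into one flat iterator.
            return itertools.chain.from_iterable(
                r.iterdir() for r in self._roots if r.is_dir()
            )

        def glob(self, pattern):
            return itertools.chain.from_iterable(
                r.glob(pattern) for r in self._roots if r.is_dir()
            )


    ns = MultiRootDir(
        '/usr/share/ansible/collections/ansible_collections',
        '/root/.ansible/collections/ansible_collections',
    )
    for child in ns.iterdir():
        print(child)

As the docstring added below notes, duplicates are not removed at this level; precedence is resolved later, which is why find_existing_collections() grows a dedupe flag.
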
diff --git a/lib/ansible/plugins/list.py b/lib/ansible/plugins/list.py index e09b293fea..236fcc78cd 100644 --- a/lib/ansible/plugins/list.py +++ b/lib/ansible/plugins/list.py @@ -44,6 +44,7 @@ def get_composite_name(collection, name, path, depth): def _list_plugins_from_paths(ptype, dirs, collection, depth=0): + # TODO: update to use importlib.resources plugins = {} @@ -117,6 +118,7 @@ def _list_j2_plugins_from_file(collection, plugin_path, ptype, plugin_name): def list_collection_plugins(ptype, collections, search_paths=None): + # TODO: update to use importlib.resources # starts at {plugin_name: filepath, ...}, but changes at the end plugins = {} diff --git a/lib/ansible/plugins/loader.py b/lib/ansible/plugins/loader.py index 9d1a72694c..66d9c7e5be 100644 --- a/lib/ansible/plugins/loader.py +++ b/lib/ansible/plugins/loader.py @@ -17,6 +17,7 @@ import warnings from collections import defaultdict, namedtuple from traceback import format_exc +import ansible.module_utils.compat.typing as t from ansible import __version__ as ansible_version from ansible import constants as C from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemovedError, AnsibleCollectionUnsupportedVersionError @@ -42,6 +43,7 @@ except ImportError: import importlib.util +_PLUGIN_FILTERS = defaultdict(frozenset) # type: t.DefaultDict[str, frozenset] display = Display() get_with_context_result = namedtuple('get_with_context_result', ['object', 'plugin_load_context']) @@ -1357,7 +1359,7 @@ def get_fqcr_and_name(resource, collection='ansible.builtin'): def _load_plugin_filter(): - filters = defaultdict(frozenset) + filters = _PLUGIN_FILTERS user_set = False if C.PLUGIN_FILTERS_CFG is None: filter_cfg = '/etc/ansible/plugin_filters.yml' @@ -1455,25 +1457,38 @@ def _does_collection_support_ansible_version(requirement_string, ansible_version return ss.contains(base_ansible_version) -def _configure_collection_loader(): +def _configure_collection_loader(prefix_collections_path=None): if AnsibleCollectionConfig.collection_finder: # this must be a Python warning so that it can be filtered out by the import sanity test warnings.warn('AnsibleCollectionFinder has already been configured') return - finder = _AnsibleCollectionFinder(C.COLLECTIONS_PATHS, C.COLLECTIONS_SCAN_SYS_PATH) + if prefix_collections_path is None: + prefix_collections_path = [] + + paths = list(prefix_collections_path) + C.COLLECTIONS_PATHS + finder = _AnsibleCollectionFinder(paths, C.COLLECTIONS_SCAN_SYS_PATH) finder._install() # this should succeed now AnsibleCollectionConfig.on_collection_load += _on_collection_load_handler -# TODO: All of the following is initialization code It should be moved inside of an initialization -# function which is called at some point early in the ansible and ansible-playbook CLI startup. +def init_plugin_loader(prefix_collections_path=None): + """Initialize the plugin filters and the collection loaders + + This method must be called to configure and insert the collection python loaders + into ``sys.meta_path`` and ``sys.path_hooks``. + + This method is only called in ``CLI.run`` after CLI args have been parsed, so that + instantiation of the collection finder can utilize parsed CLI args, and to not cause + side effects. 
+ """ + _load_plugin_filter() + _configure_collection_loader(prefix_collections_path) -_PLUGIN_FILTERS = _load_plugin_filter() -_configure_collection_loader() +# TODO: Evaluate making these class instantiations lazy, but keep them in the global scope # doc fragments first fragment_loader = PluginLoader( diff --git a/lib/ansible/utils/collection_loader/_collection_finder.py b/lib/ansible/utils/collection_loader/_collection_finder.py index d3a8765c77..fc6744ffde 100644 --- a/lib/ansible/utils/collection_loader/_collection_finder.py +++ b/lib/ansible/utils/collection_loader/_collection_finder.py @@ -7,6 +7,7 @@ from __future__ import (absolute_import, division, print_function) __metaclass__ = type +import itertools import os import os.path import pkgutil @@ -39,7 +40,12 @@ except ImportError: reload_module = reload # type: ignore[name-defined] # pylint:disable=undefined-variable try: - from importlib.util import spec_from_loader + from importlib.abc import TraversableResources +except ImportError: + TraversableResources = object # type: ignore[assignment,misc] + +try: + from importlib.util import find_spec, spec_from_loader except ImportError: pass @@ -50,6 +56,11 @@ except ImportError: else: HAS_FILE_FINDER = True +try: + import pathlib +except ImportError: + pass + # NB: this supports import sanity test providing a different impl try: from ._collection_meta import _meta_yml_to_dict @@ -78,6 +89,141 @@ except AttributeError: # Python 2 PB_EXTENSIONS = ('.yml', '.yaml') +SYNTHETIC_PACKAGE_NAME = '<ansible_synthetic_collection_package>' + + +class _AnsibleNSTraversable: + """Class that implements the ``importlib.resources.abc.Traversable`` + interface for the following ``ansible_collections`` namespace packages:: + + * ``ansible_collections`` + * ``ansible_collections.<namespace>`` + + These namespace packages operate differently from a normal Python + namespace package, in that the same namespace can be distributed across + multiple directories on the filesystem and still function as a single + namespace, such as:: + + * ``/usr/share/ansible/collections/ansible_collections/ansible/posix/`` + * ``/home/user/.ansible/collections/ansible_collections/ansible/windows/`` + + This class will mimic the behavior of various ``pathlib.Path`` methods, + by combining the results of multiple root paths into the output. + + This class does not do anything to remove duplicate collections from the + list, so when traversing either namespace patterns supported by this class, + it is possible to have the same collection located in multiple root paths, + but precedence rules only use one. When iterating or traversing these + package roots, there is the potential to see the same collection in + multiple places without indication of which would be used. In such a + circumstance, it is best to then call ``importlib.resources.files`` for an + individual collection package rather than continuing to traverse from the + namespace package. + + Several methods will raise ``NotImplementedError`` as they do not make + sense for these namespace packages. 
+ """ + def __init__(self, *paths): + self._paths = [pathlib.Path(p) for p in paths] + + def __repr__(self): + return "_AnsibleNSTraversable('%s')" % "', '".join(map(to_text, self._paths)) + + def iterdir(self): + return itertools.chain.from_iterable(p.iterdir() for p in self._paths if p.is_dir()) + + def is_dir(self): + return any(p.is_dir() for p in self._paths) + + def is_file(self): + return False + + def glob(self, pattern): + return itertools.chain.from_iterable(p.glob(pattern) for p in self._paths if p.is_dir()) + + def _not_implemented(self, *args, **kwargs): + raise NotImplementedError('not usable on namespaces') + + joinpath = __truediv__ = read_bytes = read_text = _not_implemented + + +class _AnsibleTraversableResources(TraversableResources): + """Implements ``importlib.resources.abc.TraversableResources`` for the + collection Python loaders. + + The result of ``files`` will depend on whether a particular collection, or + a sub package of a collection was referenced, as opposed to + ``ansible_collections`` or a particular namespace. For a collection and + its subpackages, a ``pathlib.Path`` instance will be returned, whereas + for the higher level namespace packages, ``_AnsibleNSTraversable`` + will be returned. + """ + def __init__(self, package, loader): + self._package = package + self._loader = loader + + def _get_name(self, package): + try: + # spec + return package.name + except AttributeError: + # module + return package.__name__ + + def _get_package(self, package): + try: + # spec + return package.__parent__ + except AttributeError: + # module + return package.__package__ + + def _get_path(self, package): + try: + # spec + return package.origin + except AttributeError: + # module + return package.__file__ + + def _is_ansible_ns_package(self, package): + origin = getattr(package, 'origin', None) + if not origin: + return False + + if origin == SYNTHETIC_PACKAGE_NAME: + return True + + module_filename = os.path.basename(origin) + return module_filename in {'__synthetic__', '__init__.py'} + + def _ensure_package(self, package): + if self._is_ansible_ns_package(package): + # Short circuit our loaders + return + if self._get_package(package) != package.__name__: + raise TypeError('%r is not a package' % package.__name__) + + def files(self): + package = self._package + parts = package.split('.') + is_ns = parts[0] == 'ansible_collections' and len(parts) < 3 + + if isinstance(package, string_types): + if is_ns: + # Don't use ``spec_from_loader`` here, because that will point + # to exactly 1 location for a namespace. 
Use ``find_spec`` + # to get a list of all locations for the namespace + package = find_spec(package) + else: + package = spec_from_loader(package, self._loader) + elif not isinstance(package, ModuleType): + raise TypeError('Expected string or module, got %r' % package.__class__.__name__) + + self._ensure_package(package) + if is_ns: + return _AnsibleNSTraversable(*package.submodule_search_locations) + return pathlib.Path(self._get_path(package)).parent class _AnsibleCollectionFinder: @@ -423,6 +569,9 @@ class _AnsibleCollectionPkgLoaderBase: return module_path, has_code, package_path + def get_resource_reader(self, fullname): + return _AnsibleTraversableResources(fullname, self) + def exec_module(self, module): # short-circuit redirect; avoid reinitializing existing modules if self._redirect_module: @@ -509,7 +658,7 @@ class _AnsibleCollectionPkgLoaderBase: return None def _synthetic_filename(self, fullname): - return '<ansible_synthetic_collection_package>' + return SYNTHETIC_PACKAGE_NAME def get_filename(self, fullname): if fullname != self._fullname: @@ -748,6 +897,9 @@ class _AnsibleInternalRedirectLoader: if not self._redirect: raise ImportError('not redirected, go ask path_hook') + def get_resource_reader(self, fullname): + return _AnsibleTraversableResources(fullname, self) + def exec_module(self, module): # should never see this if not self._redirect: diff --git a/requirements.txt b/requirements.txt index b92e9ad4fb..d096901ec2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,9 @@ jinja2 >= 3.0.0 PyYAML >= 5.1 # PyYAML 5.1 is required for Python 3.8+ support cryptography packaging +# importlib.resources in stdlib for py3.9 is lacking native hooks for +# importlib.resources.files +importlib_resources >= 5.0, < 5.1; python_version < '3.10' # NOTE: resolvelib 0.x version bumps should be considered major/breaking # NOTE: and we should update the upper cap with care, at least until 1.0 # NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69 diff --git a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json index 243a5e4372..36f402fc76 100644 --- a/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json +++ b/test/integration/targets/ansible-doc/broken-docs/collections/ansible_collections/testns/testcol/MANIFEST.json @@ -17,7 +17,7 @@ "version": "0.1.1231", "readme": "README.md", "license_file": "COPYING", - "homepage": "", + "homepage": "" }, "file_manifest_file": { "format": 1, diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json index 243a5e4372..36f402fc76 100644 --- a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json +++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol/MANIFEST.json @@ -17,7 +17,7 @@ "version": "0.1.1231", "readme": "README.md", "license_file": "COPYING", - "homepage": "", + "homepage": "" }, "file_manifest_file": { "format": 1, diff --git a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json index 02ec289f47..e930d7d8fa 100644 --- 
a/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json +++ b/test/integration/targets/ansible-doc/collections/ansible_collections/testns/testcol2/MANIFEST.json @@ -17,7 +17,7 @@ "version": "1.2.0", "readme": "README.md", "license_file": "COPYING", - "homepage": "", + "homepage": "" }, "file_manifest_file": { "format": 1, diff --git a/test/integration/targets/ansible-galaxy-collection/tasks/list.yml b/test/integration/targets/ansible-galaxy-collection/tasks/list.yml index b8d63492c6..2f10828119 100644 --- a/test/integration/targets/ansible-galaxy-collection/tasks/list.yml +++ b/test/integration/targets/ansible-galaxy-collection/tasks/list.yml @@ -137,7 +137,7 @@ register: list_result_error ignore_errors: True environment: - ANSIBLE_COLLECTIONS_PATH: "" + ANSIBLE_COLLECTIONS_PATH: "i_dont_exist" - assert: that: diff --git a/test/lib/ansible_test/_data/requirements/ansible.txt b/test/lib/ansible_test/_data/requirements/ansible.txt index b92e9ad4fb..d096901ec2 100644 --- a/test/lib/ansible_test/_data/requirements/ansible.txt +++ b/test/lib/ansible_test/_data/requirements/ansible.txt @@ -7,6 +7,9 @@ jinja2 >= 3.0.0 PyYAML >= 5.1 # PyYAML 5.1 is required for Python 3.8+ support cryptography packaging +# importlib.resources in stdlib for py3.9 is lacking native hooks for +# importlib.resources.files +importlib_resources >= 5.0, < 5.1; python_version < '3.10' # NOTE: resolvelib 0.x version bumps should be considered major/breaking # NOTE: and we should update the upper cap with care, at least until 1.0 # NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69 diff --git a/test/units/cli/galaxy/test_execute_list_collection.py b/test/units/cli/galaxy/test_execute_list_collection.py index e8a834d9c5..95fae159cf 100644 --- a/test/units/cli/galaxy/test_execute_list_collection.py +++ b/test/units/cli/galaxy/test_execute_list_collection.py @@ -5,14 +5,18 @@ from __future__ import absolute_import, division, print_function __metaclass__ = type +import pathlib + import pytest +from ansible import constants as C from ansible import context from ansible.cli.galaxy import GalaxyCLI from ansible.errors import AnsibleError, AnsibleOptionsError from ansible.galaxy import collection from ansible.galaxy.dependency_resolution.dataclasses import Requirement from ansible.module_utils._text import to_native +from ansible.plugins.loader import init_plugin_loader def path_exists(path): @@ -22,20 +26,18 @@ def path_exists(path): return False elif to_native(path) == 'nope': return False - else: - return True + return True def isdir(path): if to_native(path) == 'nope': return False - else: - return True + return True def cliargs(collections_paths=None, collection_name=None): if collections_paths is None: - collections_paths = ['~/root/.ansible/collections', '/usr/share/ansible/collections'] + collections_paths = ['/root/.ansible/collections', '/usr/share/ansible/collections'] context.CLIARGS._store = { 'collections_path': collections_paths, @@ -46,95 +48,61 @@ def cliargs(collections_paths=None, collection_name=None): @pytest.fixture -def mock_collection_objects(mocker): - mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', '/usr/share/ansible/collections']) - mocker.patch('ansible.cli.galaxy.validate_collection_path', - side_effect=['/root/.ansible/collections/ansible_collections', '/usr/share/ansible/collections/ansible_collections']) - - collection_args_1 = ( - ( +def mock_from_path(mocker, monkeypatch): + collection_args 
= { + '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj': ( 'sandwiches.pbj', - '1.5.0', - None, + '1.0.0', + '/usr/share/ansible/collections/ansible_collections/sandwiches/pbj', 'dir', None, ), - ( - 'sandwiches.reuben', - '2.5.0', - None, + '/usr/share/ansible/collections/ansible_collections/sandwiches/ham': ( + 'sandwiches.ham', + '1.0.0', + '/usr/share/ansible/collections/ansible_collections/sandwiches/ham', 'dir', None, ), - ) - - collection_args_2 = ( - ( + '/root/.ansible/collections/ansible_collections/sandwiches/pbj': ( 'sandwiches.pbj', - '1.0.0', - None, + '1.5.0', + '/root/.ansible/collections/ansible_collections/sandwiches/pbj', 'dir', None, ), - ( - 'sandwiches.ham', - '1.0.0', - None, + '/root/.ansible/collections/ansible_collections/sandwiches/reuben': ( + 'sandwiches.reuben', + '2.5.0', + '/root/.ansible/collections/ansible_collections/sandwiches/reuben', 'dir', None, ), - ) + } - collections_path_1 = [Requirement(*cargs) for cargs in collection_args_1] - collections_path_2 = [Requirement(*cargs) for cargs in collection_args_2] + def dispatch_requirement(path, am): + return Requirement(*collection_args[to_native(path)]) - mocker.patch('ansible.cli.galaxy.find_existing_collections', side_effect=[collections_path_1, collections_path_2]) + files_mock = mocker.MagicMock() + mocker.patch('ansible.galaxy.collection.files', return_value=files_mock) + files_mock.glob.return_value = [] + mocker.patch.object(pathlib.Path, 'is_dir', return_value=True) + for path, args in collection_args.items(): + files_mock.glob.return_value.append(pathlib.Path(args[2])) -@pytest.fixture -def mock_from_path(mocker): - def _from_path(collection_name='pbj'): - collection_args = { - 'sandwiches.pbj': ( - ( - 'sandwiches.pbj', - '1.5.0', - None, - 'dir', - None, - ), - ( - 'sandwiches.pbj', - '1.0.0', - None, - 'dir', - None, - ), - ), - 'sandwiches.ham': ( - ( - 'sandwiches.ham', - '1.0.0', - None, - 'dir', - None, - ), - ), - } - - from_path_objects = [Requirement(*args) for args in collection_args[collection_name]] - mocker.patch('ansible.cli.galaxy.Requirement.from_dir_path_as_unknown', side_effect=from_path_objects) - - return _from_path - - -def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tmp_path_factory): + mocker.patch('ansible.galaxy.collection.Candidate.from_dir_path_as_unknown', side_effect=dispatch_requirement) + + monkeypatch.setattr(C, 'COLLECTIONS_PATHS', ['/root/.ansible/collections', '/usr/share/ansible/collections']) + + +def test_execute_list_collection_all(mocker, capsys, mock_from_path, tmp_path_factory): """Test listing all collections from multiple paths""" cliargs() + init_plugin_loader() mocker.patch('os.path.exists', return_value=True) - mocker.patch('os.path.isdir', return_value=True) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list']) tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) @@ -152,21 +120,22 @@ def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tm assert out_lines[5] == 'sandwiches.reuben 2.5.0 ' assert out_lines[6] == '' assert out_lines[7] == '# /usr/share/ansible/collections/ansible_collections' - assert out_lines[8] == 'Collection Version' - assert out_lines[9] == '-------------- -------' - assert out_lines[10] == 'sandwiches.ham 1.0.0 ' - assert out_lines[11] == 'sandwiches.pbj 1.0.0 ' + assert out_lines[8] == 'Collection Version' + assert out_lines[9] 
== '----------------- -------' + assert out_lines[10] == 'sandwiches.ham 1.0.0 ' + assert out_lines[11] == 'sandwiches.pbj 1.0.0 ' -def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory): +def test_execute_list_collection_specific(mocker, capsys, mock_from_path, tmp_path_factory): """Test listing a specific collection""" collection_name = 'sandwiches.ham' - mock_from_path(collection_name) cliargs(collection_name=collection_name) + init_plugin_loader() + mocker.patch('os.path.exists', path_exists) - mocker.patch('os.path.isdir', return_value=True) + # mocker.patch.object(pathlib.Path, 'is_dir', return_value=True) mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name) mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5)) @@ -186,15 +155,16 @@ def test_execute_list_collection_specific(mocker, capsys, mock_collection_object assert out_lines[4] == 'sandwiches.ham 1.0.0 ' -def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory): +def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_from_path, tmp_path_factory): """Test listing a specific collection that exists at multiple paths""" collection_name = 'sandwiches.pbj' - mock_from_path(collection_name) cliargs(collection_name=collection_name) + init_plugin_loader() + mocker.patch('os.path.exists', path_exists) - mocker.patch('os.path.isdir', return_value=True) + # mocker.patch.object(pathlib.Path, 'is_dir', return_value=True) mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name]) @@ -221,6 +191,8 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collect def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory): """Test an invalid fully qualified collection name (FQCN)""" + init_plugin_loader() + collection_name = 'no.good.name' cliargs(collection_name=collection_name) @@ -238,6 +210,7 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory """Test listing collections when no valid paths are given""" cliargs() + init_plugin_loader() mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.isdir', return_value=False) @@ -257,13 +230,14 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory assert 'exists, but it\nis not a directory.' 
in err -def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects, tmp_path_factory): +def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_from_path, tmp_path_factory): """Test listing all collections when one invalid path is given""" - cliargs() + cliargs(collections_paths=['nope']) + init_plugin_loader() + mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.isdir', isdir) - mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', 'nope']) mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope']) diff --git a/test/units/cli/test_doc.py b/test/units/cli/test_doc.py index b10f088893..50b714eb79 100644 --- a/test/units/cli/test_doc.py +++ b/test/units/cli/test_doc.py @@ -5,7 +5,7 @@ __metaclass__ = type import pytest from ansible.cli.doc import DocCLI, RoleMixin -from ansible.plugins.loader import module_loader +from ansible.plugins.loader import module_loader, init_plugin_loader TTY_IFY_DATA = { @@ -118,6 +118,7 @@ def test_builtin_modules_list(): args = ['ansible-doc', '-l', 'ansible.builtin', '-t', 'module'] obj = DocCLI(args=args) obj.parse() + init_plugin_loader() result = obj._list_plugins('module', module_loader) assert len(result) > 0 diff --git a/test/units/executor/module_common/test_recursive_finder.py b/test/units/executor/module_common/test_recursive_finder.py index 8136a00649..651ed78255 100644 --- a/test/units/executor/module_common/test_recursive_finder.py +++ b/test/units/executor/module_common/test_recursive_finder.py @@ -29,7 +29,7 @@ from io import BytesIO import ansible.errors from ansible.executor.module_common import recursive_finder - +from ansible.plugins.loader import init_plugin_loader # These are the modules that are brought in by module_utils/basic.py This may need to be updated # when basic.py gains new imports @@ -79,6 +79,8 @@ ANSIBLE_LIB = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.pa @pytest.fixture def finder_containers(): + init_plugin_loader() + FinderContainers = namedtuple('FinderContainers', ['zf']) zipoutput = BytesIO() diff --git a/test/units/executor/test_play_iterator.py b/test/units/executor/test_play_iterator.py index 6670888eaf..00c77fcf4b 100644 --- a/test/units/executor/test_play_iterator.py +++ b/test/units/executor/test_play_iterator.py @@ -25,6 +25,7 @@ from unittest.mock import patch, MagicMock from ansible.executor.play_iterator import HostState, PlayIterator, IteratingStates, FailedStates from ansible.playbook import Playbook from ansible.playbook.play_context import PlayContext +from ansible.plugins.loader import init_plugin_loader from units.mock.loader import DictDataLoader from units.mock.path import mock_unfrackpath_noop @@ -286,6 +287,7 @@ class TestPlayIterator(unittest.TestCase): self.assertNotIn(hosts[0], failed_hosts) def test_play_iterator_nested_blocks(self): + init_plugin_loader() fake_loader = DictDataLoader({ "test_play.yml": """ - hosts: all diff --git a/test/units/galaxy/test_collection.py b/test/units/galaxy/test_collection.py index 1f1a499e9c..b8a6d2ebfe 100644 --- a/test/units/galaxy/test_collection.py +++ b/test/units/galaxy/test_collection.py @@ -23,6 +23,7 @@ from ansible import context from ansible.cli.galaxy import GalaxyCLI, SERVER_DEF from ansible.errors import AnsibleError from ansible.galaxy import api, collection, token +from ansible.plugins.loader import init_plugin_loader from ansible.module_utils._text 
import to_bytes, to_native, to_text from ansible.module_utils.six.moves import builtins from ansible.utils import context_objects as co @@ -854,57 +855,6 @@ def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch): % galaxy_server.api_server -def test_find_existing_collections(tmp_path_factory, monkeypatch): - test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')) - concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False) - collection1 = os.path.join(test_dir, 'namespace1', 'collection1') - collection2 = os.path.join(test_dir, 'namespace2', 'collection2') - fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3') - fake_collection2 = os.path.join(test_dir, 'namespace4') - os.makedirs(collection1) - os.makedirs(collection2) - os.makedirs(os.path.split(fake_collection1)[0]) - - open(fake_collection1, 'wb+').close() - open(fake_collection2, 'wb+').close() - - collection1_manifest = json.dumps({ - 'collection_info': { - 'namespace': 'namespace1', - 'name': 'collection1', - 'version': '1.2.3', - 'authors': ['Jordan Borean'], - 'readme': 'README.md', - 'dependencies': {}, - }, - 'format': 1, - }) - with open(os.path.join(collection1, 'MANIFEST.json'), 'wb') as manifest_obj: - manifest_obj.write(to_bytes(collection1_manifest)) - - mock_warning = MagicMock() - monkeypatch.setattr(Display, 'warning', mock_warning) - - actual = list(collection.find_existing_collections(test_dir, artifacts_manager=concrete_artifact_cm)) - - assert len(actual) == 2 - for actual_collection in actual: - if '%s.%s' % (actual_collection.namespace, actual_collection.name) == 'namespace1.collection1': - assert actual_collection.namespace == 'namespace1' - assert actual_collection.name == 'collection1' - assert actual_collection.ver == '1.2.3' - assert to_text(actual_collection.src) == collection1 - else: - assert actual_collection.namespace == 'namespace2' - assert actual_collection.name == 'collection2' - assert actual_collection.ver == '*' - assert to_text(actual_collection.src) == collection2 - - assert mock_warning.call_count == 1 - assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, nor has it galaxy.yml: " \ - "cannot detect version." 
% to_text(collection2) - - def test_download_file(tmp_path_factory, monkeypatch): temp_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')) diff --git a/test/units/galaxy/test_collection_install.py b/test/units/galaxy/test_collection_install.py index 2118f0ec2e..4a55b3815b 100644 --- a/test/units/galaxy/test_collection_install.py +++ b/test/units/galaxy/test_collection_install.py @@ -919,57 +919,6 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch): assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path) -def test_install_collections_existing_without_force(collection_artifact, monkeypatch): - collection_path, collection_tar = collection_artifact - temp_path = os.path.split(collection_tar)[0] - - mock_display = MagicMock() - monkeypatch.setattr(Display, 'display', mock_display) - - concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False) - - assert os.path.isdir(collection_path) - - requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)] - collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False) - - assert os.path.isdir(collection_path) - - actual_files = os.listdir(collection_path) - actual_files.sort() - assert actual_files == [b'README.md', b'docs', b'galaxy.yml', b'playbooks', b'plugins', b'roles', b'runme.sh'] - - # Filter out the progress cursor display calls. - display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1] - assert len(display_msgs) == 1 - - assert display_msgs[0] == 'Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`.' 
- - for msg in display_msgs: - assert 'WARNING' not in msg - - -def test_install_missing_metadata_warning(collection_artifact, monkeypatch): - collection_path, collection_tar = collection_artifact - temp_path = os.path.split(collection_tar)[0] - - mock_display = MagicMock() - monkeypatch.setattr(Display, 'display', mock_display) - - for file in [b'MANIFEST.json', b'galaxy.yml']: - b_path = os.path.join(collection_path, file) - if os.path.isfile(b_path): - os.unlink(b_path) - - concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False) - requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file', None)] - collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, False, concrete_artifact_cm, True, False) - - display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1] - - assert 'WARNING' in display_msgs[0] - - # Makes sure we don't get stuck in some recursive loop @pytest.mark.parametrize('collection_artifact', [ {'ansible_namespace.collection': '>=0.0.1'}, diff --git a/test/units/parsing/test_mod_args.py b/test/units/parsing/test_mod_args.py index 5d3f5d2582..c11cc50910 100644 --- a/test/units/parsing/test_mod_args.py +++ b/test/units/parsing/test_mod_args.py @@ -10,6 +10,7 @@ import re from ansible.errors import AnsibleParserError from ansible.parsing.mod_args import ModuleArgsParser +from ansible.plugins.loader import init_plugin_loader from ansible.utils.sentinel import Sentinel @@ -119,6 +120,7 @@ class TestModArgsDwim: assert err.value.args[0] == msg def test_multiple_actions_ping_shell(self): + init_plugin_loader() args_dict = {'ping': 'data=hi', 'shell': 'echo hi'} m = ModuleArgsParser(args_dict) with pytest.raises(AnsibleParserError) as err: @@ -129,6 +131,7 @@ class TestModArgsDwim: assert actions == set(['ping', 'shell']) def test_bogus_action(self): + init_plugin_loader() args_dict = {'bogusaction': {}} m = ModuleArgsParser(args_dict) with pytest.raises(AnsibleParserError) as err: diff --git a/test/units/playbook/test_task.py b/test/units/playbook/test_task.py index 070d7aa761..e28d2ecdfa 100644 --- a/test/units/playbook/test_task.py +++ b/test/units/playbook/test_task.py @@ -22,6 +22,7 @@ __metaclass__ = type from units.compat import unittest from unittest.mock import patch from ansible.playbook.task import Task +from ansible.plugins.loader import init_plugin_loader from ansible.parsing.yaml import objects from ansible import errors @@ -74,6 +75,7 @@ class TestTask(unittest.TestCase): @patch.object(errors.AnsibleError, '_get_error_lines_from_file') def test_load_task_kv_form_error_36848(self, mock_get_err_lines): + init_plugin_loader() ds = objects.AnsibleMapping(kv_bad_args_ds) ds.ansible_pos = ('test_task_faux_playbook.yml', 1, 1) mock_get_err_lines.return_value = (kv_bad_args_str, '') diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py index f2bbe19470..40af80819f 100644 --- a/test/units/plugins/action/test_action.py +++ b/test/units/plugins/action/test_action.py @@ -22,6 +22,7 @@ __metaclass__ = type import os import re +from importlib import import_module from ansible import constants as C from units.compat import unittest @@ -33,6 +34,7 @@ from ansible.module_utils.six.moves import shlex_quote, builtins from ansible.module_utils._text import to_bytes from ansible.playbook.play_context import PlayContext from ansible.plugins.action import 
ActionBase +from ansible.plugins.loader import init_plugin_loader from ansible.template import Templar from ansible.vars.clean import clean_facts @@ -109,6 +111,11 @@ class TestActionBase(unittest.TestCase): self.assertEqual(results, {}) def test_action_base__configure_module(self): + init_plugin_loader() + # Pre-populate the ansible.builtin collection + # so reading the ansible_builtin_runtime.yml happens + # before the mock_open below + import_module('ansible_collections.ansible.builtin') fake_loader = DictDataLoader({ }) diff --git a/test/units/plugins/cache/test_cache.py b/test/units/plugins/cache/test_cache.py index 25b84c0624..31a1da7014 100644 --- a/test/units/plugins/cache/test_cache.py +++ b/test/units/plugins/cache/test_cache.py @@ -29,7 +29,7 @@ from units.compat import unittest from ansible.errors import AnsibleError from ansible.plugins.cache import CachePluginAdjudicator from ansible.plugins.cache.memory import CacheModule as MemoryCache -from ansible.plugins.loader import cache_loader +from ansible.plugins.loader import cache_loader, init_plugin_loader from ansible.vars.fact_cache import FactCache import pytest @@ -183,6 +183,7 @@ class TestFactCache(unittest.TestCase): assert len(self.cache.keys()) == 0 def test_plugin_load_failure(self): + init_plugin_loader() # See https://github.com/ansible/ansible/issues/18751 # Note no fact_connection config set, so this will fail with mock.patch('ansible.constants.CACHE_PLUGIN', 'json'): diff --git a/test/units/template/test_templar.py b/test/units/template/test_templar.py index 6747f76831..146da0b40f 100644 --- a/test/units/template/test_templar.py +++ b/test/units/template/test_templar.py @@ -27,6 +27,7 @@ from unittest.mock import patch from ansible import constants as C from ansible.errors import AnsibleError, AnsibleUndefinedVariable from ansible.module_utils.six import string_types +from ansible.plugins.loader import init_plugin_loader from ansible.template import Templar, AnsibleContext, AnsibleEnvironment, AnsibleUndefined from ansible.utils.unsafe_proxy import AnsibleUnsafe, wrap_var from units.mock.loader import DictDataLoader @@ -34,6 +35,7 @@ from units.mock.loader import DictDataLoader class BaseTemplar(object): def setUp(self): + init_plugin_loader() self.test_vars = dict( foo="bar", bam="{{foo}}", diff --git a/test/integration/targets/collections/testcoll2/MANIFEST.json b/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py index e69de29bb2..e69de29bb2 100644 --- a/test/integration/targets/collections/testcoll2/MANIFEST.json +++ b/test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll2/__init__.py diff --git a/test/units/utils/collection_loader/test_collection_loader.py b/test/units/utils/collection_loader/test_collection_loader.py index f7050dcd6d..a8a31a3b01 100644 --- a/test/units/utils/collection_loader/test_collection_loader.py +++ b/test/units/utils/collection_loader/test_collection_loader.py @@ -13,7 +13,7 @@ from ansible.modules import ping as ping_module from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef from ansible.utils.collection_loader._collection_finder import ( _AnsibleCollectionFinder, _AnsibleCollectionLoader, _AnsibleCollectionNSPkgLoader, _AnsibleCollectionPkgLoader, - _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsiblePathHookFinder, + _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsibleNSTraversable, 
_AnsiblePathHookFinder, _get_collection_name_from_path, _get_collection_role_path, _get_collection_metadata, _iter_modules_impl ) from ansible.utils.collection_loader._collection_config import _EventSource @@ -828,6 +828,52 @@ def test_collectionref_components_invalid(name, subdirs, resource, ref_type, exp assert re.search(expected_error_expression, str(curerr.value)) +@pytest.mark.skipif(not PY3, reason='importlib.resources only supported for py3') +def test_importlib_resources(): + if sys.version_info < (3, 10): + from importlib_resources import files + else: + from importlib.resources import files + from pathlib import Path + + f = get_default_finder() + reset_collections_loader_state(f) + + ansible_collections_ns = files('ansible_collections') + ansible_ns = files('ansible_collections.ansible') + testns = files('ansible_collections.testns') + testcoll = files('ansible_collections.testns.testcoll') + testcoll2 = files('ansible_collections.testns.testcoll2') + module_utils = files('ansible_collections.testns.testcoll.plugins.module_utils') + + assert isinstance(ansible_collections_ns, _AnsibleNSTraversable) + assert isinstance(ansible_ns, _AnsibleNSTraversable) + assert isinstance(testcoll, Path) + assert isinstance(module_utils, Path) + + assert ansible_collections_ns.is_dir() + assert ansible_ns.is_dir() + assert testcoll.is_dir() + assert module_utils.is_dir() + + first_path = Path(default_test_collection_paths[0]) + second_path = Path(default_test_collection_paths[1]) + testns_paths = [] + ansible_ns_paths = [] + for path in default_test_collection_paths[:2]: + ansible_ns_paths.append(Path(path) / 'ansible_collections' / 'ansible') + testns_paths.append(Path(path) / 'ansible_collections' / 'testns') + + assert testns._paths == testns_paths + assert ansible_ns._paths == ansible_ns_paths + assert ansible_collections_ns._paths == [Path(p) / 'ansible_collections' for p in default_test_collection_paths[:2]] + assert testcoll2 == second_path / 'ansible_collections' / 'testns' / 'testcoll2' + + assert {p.name for p in module_utils.glob('*.py')} == {'__init__.py', 'my_other_util.py', 'my_util.py'} + nestcoll_mu_init = first_path / 'ansible_collections' / 'testns' / 'testcoll' / 'plugins' / 'module_utils' / '__init__.py' + assert next(module_utils.glob('__init__.py')) == nestcoll_mu_init + + # BEGIN TEST SUPPORT default_test_collection_paths = [ |
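
Taken together, the loader changes above replace import-time initialization with an explicit init_plugin_loader() call, which is why the touched scripts and unit tests now call it before using any plugin loader. A minimal usage sketch, assuming only what this diff adds (init_plugin_loader with an optional list of extra collection paths) plus the pre-existing module_loader.find_plugin(); the /opt/site/collections path is illustrative.

    # Hedged usage sketch of the new initialization contract; the extra
    # collections path is an illustrative example, not a default.
    from ansible.plugins.loader import init_plugin_loader, module_loader

    # Must run before any plugin lookup; also installs the collection finder
    # so importlib.resources can see ansible_collections.
    init_plugin_loader(['/opt/site/collections'])

    print(module_loader.find_plugin('ping'))
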