summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMatt Clay <mclay@redhat.com>2021-09-20 18:39:07 -0700
committerGitHub <noreply@github.com>2021-09-20 18:39:07 -0700
commit4ea8d9a7824827cf3d4a206599ffd7fe3a09eafd (patch)
tree57b4ea491f6f93b6b938037e28dca90098754ca2
parent989eeb243fcf9236bd54d4df60c01f6db4e642a7 (diff)
downloadansible-4ea8d9a7824827cf3d4a206599ffd7fe3a09eafd.tar.gz
ansible-test - split controller/target testing (#75605)
-rw-r--r--MANIFEST.in2
l---------bin/ansible-test2
-rw-r--r--changelogs/fragments/ansible-test-split-controller-target.yaml38
-rw-r--r--docs/docsite/rst/dev_guide/testing/sanity/ansible-test-future-boilerplate.rst8
-rw-r--r--docs/docsite/rst/dev_guide/testing_sanity.rst2
-rw-r--r--lib/ansible/executor/module_common.py2
-rw-r--r--lib/ansible/plugins/callback/junit.py72
-rw-r--r--lib/ansible/utils/_junit_xml.py268
-rw-r--r--test/integration/targets/adhoc/aliases1
-rw-r--r--test/integration/targets/ansiballz_python/aliases1
-rw-r--r--test/integration/targets/ansible-doc/aliases1
-rw-r--r--test/integration/targets/ansible-galaxy-collection-scm/aliases3
-rw-r--r--test/integration/targets/ansible-galaxy-collection/aliases1
-rw-r--r--test/integration/targets/ansible-galaxy-role/aliases2
-rw-r--r--test/integration/targets/ansible-galaxy/aliases3
-rw-r--r--test/integration/targets/ansible-inventory/aliases1
-rw-r--r--test/integration/targets/ansible-pull/aliases2
-rw-r--r--test/integration/targets/ansible-runner/aliases3
-rw-r--r--test/integration/targets/ansible-test-cloud-acme/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-cs/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-foreman/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-galaxy/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-httptester-windows/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-httptester/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-nios/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-openshift/aliases1
-rw-r--r--test/integration/targets/ansible-test-cloud-vcenter/aliases1
-rw-r--r--test/integration/targets/ansible-test-docker/aliases1
-rw-r--r--test/integration/targets/ansible-test/aliases2
-rw-r--r--test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases1
-rw-r--r--test/integration/targets/ansible-vault/aliases2
-rw-r--r--test/integration/targets/ansible-vault/single_vault_as_string.yml2
-rw-r--r--test/integration/targets/ansible/aliases2
-rw-r--r--test/integration/targets/any_errors_fatal/aliases1
-rw-r--r--test/integration/targets/args/aliases1
-rw-r--r--test/integration/targets/argspec/aliases1
-rw-r--r--test/integration/targets/assert/aliases2
-rw-r--r--test/integration/targets/async_extra_data/aliases1
-rw-r--r--test/integration/targets/become/aliases1
-rw-r--r--test/integration/targets/become_su/aliases2
-rw-r--r--test/integration/targets/become_unprivileged/aliases2
-rw-r--r--test/integration/targets/binary/aliases1
-rw-r--r--test/integration/targets/binary_modules_posix/aliases1
-rw-r--r--test/integration/targets/blocks/aliases1
-rw-r--r--test/integration/targets/builtin_vars_prompt/aliases1
-rw-r--r--test/integration/targets/callback_default/aliases1
-rw-r--r--test/integration/targets/changed_when/aliases1
-rw-r--r--test/integration/targets/check_mode/aliases1
-rw-r--r--test/integration/targets/cli/aliases1
-rwxr-xr-xtest/integration/targets/collections/runme.sh4
-rw-r--r--test/integration/targets/collections_plugin_namespace/aliases1
-rw-r--r--test/integration/targets/collections_runtime_pythonpath/aliases3
-rwxr-xr-xtest/integration/targets/collections_runtime_pythonpath/runme.sh6
-rw-r--r--test/integration/targets/command_nonexisting/aliases3
-rw-r--r--test/integration/targets/command_shell/tasks/main.yml4
-rw-r--r--test/integration/targets/common_network/aliases1
-rw-r--r--test/integration/targets/conditionals/aliases1
-rw-r--r--test/integration/targets/config/aliases1
-rw-r--r--test/integration/targets/connection_delegation/aliases1
-rw-r--r--test/integration/targets/connection_paramiko_ssh/aliases1
-rw-r--r--test/integration/targets/connection_ssh/aliases1
-rw-r--r--test/integration/targets/controller/aliases2
-rw-r--r--test/integration/targets/controller/tasks/main.yml9
-rw-r--r--test/integration/targets/dataloader/aliases1
-rw-r--r--test/integration/targets/debug/aliases1
-rw-r--r--test/integration/targets/delegate_to/aliases2
-rw-r--r--test/integration/targets/dict_transformations/aliases1
-rw-r--r--test/integration/targets/dnf/tasks/dnf.yml4
-rw-r--r--test/integration/targets/egg-info/aliases1
-rw-r--r--test/integration/targets/embedded_module/aliases1
-rw-r--r--test/integration/targets/environment/aliases1
-rw-r--r--test/integration/targets/error_from_connection/aliases1
-rw-r--r--test/integration/targets/facts_d/aliases1
-rw-r--r--test/integration/targets/facts_linux_network/aliases1
-rw-r--r--test/integration/targets/failed_when/aliases1
-rw-r--r--test/integration/targets/fetch/aliases1
-rw-r--r--test/integration/targets/fetch/hosts.yml8
-rwxr-xr-xtest/integration/targets/fetch/runme.sh6
-rw-r--r--test/integration/targets/file/tasks/main.yml5
-rw-r--r--test/integration/targets/filter_core/aliases2
-rw-r--r--test/integration/targets/filter_encryption/aliases3
-rw-r--r--test/integration/targets/filter_mathstuff/aliases2
-rw-r--r--test/integration/targets/filter_urls/aliases2
-rw-r--r--test/integration/targets/filter_urlsplit/aliases2
-rw-r--r--test/integration/targets/gathering/aliases1
-rw-r--r--test/integration/targets/gathering_facts/aliases1
-rw-r--r--test/integration/targets/groupby_filter/aliases1
-rw-r--r--test/integration/targets/handler_race/aliases3
-rw-r--r--test/integration/targets/handlers/aliases3
-rw-r--r--test/integration/targets/hardware_facts/aliases1
-rw-r--r--test/integration/targets/hash/aliases1
-rw-r--r--test/integration/targets/hosts_field/aliases1
-rw-r--r--test/integration/targets/ignore_errors/aliases1
-rw-r--r--test/integration/targets/ignore_unreachable/aliases1
-rw-r--r--test/integration/targets/import_tasks/aliases2
-rw-r--r--test/integration/targets/incidental_cloud_init_data_facts/aliases1
-rw-r--r--test/integration/targets/incidental_deploy_helper/aliases1
-rw-r--r--test/integration/targets/incidental_inventory_aws_ec2/aliases1
-rwxr-xr-xtest/integration/targets/incidental_inventory_aws_ec2/runme.sh4
-rw-r--r--test/integration/targets/incidental_inventory_docker_swarm/aliases3
-rw-r--r--test/integration/targets/incidental_inventory_foreman/aliases1
-rw-r--r--test/integration/targets/incidental_inventory_foreman/inspect_cache.yml4
-rwxr-xr-xtest/integration/targets/incidental_inventory_foreman/runme.sh4
-rw-r--r--test/integration/targets/incidental_mongodb_parameter/aliases1
-rw-r--r--test/integration/targets/include_import/aliases2
-rw-r--r--test/integration/targets/include_vars-ad-hoc/aliases1
-rw-r--r--test/integration/targets/include_when_parent_is_dynamic/aliases2
-rw-r--r--test/integration/targets/include_when_parent_is_static/aliases2
-rw-r--r--test/integration/targets/includes/aliases1
-rw-r--r--test/integration/targets/includes_race/aliases2
-rw-r--r--test/integration/targets/infra/aliases1
-rw-r--r--test/integration/targets/interpreter_discovery_python/aliases1
-rw-r--r--test/integration/targets/interpreter_discovery_python_delegate_facts/aliases1
-rw-r--r--test/integration/targets/inventory/aliases1
-rw-r--r--test/integration/targets/inventory_cache/aliases1
-rw-r--r--test/integration/targets/inventory_yaml/aliases1
-rw-r--r--test/integration/targets/jinja2_native_types/aliases1
-rw-r--r--test/integration/targets/jinja_plugins/aliases1
-rw-r--r--test/integration/targets/json_cleanup/aliases1
-rw-r--r--test/integration/targets/limit_inventory/aliases1
-rw-r--r--test/integration/targets/lookup_config/aliases2
-rw-r--r--test/integration/targets/lookup_csvfile/aliases1
-rw-r--r--test/integration/targets/lookup_dict/aliases2
-rw-r--r--test/integration/targets/lookup_env/aliases2
-rw-r--r--test/integration/targets/lookup_file/aliases2
-rw-r--r--test/integration/targets/lookup_first_found/aliases2
-rw-r--r--test/integration/targets/lookup_indexed_items/aliases2
-rw-r--r--test/integration/targets/lookup_ini/aliases1
-rw-r--r--test/integration/targets/lookup_inventory_hostnames/aliases1
-rw-r--r--test/integration/targets/lookup_items/aliases2
-rw-r--r--test/integration/targets/lookup_lines/aliases2
-rw-r--r--test/integration/targets/lookup_list/aliases2
-rw-r--r--test/integration/targets/lookup_nested/aliases2
-rw-r--r--test/integration/targets/lookup_password/aliases2
-rw-r--r--test/integration/targets/lookup_pipe/aliases2
-rw-r--r--test/integration/targets/lookup_random_choice/aliases2
-rw-r--r--test/integration/targets/lookup_sequence/aliases2
-rw-r--r--test/integration/targets/lookup_subelements/aliases2
-rw-r--r--test/integration/targets/lookup_template/aliases2
-rw-r--r--test/integration/targets/lookup_together/aliases2
-rw-r--r--test/integration/targets/lookup_unvault/aliases1
-rw-r--r--test/integration/targets/lookup_url/aliases2
-rw-r--r--test/integration/targets/lookup_varnames/aliases1
-rw-r--r--test/integration/targets/lookup_vars/aliases2
-rw-r--r--test/integration/targets/loop_control/aliases1
-rw-r--r--test/integration/targets/loops/aliases2
-rw-r--r--test/integration/targets/meta_tasks/aliases1
-rw-r--r--test/integration/targets/missing_required_lib/aliases1
-rw-r--r--test/integration/targets/module_defaults/aliases1
-rw-r--r--test/integration/targets/module_no_log/aliases2
-rw-r--r--test/integration/targets/module_precedence/aliases1
-rw-r--r--test/integration/targets/module_tracebacks/aliases2
-rw-r--r--test/integration/targets/module_utils/aliases1
-rw-r--r--test/integration/targets/module_utils/module_utils_test_no_log.yml3
-rw-r--r--test/integration/targets/module_utils/module_utils_vvvvv.yml3
-rw-r--r--test/integration/targets/module_utils_distro/aliases3
-rw-r--r--test/integration/targets/module_utils_facts.system.selinux/aliases1
-rw-r--r--test/integration/targets/no_log/aliases1
-rw-r--r--test/integration/targets/noexec/aliases1
-rw-r--r--test/integration/targets/old_style_cache_plugins/aliases1
-rw-r--r--test/integration/targets/old_style_modules_posix/aliases1
-rw-r--r--test/integration/targets/omit/aliases1
-rw-r--r--test/integration/targets/order/aliases1
-rw-r--r--test/integration/targets/parsing/aliases1
-rw-r--r--test/integration/targets/path_lookups/aliases1
-rw-r--r--test/integration/targets/path_with_comma_in_inventory/aliases1
-rw-r--r--test/integration/targets/pause/aliases2
-rw-r--r--test/integration/targets/pkg_resources/aliases1
-rw-r--r--test/integration/targets/play_iterator/aliases1
-rw-r--r--test/integration/targets/playbook/aliases1
-rw-r--r--test/integration/targets/plugin_config_for_inventory/aliases1
-rw-r--r--test/integration/targets/plugin_filtering/aliases1
-rw-r--r--test/integration/targets/plugin_loader/aliases1
-rw-r--r--test/integration/targets/plugin_namespace/aliases1
-rw-r--r--test/integration/targets/rel_plugin_loading/aliases1
-rw-r--r--test/integration/targets/remote_tmp/aliases2
-rw-r--r--test/integration/targets/remote_tmp/playbook.yml14
-rwxr-xr-xtest/integration/targets/remote_tmp/runme.sh2
-rw-r--r--test/integration/targets/retry_task_name_in_callback/aliases1
-rw-r--r--test/integration/targets/roles/aliases1
-rw-r--r--test/integration/targets/roles_arg_spec/aliases1
-rw-r--r--test/integration/targets/roles_var_inheritance/aliases1
-rw-r--r--test/integration/targets/run_modules/aliases1
-rw-r--r--test/integration/targets/set_fact/aliases2
-rw-r--r--test/integration/targets/set_stats/aliases1
-rwxr-xr-xtest/integration/targets/set_stats/runme.sh4
-rw-r--r--test/integration/targets/set_stats/test_aggregate.yml2
-rw-r--r--test/integration/targets/set_stats/test_simple.yml2
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml12
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-13-python-3.yml3
-rw-r--r--test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml (renamed from test/integration/targets/setup_paramiko/install-FreeBSD-13-python-2.yml)2
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-2.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-3.yml4
-rw-r--r--test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml4
-rw-r--r--test/integration/targets/special_vars/aliases1
-rw-r--r--test/integration/targets/special_vars_hosts/aliases1
-rw-r--r--test/integration/targets/split/aliases2
-rw-r--r--test/integration/targets/split/tasks/main.yml30
-rw-r--r--test/integration/targets/subversion/roles/subversion/defaults/main.yml5
-rwxr-xr-xtest/integration/targets/subversion/runme.sh13
-rw-r--r--test/integration/targets/subversion/runme.yml2
-rw-r--r--test/integration/targets/tags/aliases2
-rw-r--r--test/integration/targets/task_ordering/aliases1
-rw-r--r--test/integration/targets/tasks/aliases1
-rw-r--r--test/integration/targets/template/aliases2
-rw-r--r--test/integration/targets/template_jinja2_latest/aliases2
-rw-r--r--test/integration/targets/template_jinja2_non_native/aliases1
-rw-r--r--test/integration/targets/templating_lookups/aliases2
-rw-r--r--test/integration/targets/templating_settings/aliases1
-rw-r--r--test/integration/targets/test_core/aliases1
-rw-r--r--test/integration/targets/test_files/aliases1
-rw-r--r--test/integration/targets/test_mathstuff/aliases1
-rw-r--r--test/integration/targets/throttle/aliases1
-rw-r--r--test/integration/targets/unarchive/tasks/test_unprivileged_user.yml1
-rw-r--r--test/integration/targets/undefined/aliases1
-rw-r--r--test/integration/targets/unicode/aliases1
-rw-r--r--test/integration/targets/unsafe_writes/aliases2
-rw-r--r--test/integration/targets/unsafe_writes/basic.yml21
-rwxr-xr-xtest/integration/targets/unsafe_writes/runme.sh6
-rw-r--r--test/integration/targets/until/aliases1
-rw-r--r--test/integration/targets/unvault/aliases1
-rw-r--r--test/integration/targets/uri/meta/main.yml1
-rw-r--r--test/integration/targets/uri/tasks/main.yml17
-rw-r--r--test/integration/targets/var_blending/aliases1
-rw-r--r--test/integration/targets/var_precedence/aliases1
-rw-r--r--test/integration/targets/var_reserved/aliases1
-rw-r--r--test/integration/targets/var_templating/aliases1
-rw-r--r--test/integration/targets/want_json_modules_posix/aliases1
-rw-r--r--test/integration/targets/yaml_parsing/aliases1
-rw-r--r--test/lib/ansible_test/_data/completion/docker.txt27
-rw-r--r--test/lib/ansible_test/_data/completion/network.txt4
-rw-r--r--test/lib/ansible_test/_data/completion/remote.txt20
-rw-r--r--test/lib/ansible_test/_data/completion/windows.txt8
-rw-r--r--test/lib/ansible_test/_data/cryptography-constraints.txt3
-rw-r--r--test/lib/ansible_test/_data/inventory6
-rw-r--r--test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml21
-rw-r--r--test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml8
-rw-r--r--test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml3
-rw-r--r--test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml3
-rw-r--r--test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml23
-rw-r--r--test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml12
-rw-r--r--test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml11
-rw-r--r--test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml21
-rw-r--r--test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml3
-rw-r--r--test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml3
-rw-r--r--test/lib/ansible_test/_data/requirements/ansible-test.txt6
-rw-r--r--test/lib/ansible_test/_data/requirements/ansible.txt (renamed from test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt)0
-rw-r--r--test/lib/ansible_test/_data/requirements/constraints.txt9
-rw-r--r--test/lib/ansible_test/_data/requirements/coverage.txt1
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt3
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt39
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt1
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt1
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt1
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt1
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/integration.txt7
-rw-r--r--test/lib/ansible_test/_data/requirements/network-integration.txt7
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt10
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.changelog.txt8
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.import.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt2
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 (renamed from test/lib/ansible_test/_data/requirements/sanity.ps1)1
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.pylint.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt2
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt7
-rw-r--r--test/lib/ansible_test/_data/requirements/sanity.yamllint.txt4
-rw-r--r--test/lib/ansible_test/_data/requirements/units.txt5
-rw-r--r--test/lib/ansible_test/_data/requirements/windows-integration.txt6
-rw-r--r--test/lib/ansible_test/_internal/__init__.py96
-rw-r--r--test/lib/ansible_test/_internal/ansible_util.py93
-rw-r--r--test/lib/ansible_test/_internal/become.py52
-rw-r--r--test/lib/ansible_test/_internal/bootstrap.py91
-rw-r--r--test/lib/ansible_test/_internal/cache.py33
-rw-r--r--test/lib/ansible_test/_internal/ci/__init__.py35
-rw-r--r--test/lib/ansible_test/_internal/ci/azp.py15
-rw-r--r--test/lib/ansible_test/_internal/ci/local.py16
-rw-r--r--test/lib/ansible_test/_internal/classification/__init__.py37
-rw-r--r--test/lib/ansible_test/_internal/classification/common.py29
-rw-r--r--test/lib/ansible_test/_internal/classification/csharp.py8
-rw-r--r--test/lib/ansible_test/_internal/classification/powershell.py8
-rw-r--r--test/lib/ansible_test/_internal/classification/python.py16
-rw-r--r--test/lib/ansible_test/_internal/cli.py1216
-rw-r--r--test/lib/ansible_test/_internal/cli/__init__.py55
-rw-r--r--test/lib/ansible_test/_internal/cli/actions.py90
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/__init__.py263
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/actions.py18
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py124
-rw-r--r--test/lib/ansible_test/_internal/cli/argparsing/parsers.py581
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/__init__.py240
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py85
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py28
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py48
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py49
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py48
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py76
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py49
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py65
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/combine.py48
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/erase.py36
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/html.py42
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/report.py60
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/coverage/xml.py42
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/env.py63
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/integration/__init__.py161
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/integration/network.py81
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/integration/posix.py50
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/integration/windows.py50
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/sanity.py113
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/shell.py47
-rw-r--r--test/lib/ansible_test/_internal/cli/commands/units.py65
-rw-r--r--test/lib/ansible_test/_internal/cli/compat.py478
-rw-r--r--test/lib/ansible_test/_internal/cli/completers.py26
-rw-r--r--test/lib/ansible_test/_internal/cli/converters.py20
-rw-r--r--test/lib/ansible_test/_internal/cli/environments.py567
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/__init__.py303
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py73
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/helpers.py59
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py310
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py213
-rw-r--r--test/lib/ansible_test/_internal/cli/parsers/value_parsers.py172
-rw-r--r--test/lib/ansible_test/_internal/commands/__init__.py2
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/__init__.py57
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py8
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py10
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py19
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py21
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py21
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py27
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py21
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/combine.py40
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/erase.py16
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/html.py10
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/report.py12
-rw-r--r--test/lib/ansible_test/_internal/commands/coverage/xml.py10
-rw-r--r--test/lib/ansible_test/_internal/commands/env/__init__.py127
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/__init__.py793
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py51
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/acme.py14
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/aws.py24
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/azure.py30
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py12
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/cs.py31
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py12
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py14
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py10
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py12
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py17
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py13
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/nios.py14
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py10
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py14
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py12
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py18
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py12
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/coverage.py416
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/filters.py273
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/network.py201
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/posix.py31
-rw-r--r--test/lib/ansible_test/_internal/commands/integration/windows.py271
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/__init__.py391
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py36
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py19
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/compile.py33
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/ignores.py13
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/import.py146
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py56
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pep8.py26
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pslint.py24
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/pylint.py44
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py16
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/shellcheck.py18
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/validate_modules.py32
-rw-r--r--test/lib/ansible_test/_internal/commands/sanity/yamllint.py53
-rw-r--r--test/lib/ansible_test/_internal/commands/shell/__init__.py87
-rw-r--r--test/lib/ansible_test/_internal/commands/units/__init__.py136
-rw-r--r--test/lib/ansible_test/_internal/compat/__init__.py2
-rw-r--r--test/lib/ansible_test/_internal/compat/packaging.py16
-rw-r--r--test/lib/ansible_test/_internal/compat/yaml.py3
-rw-r--r--test/lib/ansible_test/_internal/completion.py226
-rw-r--r--test/lib/ansible_test/_internal/config.py263
-rw-r--r--test/lib/ansible_test/_internal/connections.py243
l---------[-rw-r--r--]test/lib/ansible_test/_internal/constants.py11
-rw-r--r--test/lib/ansible_test/_internal/containers.py156
-rw-r--r--test/lib/ansible_test/_internal/content_config.py24
-rw-r--r--test/lib/ansible_test/_internal/core_ci.py125
-rw-r--r--test/lib/ansible_test/_internal/coverage_util.py117
-rw-r--r--test/lib/ansible_test/_internal/data.py70
-rw-r--r--test/lib/ansible_test/_internal/delegation.py625
-rw-r--r--test/lib/ansible_test/_internal/diff.py6
-rw-r--r--test/lib/ansible_test/_internal/docker_util.py128
-rw-r--r--test/lib/ansible_test/_internal/encoding.py5
-rw-r--r--test/lib/ansible_test/_internal/executor.py643
-rw-r--r--test/lib/ansible_test/_internal/git.py6
-rw-r--r--test/lib/ansible_test/_internal/host_configs.py491
-rw-r--r--test/lib/ansible_test/_internal/host_profiles.py755
-rw-r--r--test/lib/ansible_test/_internal/http.py26
-rw-r--r--test/lib/ansible_test/_internal/init.py3
-rw-r--r--test/lib/ansible_test/_internal/inventory.py170
-rw-r--r--test/lib/ansible_test/_internal/io.py13
l---------test/lib/ansible_test/_internal/junit_xml.py1
-rw-r--r--test/lib/ansible_test/_internal/manage_ci.py436
-rw-r--r--test/lib/ansible_test/_internal/metadata.py9
-rw-r--r--test/lib/ansible_test/_internal/payload.py8
-rw-r--r--test/lib/ansible_test/_internal/provider/__init__.py11
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/__init__.py8
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/ansible.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/layout/collection.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/source/__init__.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/source/git.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/source/installed.py6
-rw-r--r--test/lib/ansible_test/_internal/provider/source/unversioned.py6
-rw-r--r--test/lib/ansible_test/_internal/provisioning.py184
-rw-r--r--test/lib/ansible_test/_internal/pypi_proxy.py178
-rw-r--r--test/lib/ansible_test/_internal/python_requirements.py482
-rw-r--r--test/lib/ansible_test/_internal/ssh.py41
-rw-r--r--test/lib/ansible_test/_internal/target.py115
-rw-r--r--test/lib/ansible_test/_internal/test.py133
-rw-r--r--test/lib/ansible_test/_internal/thread.py32
-rw-r--r--test/lib/ansible_test/_internal/timeout.py93
-rw-r--r--test/lib/ansible_test/_internal/types.py32
-rw-r--r--test/lib/ansible_test/_internal/util.py271
-rw-r--r--test/lib/ansible_test/_internal/util_common.py328
-rw-r--r--test/lib/ansible_test/_internal/venv.py109
-rw-r--r--test/lib/ansible_test/_util/__init__.py3
-rwxr-xr-xtest/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py28
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py5
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py2
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py5
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py2
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py5
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps11
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg (renamed from test/lib/ansible_test/_util/controller/sanity/pylint/config/sanity.cfg)13
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg6
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg55
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py7
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py2
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py1
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py2
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps11
-rw-r--r--test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py18
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/tools/sslcheck.py1
-rwxr-xr-xtest/lib/ansible_test/_util/controller/tools/versions.py20
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py1
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/controller/tools/yamlcheck.py3
-rw-r--r--test/lib/ansible_test/_util/target/__init__.py3
-rwxr-xr-xtest/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py42
-rw-r--r--test/lib/ansible_test/_util/target/common/__init__.py3
-rw-r--r--test/lib/ansible_test/_util/target/common/constants.py45
-rw-r--r--[-rwxr-xr-x]test/lib/ansible_test/_util/target/sanity/compile/compile.py2
-rw-r--r--test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py1
-rw-r--r--test/lib/ansible_test/_util/target/setup/bootstrap.sh323
-rw-r--r--test/lib/ansible_test/_util/target/setup/docker.sh13
-rw-r--r--test/lib/ansible_test/_util/target/setup/quiet_pip.py (renamed from test/lib/ansible_test/_util/controller/tools/quiet_pip.py)1
-rw-r--r--test/lib/ansible_test/_util/target/setup/remote.sh185
-rw-r--r--test/lib/ansible_test/_util/target/setup/requirements.py252
-rw-r--r--test/lib/ansible_test/_util/target/setup/ssh-keys.sh35
-rw-r--r--test/sanity/code-smell/ansible-requirements.json2
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/ansible-requirements.py3
-rw-r--r--test/sanity/code-smell/ansible-test-future-boilerplate.json9
-rw-r--r--test/sanity/code-smell/ansible-test-future-boilerplate.py44
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/botmeta.py1
-rw-r--r--test/sanity/code-smell/botmeta.requirements.txt2
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/configure-remoting-ps1.py1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/deprecated-config.py1
-rw-r--r--test/sanity/code-smell/deprecated-config.requirements.txt7
-rw-r--r--test/sanity/code-smell/docs-build.json1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/docs-build.py1
-rw-r--r--test/sanity/code-smell/docs-build.requirements.txt56
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/no-unwanted-files.py1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/obsolete-files.py1
-rw-r--r--test/sanity/code-smell/package-data.json1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/package-data.py6
-rw-r--r--test/sanity/code-smell/package-data.requirements.txt20
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/release-names.py1
-rw-r--r--test/sanity/code-smell/release-names.requirements.txt2
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/required-and-default-attributes.py1
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/rstcheck.py1
-rw-r--r--test/sanity/code-smell/rstcheck.requirements.txt29
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/test-constraints.py15
-rw-r--r--[-rwxr-xr-x]test/sanity/code-smell/update-bundled.py1
-rw-r--r--test/sanity/code-smell/update-bundled.requirements.txt5
-rw-r--r--test/sanity/ignore.txt3
-rw-r--r--test/units/requirements.txt7
-rwxr-xr-xtest/utils/shippable/incidental/network.sh2
-rwxr-xr-xtest/utils/shippable/incidental/windows.sh4
-rwxr-xr-xtest/utils/shippable/sanity.sh2
-rwxr-xr-xtest/utils/shippable/windows.sh4
517 files changed, 12977 insertions, 6683 deletions
diff --git a/MANIFEST.in b/MANIFEST.in
index 9a78e75e13..5fbecb4597 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -25,7 +25,7 @@ recursive-include packaging *
recursive-include test/ansible_test *.py Makefile
recursive-include test/integration *
recursive-include test/lib/ansible_test/config *.yml *.template
-recursive-include test/lib/ansible_test/_data *.cfg *.ini *.ps1 *.txt *.yml coveragerc inventory
+recursive-include test/lib/ansible_test/_data *.cfg *.ini *.ps1 *.txt *.yml coveragerc
recursive-include test/lib/ansible_test/_util *.cfg *.json *.ps1 *.psd1 *.py *.sh *.txt *.yml
recursive-include test/lib/ansible_test/_util/target/injector ansible ansible-config ansible-connection ansible-console ansible-doc ansible-galaxy ansible-inventory ansible-playbook ansible-pull ansible-test ansible-vault pytest
recursive-include test/lib/ansible_test/_util/controller/sanity/validate-modules validate-modules
diff --git a/bin/ansible-test b/bin/ansible-test
index a540045f64..1f5fb214ef 120000
--- a/bin/ansible-test
+++ b/bin/ansible-test
@@ -1 +1 @@
-../test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py \ No newline at end of file
+../test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py \ No newline at end of file
diff --git a/changelogs/fragments/ansible-test-split-controller-target.yaml b/changelogs/fragments/ansible-test-split-controller-target.yaml
new file mode 100644
index 0000000000..0c5ac9f142
--- /dev/null
+++ b/changelogs/fragments/ansible-test-split-controller-target.yaml
@@ -0,0 +1,38 @@
+breaking_changes:
+ - ansible-test - Automatic installation of requirements for "cloud" test plugins no longer occurs. The affected test plugins are
+ ``aws``, ``azure``, ``cs``, ``hcloud``, ``nios``, ``opennebula``, ``openshift`` and ``vcenter``. Collections should instead use one of the
+ supported integration test requirements files, such as the ``tests/integration/requirements.txt`` file.
+major_changes:
+ - ansible-test - Python 3.8 - 3.10 are now required to run ``ansible-test``, thus matching the Ansible controller Python requirements.
+ Older Python versions (2.6 - 2.7 and 3.5 - 3.10) can still be the target for relevant tests.
+ - ansible-test - New ``--controller`` and ``--target`` / ``--target-python`` options have been added to allow more control over test environments.
+ - ansible-test - Integration tests run with the ``integration`` command can now be executed on two separate hosts instead of always running on the controller.
+ The target host can be one provided by ``ansible-test`` or by the user, as long as it is accessible using SSH.
+ - ansible-test - Collections can now specify controller and target specific integration test requirements and constraints.
+ If provided, they take precedence over the previously available requirements and constraints files.
+ - ansible-test - Sanity tests always run in isolated Python virtual environments specific to the requirements of each test. The environments are cached.
+ - ansible-test - Sanity tests now use fully pinned requirements that are independent of each other and other test types.
+ - ansible-test - Sanity tests are now separated into two categories, controller and target. All tests except ``import`` and ``compile`` are controller tests.
+ The controller tests always run using the same Python version used to run ``ansible-test``.
+ The target tests use the Python version(s) specified by the user, or all available Python versions.
+ - junit callback - The ``junit_xml`` and ``ordereddict`` Python modules are no longer required to use the ``junit`` callback plugin.
+minor_changes:
+ - ansible-test - Using an unknown ``--docker`` or ``--remote`` environment now requires specifying a Python version.
+ - ansible-test - The ``--docker-keep-git`` option (used only for testing ansible-core) has been renamed to ``--keep-git``.
+ - ansible-test - A new ``base`` test container is available.
+ It is similar to the ``default`` test container, but contains no pre-installed Python packages other than ``pip`` and its dependencies.
+ - ansible-test - Default settings are now applied to unknown versions of known ``--remote`` platforms.
+ - ansible-test - Constraints provided by ``ansible-test`` for Python package installs have been reduced.
+ - ansible-test - Command line help has been updated to hide the ``--remote`` option (and related options) when the user lacks an API key to use the feature.
+ - ansible-test - The ``--python`` option can be used without another delegation option such as the ``--venv`` or ``--docker`` options.
+ - ansible-test - Environment checking (``pip``, ``python``, ``~/.ssh/known_hosts``, etc.) is no longer performed when running integration tests.
+ - ansible-test - Most scripts used internally by ``ansible-test`` no longer have a shebang or the executable bit set.
+bugfixes:
+ - ansible-test - Tab completion after options like ``--docker`` which accept an optional argument will no longer provide incorrect completions.
+ - ansible-test - The ``--python`` and ``--venv`` options are no longer ignored by some commands, such as ``coverage``.
+known_issues:
+ - ansible-test - Tab completion anywhere other than the end of the command with the new composite options will provide incorrect results.
+ See https://github.com/kislyuk/argcomplete/issues/351 for additional details.
+deprecated_features:
+ - ansible-test - The ``--docker-no-pull`` option is deprecated and has no effect.
+ - ansible-test - The ``--no-pip-check`` option is deprecated and has no effect.
diff --git a/docs/docsite/rst/dev_guide/testing/sanity/ansible-test-future-boilerplate.rst b/docs/docsite/rst/dev_guide/testing/sanity/ansible-test-future-boilerplate.rst
new file mode 100644
index 0000000000..43dfe32465
--- /dev/null
+++ b/docs/docsite/rst/dev_guide/testing/sanity/ansible-test-future-boilerplate.rst
@@ -0,0 +1,8 @@
+ansible-test-future-boilerplate
+===============================
+
+The ``_internal`` code for ``ansible-test`` requires the following ``__future__`` import:
+
+.. code-block:: python
+
+ from __future__ import annotations
diff --git a/docs/docsite/rst/dev_guide/testing_sanity.rst b/docs/docsite/rst/dev_guide/testing_sanity.rst
index a4f99eddec..a97dec3f22 100644
--- a/docs/docsite/rst/dev_guide/testing_sanity.rst
+++ b/docs/docsite/rst/dev_guide/testing_sanity.rst
@@ -24,7 +24,7 @@ How to run
.. note::
When using docker and the ``--base-branch`` argument,
- also use the ``--docker-keep-git`` argument to avoid git related errors.
+ also use the ``--keep-git`` argument to avoid git related errors.
.. code:: shell
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index cf69c2daff..c0760fa3f2 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -347,7 +347,7 @@ if __name__ == '__main__':
'''
ANSIBALLZ_COVERAGE_TEMPLATE = '''
- os.environ['COVERAGE_FILE'] = '%(coverage_output)s'
+ os.environ['COVERAGE_FILE'] = '%(coverage_output)s=python-%%s=coverage' %% '.'.join(str(v) for v in sys.version_info[:2])
import atexit
diff --git a/lib/ansible/plugins/callback/junit.py b/lib/ansible/plugins/callback/junit.py
index 8bd1ed6ad2..73284a33ae 100644
--- a/lib/ansible/plugins/callback/junit.py
+++ b/lib/ansible/plugins/callback/junit.py
@@ -73,8 +73,7 @@ DOCUMENTATION = '''
env:
- name: JUNIT_TEST_CASE_PREFIX
requirements:
- - whitelist in configuration
- - junit_xml (python lib)
+ - enable in configuration
'''
import os
@@ -84,31 +83,13 @@ import re
from ansible import constants as C
from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.callback import CallbackBase
-
-try:
- from junit_xml import TestSuite, TestCase
-
- # the junit_xml API is changing in version 2.0.0
- # TestSuite.to_xml_string is being replaced with to_xml_report_string
- # see: https://github.com/kyrus/python-junit-xml/blob/63db26da353790500642fd02cae1543eb41aab8b/junit_xml/__init__.py#L249-L261
- try:
- from junit_xml import to_xml_report_string
- except ImportError:
- to_xml_report_string = TestSuite.to_xml_string
-
- HAS_JUNIT_XML = True
-except ImportError:
- HAS_JUNIT_XML = False
-
-try:
- from collections import OrderedDict
- HAS_ORDERED_DICT = True
-except ImportError:
- try:
- from ordereddict import OrderedDict
- HAS_ORDERED_DICT = True
- except ImportError:
- HAS_ORDERED_DICT = False
+from ansible.utils._junit_xml import (
+ TestCase,
+ TestError,
+ TestFailure,
+ TestSuite,
+ TestSuites,
+)
class CallbackModule(CallbackBase):
@@ -142,10 +123,6 @@ class CallbackModule(CallbackBase):
JUNIT_TEST_CASE_PREFIX (optional): Consider a task only as test case if it has this value as prefix. Additionaly failing tasks are recorded as failed
test cases.
Default: <empty>
-
- Requires:
- junit_xml
-
"""
CALLBACK_VERSION = 2.0
@@ -171,17 +148,7 @@ class CallbackModule(CallbackBase):
self.disabled = False
- if not HAS_JUNIT_XML:
- self.disabled = True
- self._display.warning('The `junit_xml` python module is not installed. '
- 'Disabling the `junit` callback plugin.')
-
- if HAS_ORDERED_DICT:
- self._task_data = OrderedDict()
- else:
- self.disabled = True
- self._display.warning('The `ordereddict` python module is not installed. '
- 'Disabling the `junit` callback plugin.')
+ self._task_data = {}
if not os.path.exists(self._output_dir):
os.makedirs(self._output_dir)
@@ -250,7 +217,7 @@ class CallbackModule(CallbackBase):
junit_classname = re.sub(r'\.yml:[0-9]+$', '', junit_classname)
if host_data.status == 'included':
- return TestCase(name, junit_classname, duration, host_data.result)
+ return TestCase(name=name, classname=junit_classname, time=duration, system_out=str(host_data.result))
res = host_data.result._result
rc = res.get('rc', 0)
@@ -258,26 +225,26 @@ class CallbackModule(CallbackBase):
dump = self._cleanse_string(dump)
if host_data.status == 'ok':
- return TestCase(name, junit_classname, duration, dump)
+ return TestCase(name=name, classname=junit_classname, time=duration, system_out=dump)
- test_case = TestCase(name, junit_classname, duration)
+ test_case = TestCase(name=name, classname=junit_classname, time=duration)
if host_data.status == 'failed':
if 'exception' in res:
message = res['exception'].strip().split('\n')[-1]
output = res['exception']
- test_case.add_error_info(message, output)
+ test_case.errors.append(TestError(message=message, output=output))
elif 'msg' in res:
message = res['msg']
- test_case.add_failure_info(message, dump)
+ test_case.failures.append(TestFailure(message=message, output=dump))
else:
- test_case.add_failure_info('rc=%s' % rc, dump)
+ test_case.failures.append(TestFailure(message='rc=%s' % rc, output=dump))
elif host_data.status == 'skipped':
if 'skip_reason' in res:
message = res['skip_reason']
else:
message = 'skipped'
- test_case.add_skipped_info(message)
+ test_case.skipped = message
return test_case
@@ -297,8 +264,9 @@ class CallbackModule(CallbackBase):
for host_uuid, host_data in task_data.host_data.items():
test_cases.append(self._build_test_case(task_data, host_data))
- test_suite = TestSuite(self._playbook_name, test_cases)
- report = to_xml_report_string([test_suite])
+ test_suite = TestSuite(name=self._playbook_name, cases=test_cases)
+ test_suites = TestSuites(suites=[test_suite])
+ report = test_suites.to_pretty_xml()
output_file = os.path.join(self._output_dir, '%s-%s.xml' % (self._playbook_name, time.time()))
@@ -354,7 +322,7 @@ class TaskData:
self.path = path
self.play = play
self.start = None
- self.host_data = OrderedDict()
+ self.host_data = {}
self.start = time.time()
self.action = action
diff --git a/lib/ansible/utils/_junit_xml.py b/lib/ansible/utils/_junit_xml.py
new file mode 100644
index 0000000000..a21fed4cbf
--- /dev/null
+++ b/lib/ansible/utils/_junit_xml.py
@@ -0,0 +1,268 @@
+"""
+Dataclasses for creating JUnit XML files.
+See: https://github.com/junit-team/junit5/blob/main/platform-tests/src/test/resources/jenkins-junit.xsd
+"""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import datetime
+import decimal
+import typing as t
+
+from xml.dom import minidom
+# noinspection PyPep8Naming
+from xml.etree import ElementTree as ET
+
+
+@dataclasses.dataclass
+class TestResult(metaclass=abc.ABCMeta):
+ """Base class for the result of a test case."""
+ output: t.Optional[str] = None
+ message: t.Optional[str] = None
+ type: t.Optional[str] = None
+
+ def __post_init__(self):
+ if self.type is None:
+ self.type = self.tag
+
+ @property
+ @abc.abstractmethod
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+
+ def get_attributes(self) -> t.Dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ message=self.message,
+ type=self.type,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element(self.tag, self.get_attributes())
+ element.text = self.output
+
+ return element
+
+
+@dataclasses.dataclass
+class TestFailure(TestResult):
+ """Failure info for a test case."""
+ @property
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+ return 'failure'
+
+
+@dataclasses.dataclass
+class TestError(TestResult):
+ """Error info for a test case."""
+ @property
+ def tag(self) -> str:
+ """Tag name for the XML element created by this result type."""
+ return 'error'
+
+
+@dataclasses.dataclass
+class TestCase:
+ """An individual test case."""
+ name: str
+ assertions: t.Optional[int] = None
+ classname: t.Optional[str] = None
+ status: t.Optional[str] = None
+ time: t.Optional[decimal.Decimal] = None
+
+ errors: t.List[TestError] = dataclasses.field(default_factory=list)
+ failures: t.List[TestFailure] = dataclasses.field(default_factory=list)
+ skipped: t.Optional[str] = None
+ system_out: t.Optional[str] = None
+ system_err: t.Optional[str] = None
+
+ is_disabled: bool = False
+
+ @property
+ def is_failure(self) -> bool:
+ """True if the test case contains failure info."""
+ return bool(self.failures)
+
+ @property
+ def is_error(self) -> bool:
+ """True if the test case contains error info."""
+ return bool(self.errors)
+
+ @property
+ def is_skipped(self) -> bool:
+ """True if the test case was skipped."""
+ return bool(self.skipped)
+
+ def get_attributes(self) -> t.Dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ assertions=self.assertions,
+ classname=self.classname,
+ name=self.name,
+ status=self.status,
+ time=self.time,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testcase', self.get_attributes())
+
+ if self.skipped:
+ ET.SubElement(element, 'skipped').text = self.skipped
+
+ element.extend([error.get_xml_element() for error in self.errors])
+ element.extend([failure.get_xml_element() for failure in self.failures])
+
+ if self.system_out:
+ ET.SubElement(element, 'system-out').text = self.system_out
+
+ if self.system_err:
+ ET.SubElement(element, 'system-err').text = self.system_err
+
+ return element
+
+
+@dataclasses.dataclass
+class TestSuite:
+ """A collection of test cases."""
+ name: str
+ hostname: t.Optional[str] = None
+ id: t.Optional[str] = None
+ package: t.Optional[str] = None
+ timestamp: t.Optional[datetime.datetime] = None
+
+ properties: t.Dict[str, str] = dataclasses.field(default_factory=dict)
+ cases: t.List[TestCase] = dataclasses.field(default_factory=list)
+ system_out: t.Optional[str] = None
+ system_err: t.Optional[str] = None
+
+ @property
+ def disabled(self) -> int:
+ """The number of disabled test cases."""
+ return sum(case.is_disabled for case in self.cases)
+
+ @property
+ def errors(self) -> int:
+ """The number of test cases containing error info."""
+ return sum(case.is_error for case in self.cases)
+
+ @property
+ def failures(self) -> int:
+ """The number of test cases containing failure info."""
+ return sum(case.is_failure for case in self.cases)
+
+ @property
+ def skipped(self) -> int:
+ """The number of test cases containing skipped info."""
+ return sum(case.is_skipped for case in self.cases)
+
+ @property
+ def tests(self) -> int:
+ """The number of test cases."""
+ return len(self.cases)
+
+ @property
+ def time(self) -> decimal.Decimal:
+ """The total time from all test cases."""
+ return sum(case.time for case in self.cases if case.time)
+
+ def get_attributes(self) -> t.Dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ disabled=self.disabled,
+ errors=self.errors,
+ failures=self.failures,
+ hostname=self.hostname,
+ id=self.id,
+ name=self.name,
+ package=self.package,
+ skipped=self.skipped,
+ tests=self.tests,
+ time=self.time,
+ timestamp=self.timestamp.isoformat(timespec='seconds') if self.timestamp else None,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testsuite', self.get_attributes())
+
+ if self.properties:
+ ET.SubElement(element, 'properties').extend([ET.Element('property', dict(name=name, value=value)) for name, value in self.properties.items()])
+
+ element.extend([test_case.get_xml_element() for test_case in self.cases])
+
+ if self.system_out:
+ ET.SubElement(element, 'system-out').text = self.system_out
+
+ if self.system_err:
+ ET.SubElement(element, 'system-err').text = self.system_err
+
+ return element
+
+
+@dataclasses.dataclass
+class TestSuites:
+ """A collection of test suites."""
+ name: t.Optional[str] = None
+
+ suites: t.List[TestSuite] = dataclasses.field(default_factory=list)
+
+ @property
+ def disabled(self) -> int:
+ """The number of disabled test cases."""
+ return sum(suite.disabled for suite in self.suites)
+
+ @property
+ def errors(self) -> int:
+ """The number of test cases containing error info."""
+ return sum(suite.errors for suite in self.suites)
+
+ @property
+ def failures(self) -> int:
+ """The number of test cases containing failure info."""
+ return sum(suite.failures for suite in self.suites)
+
+ @property
+ def tests(self) -> int:
+ """The number of test cases."""
+ return sum(suite.tests for suite in self.suites)
+
+ @property
+ def time(self) -> decimal.Decimal:
+ """The total time from all test cases."""
+ return sum(suite.time for suite in self.suites)
+
+ def get_attributes(self) -> t.Dict[str, str]:
+ """Return a dictionary of attributes for this instance."""
+ return _attributes(
+ disabled=self.disabled,
+ errors=self.errors,
+ failures=self.failures,
+ name=self.name,
+ tests=self.tests,
+ time=self.time,
+ )
+
+ def get_xml_element(self) -> ET.Element:
+ """Return an XML element representing this instance."""
+ element = ET.Element('testsuites', self.get_attributes())
+ element.extend([suite.get_xml_element() for suite in self.suites])
+
+ return element
+
+ def to_pretty_xml(self) -> str:
+ """Return a pretty formatted XML string representing this instance."""
+ return _pretty_xml(self.get_xml_element())
+
+
+def _attributes(**kwargs) -> t.Dict[str, str]:
+ """Return the given kwargs as a dictionary with values converted to strings. Items with a value of None will be omitted."""
+ return {key: str(value) for key, value in kwargs.items() if value is not None}
+
+
+def _pretty_xml(element: ET.Element) -> str:
+ """Return a pretty formatted XML string representing the given element."""
+ return minidom.parseString(ET.tostring(element, encoding='unicode')).toprettyxml()
diff --git a/test/integration/targets/adhoc/aliases b/test/integration/targets/adhoc/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/adhoc/aliases
+++ b/test/integration/targets/adhoc/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/ansiballz_python/aliases b/test/integration/targets/ansiballz_python/aliases
index f8e28c7e46..e2c8fd3956 100644
--- a/test/integration/targets/ansiballz_python/aliases
+++ b/test/integration/targets/ansiballz_python/aliases
@@ -1,2 +1,3 @@
shippable/posix/group1
skip/aix
+context/target
diff --git a/test/integration/targets/ansible-doc/aliases b/test/integration/targets/ansible-doc/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/ansible-doc/aliases
+++ b/test/integration/targets/ansible-doc/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/ansible-galaxy-collection-scm/aliases b/test/integration/targets/ansible-galaxy-collection-scm/aliases
index 9c34b36064..498fedd558 100644
--- a/test/integration/targets/ansible-galaxy-collection-scm/aliases
+++ b/test/integration/targets/ansible-galaxy-collection-scm/aliases
@@ -1,3 +1,2 @@
shippable/posix/group4
-skip/aix
-skip/python2.6 # ansible-galaxy uses tarfile with features not available until 2.7
+context/controller
diff --git a/test/integration/targets/ansible-galaxy-collection/aliases b/test/integration/targets/ansible-galaxy-collection/aliases
index e501bce588..6c57208aec 100644
--- a/test/integration/targets/ansible-galaxy-collection/aliases
+++ b/test/integration/targets/ansible-galaxy-collection/aliases
@@ -1,3 +1,4 @@
shippable/galaxy/group1
shippable/galaxy/smoketest
cloud/galaxy
+context/controller
diff --git a/test/integration/targets/ansible-galaxy-role/aliases b/test/integration/targets/ansible-galaxy-role/aliases
index 62548acd35..498fedd558 100644
--- a/test/integration/targets/ansible-galaxy-role/aliases
+++ b/test/integration/targets/ansible-galaxy-role/aliases
@@ -1,2 +1,2 @@
shippable/posix/group4
-skip/python2.6 # build uses tarfile with features not available until 2.7
+context/controller
diff --git a/test/integration/targets/ansible-galaxy/aliases b/test/integration/targets/ansible-galaxy/aliases
index 48ed7d608f..275bdbfd49 100644
--- a/test/integration/targets/ansible-galaxy/aliases
+++ b/test/integration/targets/ansible-galaxy/aliases
@@ -1,4 +1,3 @@
destructive
shippable/posix/group4
-skip/python2.6 # build uses tarfile with features not available until 2.7
-skip/aix
+context/controller
diff --git a/test/integration/targets/ansible-inventory/aliases b/test/integration/targets/ansible-inventory/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/ansible-inventory/aliases
+++ b/test/integration/targets/ansible-inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/ansible-pull/aliases b/test/integration/targets/ansible-pull/aliases
index 757c99661d..8278ec8bcc 100644
--- a/test/integration/targets/ansible-pull/aliases
+++ b/test/integration/targets/ansible-pull/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller
diff --git a/test/integration/targets/ansible-runner/aliases b/test/integration/targets/ansible-runner/aliases
index 42d2022b81..17ae2d5eeb 100644
--- a/test/integration/targets/ansible-runner/aliases
+++ b/test/integration/targets/ansible-runner/aliases
@@ -1,6 +1,5 @@
shippable/posix/group3
-skip/python2 # ansible-runner is for controller and deprecated python2 support
-skip/aix
+context/controller
skip/osx
skip/macos
skip/freebsd
diff --git a/test/integration/targets/ansible-test-cloud-acme/aliases b/test/integration/targets/ansible-test-cloud-acme/aliases
index 4379096ebe..db3ab68041 100644
--- a/test/integration/targets/ansible-test-cloud-acme/aliases
+++ b/test/integration/targets/ansible-test-cloud-acme/aliases
@@ -1,2 +1,3 @@
cloud/acme
shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-cs/aliases b/test/integration/targets/ansible-test-cloud-cs/aliases
index fb8b22c2cc..cf43ff1efa 100644
--- a/test/integration/targets/ansible-test-cloud-cs/aliases
+++ b/test/integration/targets/ansible-test-cloud-cs/aliases
@@ -1,2 +1,3 @@
cloud/cs
shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-foreman/aliases b/test/integration/targets/ansible-test-cloud-foreman/aliases
index b713713bac..a4bdcea66e 100644
--- a/test/integration/targets/ansible-test-cloud-foreman/aliases
+++ b/test/integration/targets/ansible-test-cloud-foreman/aliases
@@ -1,2 +1,3 @@
cloud/foreman
shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-galaxy/aliases b/test/integration/targets/ansible-test-cloud-galaxy/aliases
index e501bce588..6c57208aec 100644
--- a/test/integration/targets/ansible-test-cloud-galaxy/aliases
+++ b/test/integration/targets/ansible-test-cloud-galaxy/aliases
@@ -1,3 +1,4 @@
shippable/galaxy/group1
shippable/galaxy/smoketest
cloud/galaxy
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-httptester-windows/aliases b/test/integration/targets/ansible-test-cloud-httptester-windows/aliases
index 761914ce54..f45a162363 100644
--- a/test/integration/targets/ansible-test-cloud-httptester-windows/aliases
+++ b/test/integration/targets/ansible-test-cloud-httptester-windows/aliases
@@ -1,3 +1,4 @@
cloud/httptester
windows
shippable/windows/group1
+context/target
diff --git a/test/integration/targets/ansible-test-cloud-httptester/aliases b/test/integration/targets/ansible-test-cloud-httptester/aliases
index a918da9d56..eb5f70805a 100644
--- a/test/integration/targets/ansible-test-cloud-httptester/aliases
+++ b/test/integration/targets/ansible-test-cloud-httptester/aliases
@@ -1,2 +1,3 @@
needs/httptester # using legacy alias for testing purposes
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-nios/aliases b/test/integration/targets/ansible-test-cloud-nios/aliases
index 82851da593..136344a979 100644
--- a/test/integration/targets/ansible-test-cloud-nios/aliases
+++ b/test/integration/targets/ansible-test-cloud-nios/aliases
@@ -1,2 +1,3 @@
cloud/nios
shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-openshift/aliases b/test/integration/targets/ansible-test-cloud-openshift/aliases
index efe41a903f..6e32db7b8f 100644
--- a/test/integration/targets/ansible-test-cloud-openshift/aliases
+++ b/test/integration/targets/ansible-test-cloud-openshift/aliases
@@ -1,3 +1,4 @@
cloud/openshift
shippable/generic/group1
disabled # disabled due to requirements conflict: botocore 1.20.6 has requirement urllib3<1.27,>=1.25.4, but you have urllib3 1.24.3.
+context/controller
diff --git a/test/integration/targets/ansible-test-cloud-vcenter/aliases b/test/integration/targets/ansible-test-cloud-vcenter/aliases
index 97d5a97a7f..0cd8ad209e 100644
--- a/test/integration/targets/ansible-test-cloud-vcenter/aliases
+++ b/test/integration/targets/ansible-test-cloud-vcenter/aliases
@@ -1,2 +1,3 @@
cloud/vcenter
shippable/generic/group1
+context/controller
diff --git a/test/integration/targets/ansible-test-docker/aliases b/test/integration/targets/ansible-test-docker/aliases
index d1284cf706..a862ab8b36 100644
--- a/test/integration/targets/ansible-test-docker/aliases
+++ b/test/integration/targets/ansible-test-docker/aliases
@@ -1 +1,2 @@
shippable/generic/group1 # Runs in the default test container so access to tools like pwsh
+context/controller
diff --git a/test/integration/targets/ansible-test/aliases b/test/integration/targets/ansible-test/aliases
index f8e28c7e46..13e01f0c94 100644
--- a/test/integration/targets/ansible-test/aliases
+++ b/test/integration/targets/ansible-test/aliases
@@ -1,2 +1,2 @@
shippable/posix/group1
-skip/aix
+context/controller
diff --git a/test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases b/test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases
new file mode 100644
index 0000000000..1af1cf90b6
--- /dev/null
+++ b/test/integration/targets/ansible-test/ansible_collections/ns/col_constraints/tests/integration/targets/constraints/aliases
@@ -0,0 +1 @@
+context/controller
diff --git a/test/integration/targets/ansible-vault/aliases b/test/integration/targets/ansible-vault/aliases
index 757c99661d..8278ec8bcc 100644
--- a/test/integration/targets/ansible-vault/aliases
+++ b/test/integration/targets/ansible-vault/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller
diff --git a/test/integration/targets/ansible-vault/single_vault_as_string.yml b/test/integration/targets/ansible-vault/single_vault_as_string.yml
index 1eb17d04c2..ca147b0b3b 100644
--- a/test/integration/targets/ansible-vault/single_vault_as_string.yml
+++ b/test/integration/targets/ansible-vault/single_vault_as_string.yml
@@ -27,7 +27,7 @@
- vaulted_value|forceescape == 'foo bar'
- vaulted_value|first == 'f'
- "'%s'|format(vaulted_value) == 'foo bar'"
- - vaulted_value|indent(indentfirst=True) == ' foo bar'
+ - vaulted_value|indent(first=True) == ' foo bar'
- vaulted_value.split() == ['foo', 'bar']
- vaulted_value|join('-') == 'f-o-o- -b-a-r'
- vaulted_value|last == 'r'
diff --git a/test/integration/targets/ansible/aliases b/test/integration/targets/ansible/aliases
index f71c8117c7..498fedd558 100644
--- a/test/integration/targets/ansible/aliases
+++ b/test/integration/targets/ansible/aliases
@@ -1,2 +1,2 @@
shippable/posix/group4
-skip/aix
+context/controller
diff --git a/test/integration/targets/any_errors_fatal/aliases b/test/integration/targets/any_errors_fatal/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/any_errors_fatal/aliases
+++ b/test/integration/targets/any_errors_fatal/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/args/aliases b/test/integration/targets/args/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/args/aliases
+++ b/test/integration/targets/args/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/argspec/aliases b/test/integration/targets/argspec/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/argspec/aliases
+++ b/test/integration/targets/argspec/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/assert/aliases b/test/integration/targets/assert/aliases
index 757c99661d..101793239a 100644
--- a/test/integration/targets/assert/aliases
+++ b/test/integration/targets/assert/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/async_extra_data/aliases b/test/integration/targets/async_extra_data/aliases
index 70a7b7a9f3..7bd941e69a 100644
--- a/test/integration/targets/async_extra_data/aliases
+++ b/test/integration/targets/async_extra_data/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/target
diff --git a/test/integration/targets/become/aliases b/test/integration/targets/become/aliases
index 3a07aab32d..ad691e7d03 100644
--- a/test/integration/targets/become/aliases
+++ b/test/integration/targets/become/aliases
@@ -1,3 +1,4 @@
destructive
shippable/posix/group1
skip/aix
+context/target
diff --git a/test/integration/targets/become_su/aliases b/test/integration/targets/become_su/aliases
index 3a07aab32d..f3e45b5e28 100644
--- a/test/integration/targets/become_su/aliases
+++ b/test/integration/targets/become_su/aliases
@@ -1,3 +1,3 @@
destructive
shippable/posix/group1
-skip/aix
+context/controller
diff --git a/test/integration/targets/become_unprivileged/aliases b/test/integration/targets/become_unprivileged/aliases
index c96617f60c..c97d2f98f9 100644
--- a/test/integration/targets/become_unprivileged/aliases
+++ b/test/integration/targets/become_unprivileged/aliases
@@ -1,5 +1,5 @@
destructive
shippable/posix/group1
-skip/aix
needs/ssh
needs/root
+context/controller
diff --git a/test/integration/targets/binary/aliases b/test/integration/targets/binary/aliases
index 765b70da79..6452e6d4c4 100644
--- a/test/integration/targets/binary/aliases
+++ b/test/integration/targets/binary/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/target
diff --git a/test/integration/targets/binary_modules_posix/aliases b/test/integration/targets/binary_modules_posix/aliases
index 2c6e4a07c8..2cfe7ea80f 100644
--- a/test/integration/targets/binary_modules_posix/aliases
+++ b/test/integration/targets/binary_modules_posix/aliases
@@ -1,2 +1,3 @@
shippable/posix/group3
needs/target/binary_modules
+context/target
diff --git a/test/integration/targets/blocks/aliases b/test/integration/targets/blocks/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/blocks/aliases
+++ b/test/integration/targets/blocks/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/builtin_vars_prompt/aliases b/test/integration/targets/builtin_vars_prompt/aliases
index 4317d11262..4b94ea15e4 100644
--- a/test/integration/targets/builtin_vars_prompt/aliases
+++ b/test/integration/targets/builtin_vars_prompt/aliases
@@ -1,3 +1,4 @@
setup/always/setup_passlib
setup/always/setup_pexpect
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/callback_default/aliases b/test/integration/targets/callback_default/aliases
index f8e28c7e46..a6dafcf8cd 100644
--- a/test/integration/targets/callback_default/aliases
+++ b/test/integration/targets/callback_default/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/changed_when/aliases b/test/integration/targets/changed_when/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/changed_when/aliases
+++ b/test/integration/targets/changed_when/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/check_mode/aliases b/test/integration/targets/check_mode/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/check_mode/aliases
+++ b/test/integration/targets/check_mode/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/cli/aliases b/test/integration/targets/cli/aliases
index a8816e110d..c73d425342 100644
--- a/test/integration/targets/cli/aliases
+++ b/test/integration/targets/cli/aliases
@@ -3,3 +3,4 @@ needs/root
needs/ssh
needs/target/setup_pexpect
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/collections/runme.sh b/test/integration/targets/collections/runme.sh
index 50bc339c07..5a5261bb93 100755
--- a/test/integration/targets/collections/runme.sh
+++ b/test/integration/targets/collections/runme.sh
@@ -8,10 +8,6 @@ export ANSIBLE_GATHER_SUBSET=minimal
export ANSIBLE_HOST_PATTERN_MISMATCH=error
unset ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH
-# FUTURE: just use INVENTORY_PATH as-is once ansible-test sets the right dir
-ipath=../../$(basename "${INVENTORY_PATH:-../../inventory}")
-export INVENTORY_PATH="$ipath"
-
# ensure we can call collection module
ansible localhost -m testns.testcoll.testmodule
diff --git a/test/integration/targets/collections_plugin_namespace/aliases b/test/integration/targets/collections_plugin_namespace/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/collections_plugin_namespace/aliases
+++ b/test/integration/targets/collections_plugin_namespace/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/collections_runtime_pythonpath/aliases b/test/integration/targets/collections_runtime_pythonpath/aliases
index 0a772ad706..498fedd558 100644
--- a/test/integration/targets/collections_runtime_pythonpath/aliases
+++ b/test/integration/targets/collections_runtime_pythonpath/aliases
@@ -1,3 +1,2 @@
shippable/posix/group4
-skip/python2.6
-skip/aix
+context/controller
diff --git a/test/integration/targets/collections_runtime_pythonpath/runme.sh b/test/integration/targets/collections_runtime_pythonpath/runme.sh
index 654104a1e2..38c6c64f24 100755
--- a/test/integration/targets/collections_runtime_pythonpath/runme.sh
+++ b/test/integration/targets/collections_runtime_pythonpath/runme.sh
@@ -25,19 +25,19 @@ ansible \
=== Test that the module \
gets picked up if installed \
into site-packages ===
-python -m pip.__main__ install pep517
+python -m pip install pep517
( # Build a binary Python dist (a wheel) using PEP517:
cp -r ansible-collection-python-dist-boo "${OUTPUT_DIR}/"
cd "${OUTPUT_DIR}/ansible-collection-python-dist-boo"
python -m pep517.build --binary --out-dir dist .
)
# Install a pre-built dist with pip:
-python -m pip.__main__ install \
+python -m pip install \
--no-index \
-f "${OUTPUT_DIR}/ansible-collection-python-dist-boo/dist/" \
--only-binary=ansible-collections.python.dist \
ansible-collections.python.dist
-python -m pip.__main__ show ansible-collections.python.dist
+python -m pip show ansible-collections.python.dist
ansible \
-m python.dist.boo \
-a 'name=Frodo' \
diff --git a/test/integration/targets/command_nonexisting/aliases b/test/integration/targets/command_nonexisting/aliases
index e2dcf795c0..90ea9e1281 100644
--- a/test/integration/targets/command_nonexisting/aliases
+++ b/test/integration/targets/command_nonexisting/aliases
@@ -1 +1,2 @@
-shippable/posix/group2 \ No newline at end of file
+shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/command_shell/tasks/main.yml b/test/integration/targets/command_shell/tasks/main.yml
index 653b00594b..aad63c0dbd 100644
--- a/test/integration/targets/command_shell/tasks/main.yml
+++ b/test/integration/targets/command_shell/tasks/main.yml
@@ -504,11 +504,11 @@
when: ansible_facts.python_version is version('3', '>=')
- name: run command with strip
- command: '{{ ansible_playbook_python}} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
+ command: '{{ ansible_python_interpreter }} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
register: command_strip
- name: run command without strip
- command: '{{ ansible_playbook_python}} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
+ command: '{{ ansible_python_interpreter }} -c "import sys; msg=''hello \n \r''; print(msg); {{ print_error_command }}"'
args:
strip_empty_ends: no
register: command_no_strip
diff --git a/test/integration/targets/common_network/aliases b/test/integration/targets/common_network/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/common_network/aliases
+++ b/test/integration/targets/common_network/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/conditionals/aliases b/test/integration/targets/conditionals/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/conditionals/aliases
+++ b/test/integration/targets/conditionals/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/config/aliases b/test/integration/targets/config/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/config/aliases
+++ b/test/integration/targets/config/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/connection_delegation/aliases b/test/integration/targets/connection_delegation/aliases
index 87caabdf08..44e49e4f66 100644
--- a/test/integration/targets/connection_delegation/aliases
+++ b/test/integration/targets/connection_delegation/aliases
@@ -1,4 +1,5 @@
shippable/posix/group1
+context/controller
skip/freebsd # No sshpass
skip/osx # No sshpass
skip/macos # No sshpass
diff --git a/test/integration/targets/connection_paramiko_ssh/aliases b/test/integration/targets/connection_paramiko_ssh/aliases
index ad44392e92..fd5b08a416 100644
--- a/test/integration/targets/connection_paramiko_ssh/aliases
+++ b/test/integration/targets/connection_paramiko_ssh/aliases
@@ -2,4 +2,3 @@ needs/ssh
shippable/posix/group3
needs/target/setup_paramiko
destructive # potentially installs/uninstalls OS packages via setup_paramiko
-skip/aix
diff --git a/test/integration/targets/connection_ssh/aliases b/test/integration/targets/connection_ssh/aliases
index 1d822b4546..50fb8eb888 100644
--- a/test/integration/targets/connection_ssh/aliases
+++ b/test/integration/targets/connection_ssh/aliases
@@ -1,3 +1,2 @@
needs/ssh
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/controller/aliases b/test/integration/targets/controller/aliases
new file mode 100644
index 0000000000..0ac86c9200
--- /dev/null
+++ b/test/integration/targets/controller/aliases
@@ -0,0 +1,2 @@
+context/controller
+shippable/posix/group1
diff --git a/test/integration/targets/controller/tasks/main.yml b/test/integration/targets/controller/tasks/main.yml
new file mode 100644
index 0000000000..354a593e5e
--- /dev/null
+++ b/test/integration/targets/controller/tasks/main.yml
@@ -0,0 +1,9 @@
+- name: Verify testhost is control host
+ stat:
+ path: "{{ output_dir }}"
+- name: Get control host details
+ setup:
+ register: control_host
+- name: Show control host details
+ debug:
+ msg: "{{ control_host.ansible_facts.ansible_distribution }} {{ control_host.ansible_facts.ansible_distribution_version }}"
diff --git a/test/integration/targets/dataloader/aliases b/test/integration/targets/dataloader/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/dataloader/aliases
+++ b/test/integration/targets/dataloader/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/debug/aliases b/test/integration/targets/debug/aliases
index a6dafcf8cd..97c468e5d5 100644
--- a/test/integration/targets/debug/aliases
+++ b/test/integration/targets/debug/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/delegate_to/aliases b/test/integration/targets/delegate_to/aliases
index b8e973dace..d6bb651c11 100644
--- a/test/integration/targets/delegate_to/aliases
+++ b/test/integration/targets/delegate_to/aliases
@@ -1,4 +1,4 @@
shippable/posix/group3
needs/ssh
needs/root # only on macOS and FreeBSD to configure network interfaces
-skip/aix
+context/controller
diff --git a/test/integration/targets/dict_transformations/aliases b/test/integration/targets/dict_transformations/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/dict_transformations/aliases
+++ b/test/integration/targets/dict_transformations/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/dnf/tasks/dnf.yml b/test/integration/targets/dnf/tasks/dnf.yml
index 9cfc44e394..bf1ea848b8 100644
--- a/test/integration/targets/dnf/tasks/dnf.yml
+++ b/test/integration/targets/dnf/tasks/dnf.yml
@@ -700,7 +700,7 @@
content: |
[main]
exclude=lsof*
- dest: '{{ output_dir }}/test-dnf.conf'
+ dest: '{{ remote_tmp_dir }}/test-dnf.conf'
register: test_dnf_copy
- block:
@@ -728,7 +728,7 @@
always:
- name: remove exclude lsof conf file
file:
- path: '{{ output_dir }}/test-dnf.conf'
+ path: '{{ remote_tmp_dir }}/test-dnf.conf'
state: absent
# end test case where disable_excludes is supported
diff --git a/test/integration/targets/egg-info/aliases b/test/integration/targets/egg-info/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/egg-info/aliases
+++ b/test/integration/targets/egg-info/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/embedded_module/aliases b/test/integration/targets/embedded_module/aliases
index 765b70da79..6452e6d4c4 100644
--- a/test/integration/targets/embedded_module/aliases
+++ b/test/integration/targets/embedded_module/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/target
diff --git a/test/integration/targets/environment/aliases b/test/integration/targets/environment/aliases
index b59832142f..a3ada117fb 100644
--- a/test/integration/targets/environment/aliases
+++ b/test/integration/targets/environment/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/target
diff --git a/test/integration/targets/error_from_connection/aliases b/test/integration/targets/error_from_connection/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/error_from_connection/aliases
+++ b/test/integration/targets/error_from_connection/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/facts_d/aliases b/test/integration/targets/facts_d/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/facts_d/aliases
+++ b/test/integration/targets/facts_d/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/facts_linux_network/aliases b/test/integration/targets/facts_linux_network/aliases
index 21a4e907df..703c532e90 100644
--- a/test/integration/targets/facts_linux_network/aliases
+++ b/test/integration/targets/facts_linux_network/aliases
@@ -3,3 +3,4 @@ shippable/posix/group2
skip/freebsd
skip/osx
skip/macos
+context/controller
diff --git a/test/integration/targets/failed_when/aliases b/test/integration/targets/failed_when/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/failed_when/aliases
+++ b/test/integration/targets/failed_when/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/fetch/aliases b/test/integration/targets/fetch/aliases
index fb5d6faa35..ff56593df1 100644
--- a/test/integration/targets/fetch/aliases
+++ b/test/integration/targets/fetch/aliases
@@ -1,2 +1,3 @@
shippable/posix/group2
needs/target/setup_remote_tmp_dir
+needs/ssh
diff --git a/test/integration/targets/fetch/hosts.yml b/test/integration/targets/fetch/hosts.yml
deleted file mode 100644
index 8465ef166c..0000000000
--- a/test/integration/targets/fetch/hosts.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-all:
- hosts:
- testhost:
- ansible_host: localhost
- ansible_connection: ssh
- ansible_python_interpreter: "{{ ansible_playbook_python }}"
- ansible_host_key_checking: no
- ansible_ssh_common_args: -o UserKnownHostsFile={{ output_dir }}/known_hosts -o StrictHostKeyChecking=no
diff --git a/test/integration/targets/fetch/runme.sh b/test/integration/targets/fetch/runme.sh
index 6a9bfa9902..a508a0a672 100755
--- a/test/integration/targets/fetch/runme.sh
+++ b/test/integration/targets/fetch/runme.sh
@@ -3,7 +3,7 @@
set -eux
function cleanup {
- ansible-playbook -i hosts.yml cleanup.yml -e "output_dir=${OUTPUT_DIR}" -b "$@"
+ ansible-playbook -i "${INVENTORY_PATH}" cleanup.yml -e "output_dir=${OUTPUT_DIR}" -b "$@"
unset ANSIBLE_CACHE_PLUGIN
unset ANSIBLE_CACHE_PLUGIN_CONNECTION
}
@@ -28,7 +28,7 @@ ansible-playbook -i ../../inventory injection/avoid_slurp_return.yml -e "output_
export ANSIBLE_CACHE_PLUGIN=jsonfile
export ANSIBLE_CACHE_PLUGIN_CONNECTION="${OUTPUT_DIR}/cache"
# Create a non-root user account and configure SSH acccess for that account
-ansible-playbook -i hosts.yml setup_unreadable_test.yml -e "output_dir=${OUTPUT_DIR}" "$@"
+ansible-playbook -i "${INVENTORY_PATH}" setup_unreadable_test.yml -e "output_dir=${OUTPUT_DIR}" "$@"
# Run the tests as the unprivileged user without become to test the use of the stat module from the fetch module
-ansible-playbook --user fetcher -i hosts.yml test_unreadable_with_stat.yml -e "output_dir=${OUTPUT_DIR}" "$@"
+ansible-playbook -i "${INVENTORY_PATH}" test_unreadable_with_stat.yml -e ansible_user=fetcher -e ansible_become=no -e "output_dir=${OUTPUT_DIR}" "$@"
diff --git a/test/integration/targets/file/tasks/main.yml b/test/integration/targets/file/tasks/main.yml
index 565afa0260..c96beba3bd 100644
--- a/test/integration/targets/file/tasks/main.yml
+++ b/test/integration/targets/file/tasks/main.yml
@@ -91,7 +91,10 @@
- "file2_result.state == 'absent'"
- name: verify we can touch a file
- file: path={{output_dir}}/baz.txt state=touch
+ file:
+ path: "{{output_dir}}/baz.txt"
+ state: touch
+ mode: '0644'
register: file3_result
- name: verify that the file was marked as changed
diff --git a/test/integration/targets/filter_core/aliases b/test/integration/targets/filter_core/aliases
index 1603f4351b..765b70da79 100644
--- a/test/integration/targets/filter_core/aliases
+++ b/test/integration/targets/filter_core/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/filter_encryption/aliases b/test/integration/targets/filter_encryption/aliases
index 70fd8b04d0..765b70da79 100644
--- a/test/integration/targets/filter_encryption/aliases
+++ b/test/integration/targets/filter_encryption/aliases
@@ -1,4 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/python2.7 # filters are controller only, and we no longer support Python 2.7 on the controller
-skip/aix
diff --git a/test/integration/targets/filter_mathstuff/aliases b/test/integration/targets/filter_mathstuff/aliases
index 1603f4351b..765b70da79 100644
--- a/test/integration/targets/filter_mathstuff/aliases
+++ b/test/integration/targets/filter_mathstuff/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/filter_urls/aliases b/test/integration/targets/filter_urls/aliases
index 1603f4351b..765b70da79 100644
--- a/test/integration/targets/filter_urls/aliases
+++ b/test/integration/targets/filter_urls/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/filter_urlsplit/aliases b/test/integration/targets/filter_urlsplit/aliases
index 1603f4351b..765b70da79 100644
--- a/test/integration/targets/filter_urlsplit/aliases
+++ b/test/integration/targets/filter_urlsplit/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/python2.6 # filters are controller only, and we no longer support Python 2.6 on the controller
-skip/aix
diff --git a/test/integration/targets/gathering/aliases b/test/integration/targets/gathering/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/gathering/aliases
+++ b/test/integration/targets/gathering/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/gathering_facts/aliases b/test/integration/targets/gathering_facts/aliases
index 0ee704e116..027aba88a9 100644
--- a/test/integration/targets/gathering_facts/aliases
+++ b/test/integration/targets/gathering_facts/aliases
@@ -1,2 +1,3 @@
shippable/posix/group3
needs/root
+context/controller
diff --git a/test/integration/targets/groupby_filter/aliases b/test/integration/targets/groupby_filter/aliases
index 31094c3170..58201272a0 100644
--- a/test/integration/targets/groupby_filter/aliases
+++ b/test/integration/targets/groupby_filter/aliases
@@ -1,2 +1,3 @@
shippable/posix/group2
needs/file/test/lib/ansible_test/_data/requirements/constraints.txt
+context/controller
diff --git a/test/integration/targets/handler_race/aliases b/test/integration/targets/handler_race/aliases
index 68d6d978e3..1d28bdb2aa 100644
--- a/test/integration/targets/handler_race/aliases
+++ b/test/integration/targets/handler_race/aliases
@@ -1,3 +1,2 @@
shippable/posix/group5
-handler_race
-skip/aix
+context/controller
diff --git a/test/integration/targets/handlers/aliases b/test/integration/targets/handlers/aliases
index 30bb677af2..1d28bdb2aa 100644
--- a/test/integration/targets/handlers/aliases
+++ b/test/integration/targets/handlers/aliases
@@ -1,3 +1,2 @@
shippable/posix/group5
-handlers
-skip/aix
+context/controller
diff --git a/test/integration/targets/hardware_facts/aliases b/test/integration/targets/hardware_facts/aliases
index e00c22c3a2..3933d2e5d3 100644
--- a/test/integration/targets/hardware_facts/aliases
+++ b/test/integration/targets/hardware_facts/aliases
@@ -1,3 +1,4 @@
destructive
needs/privileged
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/hash/aliases b/test/integration/targets/hash/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/hash/aliases
+++ b/test/integration/targets/hash/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/hosts_field/aliases b/test/integration/targets/hosts_field/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/hosts_field/aliases
+++ b/test/integration/targets/hosts_field/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/ignore_errors/aliases b/test/integration/targets/ignore_errors/aliases
index 3005e4b26d..498fedd558 100644
--- a/test/integration/targets/ignore_errors/aliases
+++ b/test/integration/targets/ignore_errors/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/ignore_unreachable/aliases b/test/integration/targets/ignore_unreachable/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/ignore_unreachable/aliases
+++ b/test/integration/targets/ignore_unreachable/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/import_tasks/aliases b/test/integration/targets/import_tasks/aliases
index fff62d9f20..a1b27a8355 100644
--- a/test/integration/targets/import_tasks/aliases
+++ b/test/integration/targets/import_tasks/aliases
@@ -1,2 +1,2 @@
shippable/posix/group5
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/incidental_cloud_init_data_facts/aliases b/test/integration/targets/incidental_cloud_init_data_facts/aliases
index 85f7fe0f04..544fcacdf5 100644
--- a/test/integration/targets/incidental_cloud_init_data_facts/aliases
+++ b/test/integration/targets/incidental_cloud_init_data_facts/aliases
@@ -4,3 +4,4 @@ skip/aix
skip/osx
skip/macos
skip/freebsd
+context/target
diff --git a/test/integration/targets/incidental_deploy_helper/aliases b/test/integration/targets/incidental_deploy_helper/aliases
index 31c6a8b454..3b88c806e4 100644
--- a/test/integration/targets/incidental_deploy_helper/aliases
+++ b/test/integration/targets/incidental_deploy_helper/aliases
@@ -1 +1,2 @@
shippable/posix/incidental
+context/target
diff --git a/test/integration/targets/incidental_inventory_aws_ec2/aliases b/test/integration/targets/incidental_inventory_aws_ec2/aliases
index 29f60feb44..41a05d3cf4 100644
--- a/test/integration/targets/incidental_inventory_aws_ec2/aliases
+++ b/test/integration/targets/incidental_inventory_aws_ec2/aliases
@@ -1,2 +1,3 @@
cloud/aws
shippable/aws/incidental
+context/controller
diff --git a/test/integration/targets/incidental_inventory_aws_ec2/runme.sh b/test/integration/targets/incidental_inventory_aws_ec2/runme.sh
index 916f7e8f7a..339be5dd31 100755
--- a/test/integration/targets/incidental_inventory_aws_ec2/runme.sh
+++ b/test/integration/targets/incidental_inventory_aws_ec2/runme.sh
@@ -2,6 +2,10 @@
set -eux
+source virtualenv.sh
+
+python -m pip install boto3 boto
+
# ensure test config is empty
ansible-playbook playbooks/empty_inventory_config.yml "$@"
diff --git a/test/integration/targets/incidental_inventory_docker_swarm/aliases b/test/integration/targets/incidental_inventory_docker_swarm/aliases
index c3a38c06da..74d3befedf 100644
--- a/test/integration/targets/incidental_inventory_docker_swarm/aliases
+++ b/test/integration/targets/incidental_inventory_docker_swarm/aliases
@@ -1,6 +1,5 @@
shippable/posix/incidental
-skip/aix
-skip/power/centos
+context/controller
skip/osx
skip/macos
skip/freebsd
diff --git a/test/integration/targets/incidental_inventory_foreman/aliases b/test/integration/targets/incidental_inventory_foreman/aliases
index c28a056e81..7eaacbbc31 100644
--- a/test/integration/targets/incidental_inventory_foreman/aliases
+++ b/test/integration/targets/incidental_inventory_foreman/aliases
@@ -1,3 +1,4 @@
shippable/cloud/incidental
cloud/foreman
destructive
+context/controller
diff --git a/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml b/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml
index c91f4c3868..b9e32f7d78 100644
--- a/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml
+++ b/test/integration/targets/incidental_inventory_foreman/inspect_cache.yml
@@ -6,6 +6,10 @@
foreman_stub_api_path: /api/v2
cached_hosts_key: "http://{{ foreman_stub_host }}:{{ foreman_stub_port }}{{ foreman_stub_api_path }}/hosts"
tasks:
+ - name: make sure jmespath is installed
+ pip:
+ name: jmespath
+
- name: verify a cache file was created
find:
path:
diff --git a/test/integration/targets/incidental_inventory_foreman/runme.sh b/test/integration/targets/incidental_inventory_foreman/runme.sh
index ba94a9360f..d81fa02fc5 100755
--- a/test/integration/targets/incidental_inventory_foreman/runme.sh
+++ b/test/integration/targets/incidental_inventory_foreman/runme.sh
@@ -43,8 +43,8 @@ password: secure
validate_certs: False
FOREMAN_YAML
-ansible-playbook test_foreman_inventory.yml --connection=local "$@"
-ansible-playbook inspect_cache.yml --connection=local "$@"
+ansible-playbook test_foreman_inventory.yml --connection=local -e 'ansible_python_interpreter={{ ansible_playbook_python }}' "$@"
+ansible-playbook inspect_cache.yml --connection=local -e 'ansible_python_interpreter={{ ansible_playbook_python }}' "$@"
# remove inventory cache
rm -r ./foreman_cache
diff --git a/test/integration/targets/incidental_mongodb_parameter/aliases b/test/integration/targets/incidental_mongodb_parameter/aliases
index dc28548349..72ed62eb1d 100644
--- a/test/integration/targets/incidental_mongodb_parameter/aliases
+++ b/test/integration/targets/incidental_mongodb_parameter/aliases
@@ -6,3 +6,4 @@ skip/macos
skip/freebsd
skip/rhel
needs/root
+context/target
diff --git a/test/integration/targets/include_import/aliases b/test/integration/targets/include_import/aliases
index fff62d9f20..1d28bdb2aa 100644
--- a/test/integration/targets/include_import/aliases
+++ b/test/integration/targets/include_import/aliases
@@ -1,2 +1,2 @@
shippable/posix/group5
-skip/aix
+context/controller
diff --git a/test/integration/targets/include_vars-ad-hoc/aliases b/test/integration/targets/include_vars-ad-hoc/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/include_vars-ad-hoc/aliases
+++ b/test/integration/targets/include_vars-ad-hoc/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/include_when_parent_is_dynamic/aliases b/test/integration/targets/include_when_parent_is_dynamic/aliases
index 41c99f5192..8278ec8bcc 100644
--- a/test/integration/targets/include_when_parent_is_dynamic/aliases
+++ b/test/integration/targets/include_when_parent_is_dynamic/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/python2.6 # include is controller only, and we no longer support Python 2.6 on the controller
+context/controller
diff --git a/test/integration/targets/include_when_parent_is_static/aliases b/test/integration/targets/include_when_parent_is_static/aliases
index 41c99f5192..8278ec8bcc 100644
--- a/test/integration/targets/include_when_parent_is_static/aliases
+++ b/test/integration/targets/include_when_parent_is_static/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/python2.6 # include is controller only, and we no longer support Python 2.6 on the controller
+context/controller
diff --git a/test/integration/targets/includes/aliases b/test/integration/targets/includes/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/includes/aliases
+++ b/test/integration/targets/includes/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/includes_race/aliases b/test/integration/targets/includes_race/aliases
index fff62d9f20..1d28bdb2aa 100644
--- a/test/integration/targets/includes_race/aliases
+++ b/test/integration/targets/includes_race/aliases
@@ -1,2 +1,2 @@
shippable/posix/group5
-skip/aix
+context/controller
diff --git a/test/integration/targets/infra/aliases b/test/integration/targets/infra/aliases
index 887d7029da..711032383e 100644
--- a/test/integration/targets/infra/aliases
+++ b/test/integration/targets/infra/aliases
@@ -1,3 +1,4 @@
shippable/posix/group3
needs/file/hacking/test-module.py
needs/file/lib/ansible/modules/ping.py
+context/controller
diff --git a/test/integration/targets/interpreter_discovery_python/aliases b/test/integration/targets/interpreter_discovery_python/aliases
index 740ed1a57f..0dfc90e737 100644
--- a/test/integration/targets/interpreter_discovery_python/aliases
+++ b/test/integration/targets/interpreter_discovery_python/aliases
@@ -1,2 +1,3 @@
shippable/posix/group1
non_local # workaround to allow override of ansible_python_interpreter; disables coverage on this integration target
+context/target
diff --git a/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases b/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases
index dc9ac4682b..b4026b5f19 100644
--- a/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases
+++ b/test/integration/targets/interpreter_discovery_python_delegate_facts/aliases
@@ -1,2 +1,3 @@
shippable/posix/group1
non_local # this test requires interpreter discovery, which means code coverage must be disabled
+context/controller
diff --git a/test/integration/targets/inventory/aliases b/test/integration/targets/inventory/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/inventory/aliases
+++ b/test/integration/targets/inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/inventory_cache/aliases b/test/integration/targets/inventory_cache/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/inventory_cache/aliases
+++ b/test/integration/targets/inventory_cache/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/inventory_yaml/aliases b/test/integration/targets/inventory_yaml/aliases
index f8e28c7e46..a6dafcf8cd 100644
--- a/test/integration/targets/inventory_yaml/aliases
+++ b/test/integration/targets/inventory_yaml/aliases
@@ -1,2 +1 @@
shippable/posix/group1
-skip/aix
diff --git a/test/integration/targets/jinja2_native_types/aliases b/test/integration/targets/jinja2_native_types/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/jinja2_native_types/aliases
+++ b/test/integration/targets/jinja2_native_types/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/jinja_plugins/aliases b/test/integration/targets/jinja_plugins/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/jinja_plugins/aliases
+++ b/test/integration/targets/jinja_plugins/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/json_cleanup/aliases b/test/integration/targets/json_cleanup/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/json_cleanup/aliases
+++ b/test/integration/targets/json_cleanup/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/limit_inventory/aliases b/test/integration/targets/limit_inventory/aliases
index 3005e4b26d..498fedd558 100644
--- a/test/integration/targets/limit_inventory/aliases
+++ b/test/integration/targets/limit_inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/lookup_config/aliases b/test/integration/targets/lookup_config/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_config/aliases
+++ b/test/integration/targets/lookup_config/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_csvfile/aliases b/test/integration/targets/lookup_csvfile/aliases
index 45489be80c..765b70da79 100644
--- a/test/integration/targets/lookup_csvfile/aliases
+++ b/test/integration/targets/lookup_csvfile/aliases
@@ -1,2 +1 @@
shippable/posix/group2
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_dict/aliases b/test/integration/targets/lookup_dict/aliases
index 07b8702010..a6dafcf8cd 100644
--- a/test/integration/targets/lookup_dict/aliases
+++ b/test/integration/targets/lookup_dict/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_env/aliases b/test/integration/targets/lookup_env/aliases
index 07b8702010..a6dafcf8cd 100644
--- a/test/integration/targets/lookup_env/aliases
+++ b/test/integration/targets/lookup_env/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_file/aliases b/test/integration/targets/lookup_file/aliases
index 07b8702010..a6dafcf8cd 100644
--- a/test/integration/targets/lookup_file/aliases
+++ b/test/integration/targets/lookup_file/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_first_found/aliases b/test/integration/targets/lookup_first_found/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_first_found/aliases
+++ b/test/integration/targets/lookup_first_found/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_indexed_items/aliases b/test/integration/targets/lookup_indexed_items/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_indexed_items/aliases
+++ b/test/integration/targets/lookup_indexed_items/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_ini/aliases b/test/integration/targets/lookup_ini/aliases
index f9f29ef320..b59832142f 100644
--- a/test/integration/targets/lookup_ini/aliases
+++ b/test/integration/targets/lookup_ini/aliases
@@ -1,2 +1 @@
shippable/posix/group3
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_inventory_hostnames/aliases b/test/integration/targets/lookup_inventory_hostnames/aliases
index 45489be80c..765b70da79 100644
--- a/test/integration/targets/lookup_inventory_hostnames/aliases
+++ b/test/integration/targets/lookup_inventory_hostnames/aliases
@@ -1,2 +1 @@
shippable/posix/group2
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_items/aliases b/test/integration/targets/lookup_items/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_items/aliases
+++ b/test/integration/targets/lookup_items/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_lines/aliases b/test/integration/targets/lookup_lines/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_lines/aliases
+++ b/test/integration/targets/lookup_lines/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_list/aliases b/test/integration/targets/lookup_list/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_list/aliases
+++ b/test/integration/targets/lookup_list/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_nested/aliases b/test/integration/targets/lookup_nested/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_nested/aliases
+++ b/test/integration/targets/lookup_nested/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_password/aliases b/test/integration/targets/lookup_password/aliases
index 07b8702010..a6dafcf8cd 100644
--- a/test/integration/targets/lookup_password/aliases
+++ b/test/integration/targets/lookup_password/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_pipe/aliases b/test/integration/targets/lookup_pipe/aliases
index 07b8702010..a6dafcf8cd 100644
--- a/test/integration/targets/lookup_pipe/aliases
+++ b/test/integration/targets/lookup_pipe/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_random_choice/aliases b/test/integration/targets/lookup_random_choice/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_random_choice/aliases
+++ b/test/integration/targets/lookup_random_choice/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_sequence/aliases b/test/integration/targets/lookup_sequence/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_sequence/aliases
+++ b/test/integration/targets/lookup_sequence/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_subelements/aliases b/test/integration/targets/lookup_subelements/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_subelements/aliases
+++ b/test/integration/targets/lookup_subelements/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_template/aliases b/test/integration/targets/lookup_template/aliases
index 07b8702010..a6dafcf8cd 100644
--- a/test/integration/targets/lookup_template/aliases
+++ b/test/integration/targets/lookup_template/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_together/aliases b/test/integration/targets/lookup_together/aliases
index bc987654d9..765b70da79 100644
--- a/test/integration/targets/lookup_together/aliases
+++ b/test/integration/targets/lookup_together/aliases
@@ -1,3 +1 @@
shippable/posix/group2
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_unvault/aliases b/test/integration/targets/lookup_unvault/aliases
index 4a2ce27cbd..6bd893d49f 100644
--- a/test/integration/targets/lookup_unvault/aliases
+++ b/test/integration/targets/lookup_unvault/aliases
@@ -1,3 +1,2 @@
shippable/posix/group2
needs/root
-skip/aix
diff --git a/test/integration/targets/lookup_url/aliases b/test/integration/targets/lookup_url/aliases
index 28990148bb..90ef161f59 100644
--- a/test/integration/targets/lookup_url/aliases
+++ b/test/integration/targets/lookup_url/aliases
@@ -1,5 +1,3 @@
destructive
shippable/posix/group1
needs/httptester
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_varnames/aliases b/test/integration/targets/lookup_varnames/aliases
index 45489be80c..765b70da79 100644
--- a/test/integration/targets/lookup_varnames/aliases
+++ b/test/integration/targets/lookup_varnames/aliases
@@ -1,2 +1 @@
shippable/posix/group2
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/lookup_vars/aliases b/test/integration/targets/lookup_vars/aliases
index 07b8702010..a6dafcf8cd 100644
--- a/test/integration/targets/lookup_vars/aliases
+++ b/test/integration/targets/lookup_vars/aliases
@@ -1,3 +1 @@
shippable/posix/group1
-skip/aix
-skip/python2.6 # lookups are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/loop_control/aliases b/test/integration/targets/loop_control/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/loop_control/aliases
+++ b/test/integration/targets/loop_control/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/loops/aliases b/test/integration/targets/loops/aliases
index ed821c2754..90ea9e1281 100644
--- a/test/integration/targets/loops/aliases
+++ b/test/integration/targets/loops/aliases
@@ -1,2 +1,2 @@
shippable/posix/group2
-skip/aix
+context/controller
diff --git a/test/integration/targets/meta_tasks/aliases b/test/integration/targets/meta_tasks/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/meta_tasks/aliases
+++ b/test/integration/targets/meta_tasks/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/missing_required_lib/aliases b/test/integration/targets/missing_required_lib/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/missing_required_lib/aliases
+++ b/test/integration/targets/missing_required_lib/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/module_defaults/aliases b/test/integration/targets/module_defaults/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/module_defaults/aliases
+++ b/test/integration/targets/module_defaults/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/module_no_log/aliases b/test/integration/targets/module_no_log/aliases
index cbbb880435..2e2633090b 100644
--- a/test/integration/targets/module_no_log/aliases
+++ b/test/integration/targets/module_no_log/aliases
@@ -1,5 +1,5 @@
shippable/posix/group1
-skip/aix # not configured to log user.info to /var/log/syslog
+context/controller
skip/freebsd # not configured to log user.info to /var/log/syslog
skip/osx # not configured to log user.info to /var/log/syslog
skip/macos # not configured to log user.info to /var/log/syslog
diff --git a/test/integration/targets/module_precedence/aliases b/test/integration/targets/module_precedence/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/module_precedence/aliases
+++ b/test/integration/targets/module_precedence/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/module_tracebacks/aliases b/test/integration/targets/module_tracebacks/aliases
index 804f046048..757f4fb831 100644
--- a/test/integration/targets/module_tracebacks/aliases
+++ b/test/integration/targets/module_tracebacks/aliases
@@ -1,3 +1,3 @@
shippable/posix/group4
needs/ssh
-skip/aix
+context/controller
diff --git a/test/integration/targets/module_utils/aliases b/test/integration/targets/module_utils/aliases
index 2f5770ffaa..769d265d3a 100644
--- a/test/integration/targets/module_utils/aliases
+++ b/test/integration/targets/module_utils/aliases
@@ -1,3 +1,4 @@
shippable/posix/group3
needs/root
needs/target/setup_nobody
+context/target
diff --git a/test/integration/targets/module_utils/module_utils_test_no_log.yml b/test/integration/targets/module_utils/module_utils_test_no_log.yml
index bad2efd495..2fa3e101ad 100644
--- a/test/integration/targets/module_utils/module_utils_test_no_log.yml
+++ b/test/integration/targets/module_utils/module_utils_test_no_log.yml
@@ -7,3 +7,6 @@
explicit_pass: abc
suboption:
explicit_sub_pass: def
+ environment:
+ SECRET_ENV: ghi
+ SECRET_SUB_ENV: jkl
diff --git a/test/integration/targets/module_utils/module_utils_vvvvv.yml b/test/integration/targets/module_utils/module_utils_vvvvv.yml
index 6a9f92013c..fc2b0c1c90 100644
--- a/test/integration/targets/module_utils/module_utils_vvvvv.yml
+++ b/test/integration/targets/module_utils/module_utils_vvvvv.yml
@@ -7,11 +7,10 @@
# Invocation usually is output with 3vs or more, our callback plugin displays it anyway
- name: Check no_log invocation results
command: ansible-playbook -i {{ inventory_file }} module_utils_test_no_log.yml
+ delegate_to: localhost
environment:
ANSIBLE_CALLBACK_PLUGINS: callback
ANSIBLE_STDOUT_CALLBACK: pure_json
- SECRET_ENV: ghi
- SECRET_SUB_ENV: jkl
register: no_log_invocation
- set_fact:
diff --git a/test/integration/targets/module_utils_distro/aliases b/test/integration/targets/module_utils_distro/aliases
index 0b4d548e4d..8278ec8bcc 100644
--- a/test/integration/targets/module_utils_distro/aliases
+++ b/test/integration/targets/module_utils_distro/aliases
@@ -1 +1,2 @@
-shippable/posix/group3 \ No newline at end of file
+shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/module_utils_facts.system.selinux/aliases b/test/integration/targets/module_utils_facts.system.selinux/aliases
index aab3ff52eb..ee281d2748 100644
--- a/test/integration/targets/module_utils_facts.system.selinux/aliases
+++ b/test/integration/targets/module_utils_facts.system.selinux/aliases
@@ -1,5 +1,4 @@
shippable/posix/group1
-skip/aix
skip/osx
skip/macos
skip/freebsd
diff --git a/test/integration/targets/no_log/aliases b/test/integration/targets/no_log/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/no_log/aliases
+++ b/test/integration/targets/no_log/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/noexec/aliases b/test/integration/targets/noexec/aliases
index 66a77c7b29..edabc85ab9 100644
--- a/test/integration/targets/noexec/aliases
+++ b/test/integration/targets/noexec/aliases
@@ -1,3 +1,4 @@
shippable/posix/group2
+context/controller
skip/docker
skip/macos
diff --git a/test/integration/targets/old_style_cache_plugins/aliases b/test/integration/targets/old_style_cache_plugins/aliases
index 05f65b7188..13906d9e11 100644
--- a/test/integration/targets/old_style_cache_plugins/aliases
+++ b/test/integration/targets/old_style_cache_plugins/aliases
@@ -1,4 +1,5 @@
shippable/posix/group3
+context/controller
skip/osx
skip/macos
disabled
diff --git a/test/integration/targets/old_style_modules_posix/aliases b/test/integration/targets/old_style_modules_posix/aliases
index b59832142f..a3ada117fb 100644
--- a/test/integration/targets/old_style_modules_posix/aliases
+++ b/test/integration/targets/old_style_modules_posix/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/target
diff --git a/test/integration/targets/omit/aliases b/test/integration/targets/omit/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/omit/aliases
+++ b/test/integration/targets/omit/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/order/aliases b/test/integration/targets/order/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/order/aliases
+++ b/test/integration/targets/order/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/parsing/aliases b/test/integration/targets/parsing/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/parsing/aliases
+++ b/test/integration/targets/parsing/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/path_lookups/aliases b/test/integration/targets/path_lookups/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/path_lookups/aliases
+++ b/test/integration/targets/path_lookups/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/path_with_comma_in_inventory/aliases b/test/integration/targets/path_with_comma_in_inventory/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/path_with_comma_in_inventory/aliases
+++ b/test/integration/targets/path_with_comma_in_inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/pause/aliases b/test/integration/targets/pause/aliases
index 810f1ab6ed..b07d71c7ea 100644
--- a/test/integration/targets/pause/aliases
+++ b/test/integration/targets/pause/aliases
@@ -1,3 +1,3 @@
needs/target/setup_pexpect
shippable/posix/group1
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/pkg_resources/aliases b/test/integration/targets/pkg_resources/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/pkg_resources/aliases
+++ b/test/integration/targets/pkg_resources/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/play_iterator/aliases b/test/integration/targets/play_iterator/aliases
index 3005e4b26d..498fedd558 100644
--- a/test/integration/targets/play_iterator/aliases
+++ b/test/integration/targets/play_iterator/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/playbook/aliases b/test/integration/targets/playbook/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/playbook/aliases
+++ b/test/integration/targets/playbook/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/plugin_config_for_inventory/aliases b/test/integration/targets/plugin_config_for_inventory/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/plugin_config_for_inventory/aliases
+++ b/test/integration/targets/plugin_config_for_inventory/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/plugin_filtering/aliases b/test/integration/targets/plugin_filtering/aliases
index 3005e4b26d..498fedd558 100644
--- a/test/integration/targets/plugin_filtering/aliases
+++ b/test/integration/targets/plugin_filtering/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/plugin_loader/aliases b/test/integration/targets/plugin_loader/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/plugin_loader/aliases
+++ b/test/integration/targets/plugin_loader/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/plugin_namespace/aliases b/test/integration/targets/plugin_namespace/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/plugin_namespace/aliases
+++ b/test/integration/targets/plugin_namespace/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/rel_plugin_loading/aliases b/test/integration/targets/rel_plugin_loading/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/rel_plugin_loading/aliases
+++ b/test/integration/targets/rel_plugin_loading/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/remote_tmp/aliases b/test/integration/targets/remote_tmp/aliases
index 757c99661d..4b8559d973 100644
--- a/test/integration/targets/remote_tmp/aliases
+++ b/test/integration/targets/remote_tmp/aliases
@@ -1,2 +1,4 @@
shippable/posix/group3
skip/aix
+context/target
+needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/remote_tmp/playbook.yml b/test/integration/targets/remote_tmp/playbook.yml
index 43f99ca5f2..5adef62690 100644
--- a/test/integration/targets/remote_tmp/playbook.yml
+++ b/test/integration/targets/remote_tmp/playbook.yml
@@ -31,13 +31,16 @@
hosts: testhost
gather_facts: false
tasks:
+ - import_role:
+ name: ../setup_remote_tmp_dir
+
- file:
state: touch
- path: "{{ output_dir }}/65393"
+ path: "{{ remote_tmp_dir }}/65393"
- copy:
- src: "{{ output_dir }}/65393"
- dest: "{{ output_dir }}/65393.2"
+ src: "{{ remote_tmp_dir }}/65393"
+ dest: "{{ remote_tmp_dir }}/65393.2"
remote_src: true
- find:
@@ -52,6 +55,5 @@
- assert:
that:
- # Should only be AnsiballZ_find.py because find is actively running
- - result.files|length == 1
- - result.files[0].path.endswith('/AnsiballZ_find.py')
+ # Should find nothing since pipelining is used
+ - result.files|length == 0
diff --git a/test/integration/targets/remote_tmp/runme.sh b/test/integration/targets/remote_tmp/runme.sh
index 8d1eebd6b1..69efd6e016 100755
--- a/test/integration/targets/remote_tmp/runme.sh
+++ b/test/integration/targets/remote_tmp/runme.sh
@@ -2,4 +2,4 @@
set -ux
-ansible-playbook -i ../../inventory playbook.yml -e "output_dir=${OUTPUT_DIR}" -v "$@"
+ansible-playbook -i ../../inventory playbook.yml -v "$@"
diff --git a/test/integration/targets/retry_task_name_in_callback/aliases b/test/integration/targets/retry_task_name_in_callback/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/retry_task_name_in_callback/aliases
+++ b/test/integration/targets/retry_task_name_in_callback/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/roles/aliases b/test/integration/targets/roles/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/roles/aliases
+++ b/test/integration/targets/roles/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/roles_arg_spec/aliases b/test/integration/targets/roles_arg_spec/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/roles_arg_spec/aliases
+++ b/test/integration/targets/roles_arg_spec/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/roles_var_inheritance/aliases b/test/integration/targets/roles_var_inheritance/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/roles_var_inheritance/aliases
+++ b/test/integration/targets/roles_var_inheritance/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/run_modules/aliases b/test/integration/targets/run_modules/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/run_modules/aliases
+++ b/test/integration/targets/run_modules/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/set_fact/aliases b/test/integration/targets/set_fact/aliases
index 757c99661d..101793239a 100644
--- a/test/integration/targets/set_fact/aliases
+++ b/test/integration/targets/set_fact/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/set_stats/aliases b/test/integration/targets/set_stats/aliases
index 70a7b7a9f3..a1b27a8355 100644
--- a/test/integration/targets/set_stats/aliases
+++ b/test/integration/targets/set_stats/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller # this is a controller-only action, the module is just for documentation
diff --git a/test/integration/targets/set_stats/runme.sh b/test/integration/targets/set_stats/runme.sh
index fa11797269..27193dc8e0 100755
--- a/test/integration/targets/set_stats/runme.sh
+++ b/test/integration/targets/set_stats/runme.sh
@@ -5,9 +5,9 @@ set -eux
export ANSIBLE_SHOW_CUSTOM_STATS=yes
# Simple tests
-ansible-playbook test_simple.yml
+ansible-playbook test_simple.yml -i "${INVENTORY_PATH}"
# This playbook does two set_stats calls setting my_int to 10 and 15.
# The aggregated output should add to 25.
-output=$(ansible-playbook test_aggregate.yml | grep -c '"my_int": 25')
+output=$(ansible-playbook test_aggregate.yml -i "${INVENTORY_PATH}" | grep -c '"my_int": 25')
test "$output" -eq 1
diff --git a/test/integration/targets/set_stats/test_aggregate.yml b/test/integration/targets/set_stats/test_aggregate.yml
index db48875e54..7f12895d1a 100644
--- a/test/integration/targets/set_stats/test_aggregate.yml
+++ b/test/integration/targets/set_stats/test_aggregate.yml
@@ -1,5 +1,5 @@
---
-- hosts: localhost
+- hosts: testhost
gather_facts: false
tasks:
- name: First set_stats
diff --git a/test/integration/targets/set_stats/test_simple.yml b/test/integration/targets/set_stats/test_simple.yml
index d734564a22..0f62120d3a 100644
--- a/test/integration/targets/set_stats/test_simple.yml
+++ b/test/integration/targets/set_stats/test_simple.yml
@@ -1,5 +1,5 @@
---
-- hosts: localhost
+- hosts: testhost
gather_facts: false
tasks:
- name: test simple data with defaults
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml
deleted file mode 100644
index dec5b5481a..0000000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-2.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 2 on FreeBSD 11
- pkgng:
- name: py27-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml
deleted file mode 100644
index eb01d00f1e..0000000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-11-python-3.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-- name: Downgrade to pip version 18.1 to work around a PEP 517 virtualenv bug
- # pip 19.0.0 added support for PEP 517
- # versions as recent as 19.0.3 fail to install paramiko in a virtualenv due to a BackendUnavailable exception
- # installation without a virtualenv succeeds
- pip:
- name: pip==18.1
-- name: Setup remote constraints
- include_tasks: setup-remote-constraints.yml
-- name: Install Paramiko for Python 3 on FreeBSD 11
- pip: # no py36-paramiko package exists for FreeBSD 11
- name: paramiko
- extra_args: "-c {{ remote_constraints }}"
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml
deleted file mode 100644
index 9a7bfb6702..0000000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-11.4-python-3.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 3 on FreeBSD 11.4
- pkgng:
- name: py37-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml
deleted file mode 100644
index 29e7896917..0000000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-2.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 2 on FreeBSD 12
- pkgng:
- name: py27-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml
deleted file mode 100644
index 2aa7b500c7..0000000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-12-python-3.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 3 on FreeBSD 12
- pkgng:
- name: py36-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml
deleted file mode 100644
index 4fe6011bb1..0000000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-12.2-python-3.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 3 on FreeBSD 12.2
- pkgng:
- name: py37-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-13-python-3.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-13-python-3.yml
deleted file mode 100644
index 68dd3a9b95..0000000000
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-13-python-3.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- name: Install Paramiko for Python 3 on FreeBSD 13.0
- pkgng:
- name: py37-paramiko
diff --git a/test/integration/targets/setup_paramiko/install-FreeBSD-13-python-2.yml b/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml
index 95697465b8..27daf3cfe9 100644
--- a/test/integration/targets/setup_paramiko/install-FreeBSD-13-python-2.yml
+++ b/test/integration/targets/setup_paramiko/install-FreeBSD-python-3.yml
@@ -1,6 +1,6 @@
- name: Setup remote constraints
include_tasks: setup-remote-constraints.yml
-- name: Install Paramiko for Python 2 on FreeBSD 13
+- name: Install Paramiko for Python 3 on FreeBSD
pip: # no package in pkg, just use pip
name: paramiko
extra_args: "-c {{ remote_constraints }}"
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml
deleted file mode 100644
index d27f831c2e..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-2.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 2 on FreeBSD 11
- pkgng:
- name: py27-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml
deleted file mode 100644
index 33f292e8da..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 11
- pip: # no py36-paramiko package exists for FreeBSD 11
- name: paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml
deleted file mode 100644
index 86956fd9fd..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-11.4-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 11.4
- pkgng:
- name: py37-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml
deleted file mode 100644
index 7935248774..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-2.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 2 on FreeBSD 12
- pkgng:
- name: py27-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml
deleted file mode 100644
index 46d26ca344..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 12
- pkgng:
- name: py36-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml
deleted file mode 100644
index 0359bf4cf7..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-12.2-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 12.2
- pkgng:
- name: py37-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-2.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-2.yml
deleted file mode 100644
index 200569695f..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-2.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 2 on FreeBSD 13
- pip:
- name: paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-3.yml
deleted file mode 100644
index 738e2c0b2c..0000000000
--- a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-13-python-3.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-- name: Uninstall Paramiko for Python 3 on FreeBSD 13.0
- pkgng:
- name: py37-paramiko
- state: absent
diff --git a/test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml
new file mode 100644
index 0000000000..d3d3739b01
--- /dev/null
+++ b/test/integration/targets/setup_paramiko/uninstall-FreeBSD-python-3.yml
@@ -0,0 +1,4 @@
+- name: Uninstall Paramiko for Python 3 on FreeBSD
+ pip:
+ name: paramiko
+ state: absent
diff --git a/test/integration/targets/special_vars/aliases b/test/integration/targets/special_vars/aliases
index 2d9e6788ad..55b8ec066a 100644
--- a/test/integration/targets/special_vars/aliases
+++ b/test/integration/targets/special_vars/aliases
@@ -1,2 +1,3 @@
shippable/posix/group2
needs/target/include_parent_role_vars
+context/controller
diff --git a/test/integration/targets/special_vars_hosts/aliases b/test/integration/targets/special_vars_hosts/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/special_vars_hosts/aliases
+++ b/test/integration/targets/special_vars_hosts/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/split/aliases b/test/integration/targets/split/aliases
new file mode 100644
index 0000000000..87958830d5
--- /dev/null
+++ b/test/integration/targets/split/aliases
@@ -0,0 +1,2 @@
+context/target
+shippable/posix/group1
diff --git a/test/integration/targets/split/tasks/main.yml b/test/integration/targets/split/tasks/main.yml
new file mode 100644
index 0000000000..ead1c536af
--- /dev/null
+++ b/test/integration/targets/split/tasks/main.yml
@@ -0,0 +1,30 @@
+- name: Get control host details
+ setup:
+ delegate_to: localhost
+ register: control_host
+- name: Get managed host details
+ setup:
+ register: managed_host
+- name: Check split state
+ stat:
+ path: "{{ output_dir }}"
+ register: split
+ ignore_errors: yes
+- name: Build non-split status message
+ set_fact:
+ message: "
+ {{ control_host.ansible_facts.ansible_distribution }} {{ control_host.ansible_facts.ansible_distribution_version }}
+ {{ control_host.ansible_facts.ansible_python.executable }} ({{ control_host.ansible_facts.ansible_python_version }}) ->
+ {{ managed_host.ansible_facts.ansible_python.executable }} ({{ managed_host.ansible_facts.ansible_python_version }})"
+ when: split is success and split.stat.exists
+- name: Build split status message
+ set_fact:
+ message: "
+ {{ control_host.ansible_facts.ansible_distribution }} {{ control_host.ansible_facts.ansible_distribution_version }}
+ {{ control_host.ansible_facts.ansible_python.executable }} ({{ control_host.ansible_facts.ansible_python_version }}) ->
+ {{ managed_host.ansible_facts.ansible_distribution }} {{ managed_host.ansible_facts.ansible_distribution_version }}
+ {{ managed_host.ansible_facts.ansible_python.executable }} ({{ managed_host.ansible_facts.ansible_python_version }})"
+ when: split is not success or not split.stat.exists
+- name: Show host details
+ debug:
+ msg: "{{ message }}"
diff --git a/test/integration/targets/subversion/roles/subversion/defaults/main.yml b/test/integration/targets/subversion/roles/subversion/defaults/main.yml
index f989345a66..249578d2f2 100644
--- a/test/integration/targets/subversion/roles/subversion/defaults/main.yml
+++ b/test/integration/targets/subversion/roles/subversion/defaults/main.yml
@@ -1,11 +1,10 @@
---
apache_port: 11386 # cannot use 80 as httptester overrides this
-output_dir: "{{ lookup('env', 'OUTPUT_DIR') }}"
-subversion_test_dir: '{{ output_dir }}/svn-test'
+subversion_test_dir: /tmp/ansible-svn-test-dir
subversion_server_dir: /tmp/ansible-svn # cannot use a path in the home dir without userdir or granting exec permission to the apache user
subversion_repo_name: ansible-test-repo
subversion_repo_url: http://127.0.0.1:{{ apache_port }}/svn/{{ subversion_repo_name }}
subversion_repo_auth_url: http://127.0.0.1:{{ apache_port }}/svnauth/{{ subversion_repo_name }}
subversion_username: subsvn_user'''
subversion_password: Password123!
-subversion_external_repo_url: https://github.com/ansible/ansible-base-test-container # GitHub serves SVN
+subversion_external_repo_url: https://github.com/ansible/ansible-core-test-container # GitHub serves SVN
diff --git a/test/integration/targets/subversion/runme.sh b/test/integration/targets/subversion/runme.sh
index 99d56aa79b..c39bdc0090 100755
--- a/test/integration/targets/subversion/runme.sh
+++ b/test/integration/targets/subversion/runme.sh
@@ -4,7 +4,7 @@ set -eu
cleanup() {
echo "Cleanup"
- ansible-playbook runme.yml -e "output_dir=${OUTPUT_DIR}" "$@" --tags cleanup
+ ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" --tags cleanup
echo "Done"
}
@@ -13,15 +13,18 @@ trap cleanup INT TERM EXIT
export ANSIBLE_ROLES_PATH=roles/
# Ensure subversion is set up
-ansible-playbook runme.yml "$@" -v --tags setup
+ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" -v --tags setup
# Test functionality
-ansible-playbook runme.yml "$@" -v --tags tests
+ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" -v --tags tests
# Test a warning is displayed for versions < 1.10.0 when a password is provided
-ansible-playbook runme.yml "$@" --tags warnings 2>&1 | tee out.txt
+ansible-playbook runme.yml -i "${INVENTORY_PATH}" "$@" --tags warnings 2>&1 | tee out.txt
+
+version=$(ANSIBLE_FORCE_COLOR=0 ansible -i "${INVENTORY_PATH}" -m shell -a 'svn --version -q' testhost 2>/dev/null | tail -n 1)
+
+echo "svn --version is '${version}'"
-version="$(svn --version -q)"
secure=$(python -c "from ansible.module_utils.compat.version import LooseVersion; print(LooseVersion('$version') >= LooseVersion('1.10.0'))")
if [[ "${secure}" = "False" ]] && [[ "$(grep -c 'To securely pass credentials, upgrade svn to version 1.10.0' out.txt)" -eq 1 ]]; then
diff --git a/test/integration/targets/subversion/runme.yml b/test/integration/targets/subversion/runme.yml
index c67d7b89b1..71c5e4b812 100644
--- a/test/integration/targets/subversion/runme.yml
+++ b/test/integration/targets/subversion/runme.yml
@@ -1,5 +1,5 @@
---
-- hosts: localhost
+- hosts: testhost
tasks:
- name: load OS specific vars
include_vars: '{{ item }}'
diff --git a/test/integration/targets/tags/aliases b/test/integration/targets/tags/aliases
index 757c99661d..8278ec8bcc 100644
--- a/test/integration/targets/tags/aliases
+++ b/test/integration/targets/tags/aliases
@@ -1,2 +1,2 @@
shippable/posix/group3
-skip/aix
+context/controller
diff --git a/test/integration/targets/task_ordering/aliases b/test/integration/targets/task_ordering/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/task_ordering/aliases
+++ b/test/integration/targets/task_ordering/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/tasks/aliases b/test/integration/targets/tasks/aliases
index a6dafcf8cd..13e01f0c94 100644
--- a/test/integration/targets/tasks/aliases
+++ b/test/integration/targets/tasks/aliases
@@ -1 +1,2 @@
shippable/posix/group1
+context/controller
diff --git a/test/integration/targets/template/aliases b/test/integration/targets/template/aliases
index f0c24d2091..327f088b2d 100644
--- a/test/integration/targets/template/aliases
+++ b/test/integration/targets/template/aliases
@@ -1,3 +1,3 @@
needs/root
shippable/posix/group5
-skip/aix
+context/controller # this "module" is actually an action that runs on the controller
diff --git a/test/integration/targets/template_jinja2_latest/aliases b/test/integration/targets/template_jinja2_latest/aliases
index 2a89ae7eb6..b9c19e3d84 100644
--- a/test/integration/targets/template_jinja2_latest/aliases
+++ b/test/integration/targets/template_jinja2_latest/aliases
@@ -1,5 +1,5 @@
needs/root
shippable/posix/group2
needs/target/template
-skip/aix
+context/controller
needs/file/test/lib/ansible_test/_data/requirements/constraints.txt
diff --git a/test/integration/targets/template_jinja2_non_native/aliases b/test/integration/targets/template_jinja2_non_native/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/template_jinja2_non_native/aliases
+++ b/test/integration/targets/template_jinja2_non_native/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/templating_lookups/aliases b/test/integration/targets/templating_lookups/aliases
index f8e28c7e46..13e01f0c94 100644
--- a/test/integration/targets/templating_lookups/aliases
+++ b/test/integration/targets/templating_lookups/aliases
@@ -1,2 +1,2 @@
shippable/posix/group1
-skip/aix
+context/controller
diff --git a/test/integration/targets/templating_settings/aliases b/test/integration/targets/templating_settings/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/templating_settings/aliases
+++ b/test/integration/targets/templating_settings/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/test_core/aliases b/test/integration/targets/test_core/aliases
index 041b0cc7bc..70a7b7a9f3 100644
--- a/test/integration/targets/test_core/aliases
+++ b/test/integration/targets/test_core/aliases
@@ -1,2 +1 @@
shippable/posix/group5
-skip/python2.6 # tests are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/test_files/aliases b/test/integration/targets/test_files/aliases
index 041b0cc7bc..70a7b7a9f3 100644
--- a/test/integration/targets/test_files/aliases
+++ b/test/integration/targets/test_files/aliases
@@ -1,2 +1 @@
shippable/posix/group5
-skip/python2.6 # tests are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/test_mathstuff/aliases b/test/integration/targets/test_mathstuff/aliases
index 041b0cc7bc..70a7b7a9f3 100644
--- a/test/integration/targets/test_mathstuff/aliases
+++ b/test/integration/targets/test_mathstuff/aliases
@@ -1,2 +1 @@
shippable/posix/group5
-skip/python2.6 # tests are controller only, and we no longer support Python 2.6 on the controller
diff --git a/test/integration/targets/throttle/aliases b/test/integration/targets/throttle/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/throttle/aliases
+++ b/test/integration/targets/throttle/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml b/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml
index d35f88dc7c..7022bba1e0 100644
--- a/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml
+++ b/test/integration/targets/unarchive/tasks/test_unprivileged_user.yml
@@ -75,6 +75,7 @@
name: unarchivetest1
state: absent
remove: yes
+ force: yes
become: yes
become_user: root
diff --git a/test/integration/targets/undefined/aliases b/test/integration/targets/undefined/aliases
index 70a7b7a9f3..1d28bdb2aa 100644
--- a/test/integration/targets/undefined/aliases
+++ b/test/integration/targets/undefined/aliases
@@ -1 +1,2 @@
shippable/posix/group5
+context/controller
diff --git a/test/integration/targets/unicode/aliases b/test/integration/targets/unicode/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/unicode/aliases
+++ b/test/integration/targets/unicode/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/unsafe_writes/aliases b/test/integration/targets/unsafe_writes/aliases
index 4fb7a11640..cf954afc1f 100644
--- a/test/integration/targets/unsafe_writes/aliases
+++ b/test/integration/targets/unsafe_writes/aliases
@@ -1,6 +1,8 @@
+context/target
needs/root
skip/freebsd
skip/osx
skip/macos
skip/aix
shippable/posix/group3
+needs/target/setup_remote_tmp_dir
diff --git a/test/integration/targets/unsafe_writes/basic.yml b/test/integration/targets/unsafe_writes/basic.yml
index 410726ad0e..99a3195f65 100644
--- a/test/integration/targets/unsafe_writes/basic.yml
+++ b/test/integration/targets/unsafe_writes/basic.yml
@@ -1,9 +1,23 @@
- hosts: testhost
gather_facts: false
- vars:
- testudir: '{{output_dir}}/unsafe_writes_test'
- testufile: '{{testudir}}/unreplacablefile.txt'
tasks:
+ - import_role:
+ name: ../setup_remote_tmp_dir
+ - name: define test directory
+ set_fact:
+ testudir: '{{remote_tmp_dir}}/unsafe_writes_test'
+ - name: define test file
+ set_fact:
+ testufile: '{{testudir}}/unreplacablefile.txt'
+ - name: define test environment with unsafe writes set
+ set_fact:
+ test_env:
+ ANSIBLE_UNSAFE_WRITES: "{{ lookup('env', 'ANSIBLE_UNSAFE_WRITES') }}"
+ when: lookup('env', 'ANSIBLE_UNSAFE_WRITES')
+ - name: define test environment without unsafe writes set
+ set_fact:
+ test_env: {}
+ when: not lookup('env', 'ANSIBLE_UNSAFE_WRITES')
- name: test unsafe_writes on immutable dir (file cannot be atomically replaced)
block:
- name: create target dir
@@ -61,6 +75,7 @@
msg: "Failed with envvar: {{env_enabled}}, due AUW: to {{q('env', 'ANSIBLE_UNSAFE_WRITES')}}"
that:
- env_enabled and copy_with_env is changed or not env_enabled and copy_with_env is failed
+ environment: "{{ test_env }}"
always:
- name: remove immutable flag from dir to prevent issues with cleanup
file: path={{testudir}} state=directory attributes="-i"
diff --git a/test/integration/targets/unsafe_writes/runme.sh b/test/integration/targets/unsafe_writes/runme.sh
index 791a5676b4..619ce02568 100755
--- a/test/integration/targets/unsafe_writes/runme.sh
+++ b/test/integration/targets/unsafe_writes/runme.sh
@@ -3,10 +3,10 @@
set -eux
# test w/o fallback env var
-ansible-playbook basic.yml -i ../../inventory -e "output_dir=${OUTPUT_DIR}" "$@"
+ansible-playbook basic.yml -i ../../inventory "$@"
# test enabled fallback env var
-ANSIBLE_UNSAFE_WRITES=1 ansible-playbook basic.yml -i ../../inventory -e "output_dir=${OUTPUT_DIR}" "$@"
+ANSIBLE_UNSAFE_WRITES=1 ansible-playbook basic.yml -i ../../inventory "$@"
# test disnabled fallback env var
-ANSIBLE_UNSAFE_WRITES=0 ansible-playbook basic.yml -i ../../inventory -e "output_dir=${OUTPUT_DIR}" "$@"
+ANSIBLE_UNSAFE_WRITES=0 ansible-playbook basic.yml -i ../../inventory "$@"
diff --git a/test/integration/targets/until/aliases b/test/integration/targets/until/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/until/aliases
+++ b/test/integration/targets/until/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/unvault/aliases b/test/integration/targets/unvault/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/unvault/aliases
+++ b/test/integration/targets/unvault/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/uri/meta/main.yml b/test/integration/targets/uri/meta/main.yml
index 39b94950af..2c2155aba0 100644
--- a/test/integration/targets/uri/meta/main.yml
+++ b/test/integration/targets/uri/meta/main.yml
@@ -2,4 +2,3 @@ dependencies:
- prepare_tests
- prepare_http_tests
- setup_remote_tmp_dir
- - setup_remote_constraints
diff --git a/test/integration/targets/uri/tasks/main.yml b/test/integration/targets/uri/tasks/main.yml
index c6ba67224e..700e7f1017 100644
--- a/test/integration/targets/uri/tasks/main.yml
+++ b/test/integration/targets/uri/tasks/main.yml
@@ -366,10 +366,25 @@
with_items: "{{ uri_os_packages[ansible_os_family].step2 | default([]) }}"
when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
+- name: create constraints path
+ set_fact:
+ remote_constraints: "{{ remote_tmp_dir }}/constraints.txt"
+ when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
+
+- name: create constraints file
+ copy:
+ content: |
+ cryptography == 2.1.4
+ idna == 2.5
+ pyopenssl == 17.5.0
+ six == 1.13.0
+ urllib3 == 1.23
+ dest: "{{ remote_constraints }}"
+ when: not ansible_python.has_sslcontext and not is_ubuntu_precise|bool
+
- name: install urllib3 and pyopenssl via pip
pip:
name: "{{ item }}"
- state: latest
extra_args: "-c {{ remote_constraints }}"
with_items:
- urllib3
diff --git a/test/integration/targets/var_blending/aliases b/test/integration/targets/var_blending/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/var_blending/aliases
+++ b/test/integration/targets/var_blending/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/var_precedence/aliases b/test/integration/targets/var_precedence/aliases
index 3005e4b26d..498fedd558 100644
--- a/test/integration/targets/var_precedence/aliases
+++ b/test/integration/targets/var_precedence/aliases
@@ -1 +1,2 @@
shippable/posix/group4
+context/controller
diff --git a/test/integration/targets/var_reserved/aliases b/test/integration/targets/var_reserved/aliases
index 765b70da79..90ea9e1281 100644
--- a/test/integration/targets/var_reserved/aliases
+++ b/test/integration/targets/var_reserved/aliases
@@ -1 +1,2 @@
shippable/posix/group2
+context/controller
diff --git a/test/integration/targets/var_templating/aliases b/test/integration/targets/var_templating/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/var_templating/aliases
+++ b/test/integration/targets/var_templating/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/want_json_modules_posix/aliases b/test/integration/targets/want_json_modules_posix/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/want_json_modules_posix/aliases
+++ b/test/integration/targets/want_json_modules_posix/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/integration/targets/yaml_parsing/aliases b/test/integration/targets/yaml_parsing/aliases
index b59832142f..8278ec8bcc 100644
--- a/test/integration/targets/yaml_parsing/aliases
+++ b/test/integration/targets/yaml_parsing/aliases
@@ -1 +1,2 @@
shippable/posix/group3
+context/controller
diff --git a/test/lib/ansible_test/_data/completion/docker.txt b/test/lib/ansible_test/_data/completion/docker.txt
index cfabe6db94..a25ed4a3d1 100644
--- a/test/lib/ansible_test/_data/completion/docker.txt
+++ b/test/lib/ansible_test/_data/completion/docker.txt
@@ -1,13 +1,14 @@
-default name=quay.io/ansible/default-test-container:3.7.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=collection
-default name=quay.io/ansible/ansible-core-test-container:3.7.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=ansible-core
-alpine3 name=quay.io/ansible/alpine3-test-container:2.0.2 python=3.8
-centos6 name=quay.io/ansible/centos6-test-container:2.0.2 python=2.6 seccomp=unconfined
-centos7 name=quay.io/ansible/centos7-test-container:2.0.2 python=2.7 seccomp=unconfined
-centos8 name=quay.io/ansible/centos8-test-container:2.0.2 python=3.6 seccomp=unconfined
-fedora32 name=quay.io/ansible/fedora32-test-container:2.1.0 python=3.8
-fedora33 name=quay.io/ansible/fedora33-test-container:2.1.0 python=3.9
-fedora34 name=quay.io/ansible/fedora34-test-container:2.1.0 python=3.9 seccomp=unconfined
-opensuse15py2 name=quay.io/ansible/opensuse15py2-test-container:2.0.2 python=2.7
-opensuse15 name=quay.io/ansible/opensuse15-test-container:2.0.2 python=3.6
-ubuntu1804 name=quay.io/ansible/ubuntu1804-test-container:2.0.2 python=3.6 seccomp=unconfined
-ubuntu2004 name=quay.io/ansible/ubuntu2004-test-container:2.0.2 python=3.8 seccomp=unconfined
+base image=quay.io/ansible/base-test-container:1.0.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined
+default image=quay.io/ansible/default-test-container:3.7.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=collection
+default image=quay.io/ansible/ansible-core-test-container:3.7.0 python=3.9,2.6,2.7,3.5,3.6,3.7,3.8,3.10 seccomp=unconfined context=ansible-core
+alpine3 image=quay.io/ansible/alpine3-test-container:2.0.2 python=3.8
+centos6 image=quay.io/ansible/centos6-test-container:2.0.2 python=2.6 seccomp=unconfined
+centos7 image=quay.io/ansible/centos7-test-container:2.0.2 python=2.7 seccomp=unconfined
+centos8 image=quay.io/ansible/centos8-test-container:2.0.2 python=3.6 seccomp=unconfined
+fedora32 image=quay.io/ansible/fedora32-test-container:2.1.0 python=3.8
+fedora33 image=quay.io/ansible/fedora33-test-container:2.1.0 python=3.9
+fedora34 image=quay.io/ansible/fedora34-test-container:2.1.0 python=3.9 seccomp=unconfined
+opensuse15py2 image=quay.io/ansible/opensuse15py2-test-container:2.0.2 python=2.7
+opensuse15 image=quay.io/ansible/opensuse15-test-container:2.0.2 python=3.6
+ubuntu1804 image=quay.io/ansible/ubuntu1804-test-container:2.0.2 python=3.6 seccomp=unconfined
+ubuntu2004 image=quay.io/ansible/ubuntu2004-test-container:2.0.2 python=3.8 seccomp=unconfined
diff --git a/test/lib/ansible_test/_data/completion/network.txt b/test/lib/ansible_test/_data/completion/network.txt
index dca911f89b..8c6243e9a1 100644
--- a/test/lib/ansible_test/_data/completion/network.txt
+++ b/test/lib/ansible_test/_data/completion/network.txt
@@ -1,2 +1,2 @@
-ios/csr1000v collection=cisco.ios connection=ansible.netcommon.network_cli
-vyos/1.1.8 collection=vyos.vyos connection=ansible.netcommon.network_cli
+ios/csr1000v collection=cisco.ios connection=ansible.netcommon.network_cli provider=aws
+vyos/1.1.8 collection=vyos.vyos connection=ansible.netcommon.network_cli provider=aws
diff --git a/test/lib/ansible_test/_data/completion/remote.txt b/test/lib/ansible_test/_data/completion/remote.txt
index e852147256..785fd23b2e 100644
--- a/test/lib/ansible_test/_data/completion/remote.txt
+++ b/test/lib/ansible_test/_data/completion/remote.txt
@@ -1,8 +1,12 @@
-freebsd/11.4 python=2.7,3.7,3.8 python_dir=/usr/local/bin
-freebsd/12.2 python=3.7,2.7,3.8 python_dir=/usr/local/bin
-freebsd/13.0 python=3.7,2.7,3.8,3.9 python_dir=/usr/local/bin
-macos/11.1 python=3.9 python_dir=/usr/local/bin
-rhel/7.9 python=2.7
-rhel/8.3 python=3.6,3.8
-rhel/8.4 python=3.6,3.8
-aix/7.2 python=2.7 temp-unicode=disabled pip-check=disabled
+freebsd/11.4 python=2.7,3.7,3.8 python_dir=/usr/local/bin provider=aws
+freebsd/12.2 python=3.7,2.7,3.8 python_dir=/usr/local/bin provider=aws
+freebsd/13.0 python=3.7,2.7,3.8,3.9 python_dir=/usr/local/bin provider=aws
+freebsd python_dir=/usr/local/bin provider=aws
+macos/11.1 python=3.9 python_dir=/usr/local/bin provider=parallels
+macos python_dir=/usr/local/bin provider=parallels
+rhel/7.9 python=2.7 provider=aws
+rhel/8.3 python=3.6,3.8 provider=aws
+rhel/8.4 python=3.6,3.8 provider=aws
+rhel provider=aws
+aix/7.2 python=2.7,3.7 python_dir=/opt/freeware/bin provider=ibmps
+aix python_dir=/opt/freeware/bin provider=ibmps
diff --git a/test/lib/ansible_test/_data/completion/windows.txt b/test/lib/ansible_test/_data/completion/windows.txt
index 5704fb94ca..b0461ed9b7 100644
--- a/test/lib/ansible_test/_data/completion/windows.txt
+++ b/test/lib/ansible_test/_data/completion/windows.txt
@@ -1,4 +1,4 @@
-2012
-2012-R2
-2016
-2019 \ No newline at end of file
+windows/2012 provider=aws
+windows/2012-R2 provider=aws
+windows/2016 provider=aws
+windows/2019 provider=aws
diff --git a/test/lib/ansible_test/_data/cryptography-constraints.txt b/test/lib/ansible_test/_data/cryptography-constraints.txt
deleted file mode 100644
index 8e3e99b4ae..0000000000
--- a/test/lib/ansible_test/_data/cryptography-constraints.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# do not add a cryptography constraint here, see the get_cryptography_requirement function in executor.py for details
-idna < 2.8 ; python_version < '2.7' # idna 2.8+ requires python 2.7+
-cffi != 1.14.4 # Fails on systems with older gcc. Should be fixed in the next release. https://foss.heptapod.net/pypy/cffi/-/issues/480
diff --git a/test/lib/ansible_test/_data/inventory b/test/lib/ansible_test/_data/inventory
deleted file mode 100644
index 1b77a7ea71..0000000000
--- a/test/lib/ansible_test/_data/inventory
+++ /dev/null
@@ -1,6 +0,0 @@
-# Do not put test specific entries in this inventory file.
-# For script based test targets (using runme.sh) put the inventory file in the test's directory instead.
-
-[testgroup]
-# ansible_python_interpreter must be set to avoid interpreter discovery
-testhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"
diff --git a/test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml b/test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml
new file mode 100644
index 0000000000..6ed86827ca
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/posix_coverage_setup.yml
@@ -0,0 +1,21 @@
+- name: Setup POSIX code coverage configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Create coverage temporary directory
+ file:
+ path: "{{ common_temp_dir }}"
+ mode: "{{ mode_directory }}"
+ state: directory
+
+ - name: Create coverage configuration file
+ copy:
+ dest: "{{ coverage_config_path }}"
+ content: "{{ coverage_config }}"
+ mode: "{{ mode_file }}"
+
+ - name: Create coverage output directory
+ file:
+ path: "{{ coverage_output_path }}"
+ mode: "{{ mode_directory_write }}"
+ state: directory
diff --git a/test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml b/test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml
new file mode 100644
index 0000000000..290411b6f5
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/posix_coverage_teardown.yml
@@ -0,0 +1,8 @@
+- name: Teardown POSIX code coverage configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Remove coverage temporary directory
+ file:
+ path: "{{ common_temp_dir }}"
+ state: absent
diff --git a/test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml b/test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml
index 5556936e85..69a071312e 100644
--- a/test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml
+++ b/test/lib/ansible_test/_data/playbooks/posix_hosts_prepare.yml
@@ -1,4 +1,5 @@
-- hosts: all
+- name: Prepare POSIX hosts file
+ hosts: all
gather_facts: no
tasks:
- name: Add container hostname(s) to hosts file
diff --git a/test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml b/test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml
index eafd237679..1549ed6bdc 100644
--- a/test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml
+++ b/test/lib/ansible_test/_data/playbooks/posix_hosts_restore.yml
@@ -1,4 +1,5 @@
-- hosts: all
+- name: Restore POSIX hosts file
+ hosts: all
gather_facts: no
tasks:
- name: Remove container hostname(s) from hosts file
diff --git a/test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml b/test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml
new file mode 100644
index 0000000000..0f9948c724
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/pypi_proxy_prepare.yml
@@ -0,0 +1,23 @@
+- name: Prepare PyPI proxy configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Make sure the ~/.pip directory exists
+ file:
+ path: ~/.pip
+ state: directory
+ - name: Configure a custom index for pip based installs
+ copy:
+ content: |
+ [global]
+ index-url = {{ pypi_endpoint }}
+ trusted-host = {{ pypi_hostname }}
+ dest: ~/.pip/pip.conf
+ force: "{{ force }}"
+ - name: Configure a custom index for easy_install based installs
+ copy:
+ content: |
+ [easy_install]
+ index_url = {0}
+ dest: ~/.pydistutils.cfg
+ force: "{{ force }}"
diff --git a/test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml b/test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml
new file mode 100644
index 0000000000..5410fb268d
--- /dev/null
+++ b/test/lib/ansible_test/_data/playbooks/pypi_proxy_restore.yml
@@ -0,0 +1,12 @@
+- name: Restore PyPI proxy configuration
+ hosts: all
+ gather_facts: no
+ tasks:
+ - name: Remove custom index for pip based installs
+ file:
+ path: ~/.pip/pip.conf
+ state: absent
+ - name: Remove custom index for easy_install based installs
+ file:
+ path: ~/.pydistutils.cfg
+ state: absent
diff --git a/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml b/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml
index 2e5ff9c6c3..db7976e45b 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml
+++ b/test/lib/ansible_test/_data/playbooks/windows_coverage_setup.yml
@@ -1,14 +1,13 @@
----
-- name: setup global coverage directory for Windows test targets
- hosts: windows
+- name: Setup Windows code coverage configuration
+ hosts: all
gather_facts: no
tasks:
- - name: create temp directory
+ - name: Create coverage temporary directory
ansible.windows.win_file:
path: '{{ remote_temp_path }}'
state: directory
- - name: allow everyone to write to coverage test dir
+ - name: Allow everyone to write to the temporary coverage directory
ansible.windows.win_acl:
path: '{{ remote_temp_path }}'
user: Everyone
@@ -16,4 +15,4 @@
inherit: ContainerInherit, ObjectInherit
propagation: 'None'
type: allow
- state: present \ No newline at end of file
+ state: present
diff --git a/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml b/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml
index ab34dc2770..f1fa433232 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml
+++ b/test/lib/ansible_test/_data/playbooks/windows_coverage_teardown.yml
@@ -1,15 +1,8 @@
----
-- name: collect the coverage files from the Windows host
- hosts: windows
+- name: Teardown Windows code coverage configuration
+ hosts: all
gather_facts: no
tasks:
- - name: make sure all vars have been set
- assert:
- that:
- - local_temp_path is defined
- - remote_temp_path is defined
-
- - name: zip up all coverage files in the
+ - name: Zip up all coverage files
ansible.windows.win_shell: |
$coverage_dir = '{{ remote_temp_path }}'
$zip_file = Join-Path -Path $coverage_dir -ChildPath 'coverage.zip'
@@ -65,13 +58,13 @@
}
}
- - name: fetch coverage zip file to localhost
+ - name: Fetch coverage zip
fetch:
src: '{{ remote_temp_path }}\coverage.zip'
- dest: '{{ local_temp_path }}/coverage-{{ inventory_hostname }}.zip'
+ dest: '{{ local_temp_path }}/{{ inventory_hostname }}.zip'
flat: yes
- - name: remove the temporary coverage directory
+ - name: Remove temporary coverage directory
ansible.windows.win_file:
path: '{{ remote_temp_path }}'
- state: absent \ No newline at end of file
+ state: absent
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml
index 04ecf969f7..0a23086bfd 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_prepare.yml
@@ -1,4 +1,5 @@
-- hosts: all
+- name: Prepare Windows hosts file
+ hosts: all
gather_facts: no
tasks:
- name: Add container hostname(s) to hosts file
diff --git a/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml
index 313fd0d8f5..c595d5fb75 100644
--- a/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml
+++ b/test/lib/ansible_test/_data/playbooks/windows_hosts_restore.yml
@@ -1,4 +1,5 @@
-- hosts: all
+- name: Restore Windows hosts file
+ hosts: all
gather_facts: no
tasks:
- name: Remove container hostname(s) from hosts file
diff --git a/test/lib/ansible_test/_data/requirements/ansible-test.txt b/test/lib/ansible_test/_data/requirements/ansible-test.txt
deleted file mode 100644
index 7b596e1b6c..0000000000
--- a/test/lib/ansible_test/_data/requirements/ansible-test.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-argparse ; python_version < '2.7'
-
-# pip 7.1 added support for constraints, which are required by ansible-test to install most python requirements
-# see https://github.com/pypa/pip/blame/e648e00dc0226ade30ade99591b245b0c98e86c9/NEWS.rst#L1258
-pip >= 7.1, < 10 ; python_version < '2.7' # pip 10+ drops support for python 2.6 (sanity_ok)
-pip >= 7.1 ; python_version >= '2.7' # sanity_ok
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt b/test/lib/ansible_test/_data/requirements/ansible.txt
index 40cf83a647..40cf83a647 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt
+++ b/test/lib/ansible_test/_data/requirements/ansible.txt
diff --git a/test/lib/ansible_test/_data/requirements/constraints.txt b/test/lib/ansible_test/_data/requirements/constraints.txt
index d1e62c173a..66f294b7c1 100644
--- a/test/lib/ansible_test/_data/requirements/constraints.txt
+++ b/test/lib/ansible_test/_data/requirements/constraints.txt
@@ -1,12 +1,8 @@
+# do not add a cryptography or pyopenssl constraint to this file, they require special handling, see get_cryptography_requirements in python_requirements.py
+# do not add a coverage constraint to this file, it is handled internally by ansible-test
packaging < 21.0 ; python_version < '3.6' # packaging 21.0 requires Python 3.6 or newer
-resolvelib >= 0.5.3, < 0.6.0 # keep in sync with `requirements.txt`
-coverage >= 4.5.1, < 5.0.0 ; python_version < '3.7' # coverage 4.4 required for "disable_warnings" support but 4.5.1 needed for bug fixes, coverage 5.0+ incompatible
-coverage >= 4.5.2, < 5.0.0 ; python_version == '3.7' # coverage 4.5.2 fixes bugs in support for python 3.7, coverage 5.0+ incompatible
-coverage >= 4.5.4, < 5.0.0 ; python_version > '3.7' # coverage had a bug in < 4.5.4 that would cause unit tests to hang in Python 3.8, coverage 5.0+ incompatible
decorator < 5.0.0 ; python_version < '3.5' # decorator 5.0.5 and later require python 3.5 or later
six < 1.14.0 ; python_version < '2.7' # six 1.14.0 drops support for python 2.6
-cryptography < 2.2 ; python_version < '2.7' # cryptography 2.2 drops support for python 2.6
-# do not add a cryptography constraint here unless it is for python version incompatibility, see the get_cryptography_requirement function in executor.py for details
deepdiff < 4.0.0 ; python_version < '3' # deepdiff 4.0.0 and later require python 3
jinja2 < 2.11 ; python_version < '2.7' # jinja2 2.11 and later require python 2.7 or later
urllib3 < 1.24 ; python_version < '2.7' # urllib3 1.24 and later require python 2.7 or later
@@ -30,7 +26,6 @@ requests-credssp >= 0.1.0 # message encryption support
openshift >= 0.6.2, < 0.9.0 # merge_type support
virtualenv < 16.0.0 ; python_version < '2.7' # virtualenv 16.0.0 and later require python 2.7 or later
pathspec < 0.6.0 ; python_version < '2.7' # pathspec 0.6.0 and later require python 2.7 or later
-pyopenssl < 18.0.0 ; python_version < '2.7' # pyOpenSSL 18.0.0 and later require python 2.7 or later
pyparsing < 3.0.0 ; python_version < '3.5' # pyparsing 3 and later require python 3.5 or later
pyyaml < 5.1 ; python_version < '2.7' # pyyaml 5.1 and later require python 2.7 or later
pycparser < 2.19 ; python_version < '2.7' # pycparser 2.19 and later require python 2.7 or later
diff --git a/test/lib/ansible_test/_data/requirements/coverage.txt b/test/lib/ansible_test/_data/requirements/coverage.txt
deleted file mode 100644
index 4ebc8aea50..0000000000
--- a/test/lib/ansible_test/_data/requirements/coverage.txt
+++ /dev/null
@@ -1 +0,0 @@
-coverage
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt
deleted file mode 100644
index aa2f71cc3e..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.aws.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-boto
-boto3
-botocore
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt
deleted file mode 100644
index 6df1a4e827..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt
+++ /dev/null
@@ -1,39 +0,0 @@
-packaging
-requests[security]
-xmltodict
-azure-cli-core==2.0.35
-azure-cli-nspkg==3.0.2
-azure-common==1.1.11
-azure-mgmt-authorization==0.51.1
-azure-mgmt-batch==5.0.1
-azure-mgmt-cdn==3.0.0
-azure-mgmt-compute==10.0.0
-azure-mgmt-containerinstance==1.4.0
-azure-mgmt-containerregistry==2.0.0
-azure-mgmt-containerservice==4.4.0
-azure-mgmt-dns==2.1.0
-azure-mgmt-keyvault==1.1.0
-azure-mgmt-marketplaceordering==0.1.0
-azure-mgmt-monitor==0.5.2
-azure-mgmt-network==4.0.0
-azure-mgmt-nspkg==2.0.0
-azure-mgmt-redis==5.0.0
-azure-mgmt-resource==2.1.0
-azure-mgmt-rdbms==1.4.1
-azure-mgmt-servicebus==0.5.3
-azure-mgmt-sql==0.10.0
-azure-mgmt-storage==3.1.0
-azure-mgmt-trafficmanager==0.50.0
-azure-mgmt-web==0.41.0
-azure-nspkg==2.0.0
-azure-storage==0.35.1
-msrest==0.6.10
-msrestazure==0.6.2
-azure-keyvault==1.0.0a1
-azure-graphrbac==0.40.0
-azure-mgmt-cosmosdb==0.5.2
-azure-mgmt-hdinsight==0.1.0
-azure-mgmt-devtestlabs==3.0.0
-azure-mgmt-loganalytics==0.2.0
-azure-mgmt-automation==0.1.1
-azure-mgmt-iothub==0.7.0
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt
deleted file mode 100644
index f0a89b9126..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.cs.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-cs
-sshpubkeys
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt
deleted file mode 100644
index a6580e69c0..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.hcloud.txt
+++ /dev/null
@@ -1 +0,0 @@
-hcloud>=1.6.0 ; python_version >= '2.7' and python_version < '3.9' # Python 2.6 is not supported (sanity_ok); Only hcloud >= 1.6.0 supports Floating IPs with names; Python 3.9 and later are not supported
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt
deleted file mode 100644
index be61145433..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.nios.txt
+++ /dev/null
@@ -1 +0,0 @@
-infoblox-client
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt
deleted file mode 100644
index acd346682c..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.opennebula.txt
+++ /dev/null
@@ -1 +0,0 @@
-pyone \ No newline at end of file
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt
deleted file mode 100644
index 269bf090c7..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.openshift.txt
+++ /dev/null
@@ -1 +0,0 @@
-openshift
diff --git a/test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt b/test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt
deleted file mode 100644
index fd8f13982b..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.cloud.vcenter.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pyvmomi
-git+https://github.com/vmware/vsphere-automation-sdk-python.git ; python_version >= '2.7' # Python 2.6 is not supported
diff --git a/test/lib/ansible_test/_data/requirements/integration.txt b/test/lib/ansible_test/_data/requirements/integration.txt
deleted file mode 100644
index 71d78dbc55..0000000000
--- a/test/lib/ansible_test/_data/requirements/integration.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-cryptography
-jinja2
-junit-xml
-ordereddict ; python_version < '2.7'
-packaging
-pyyaml
-resolvelib
diff --git a/test/lib/ansible_test/_data/requirements/network-integration.txt b/test/lib/ansible_test/_data/requirements/network-integration.txt
deleted file mode 100644
index 726d29438f..0000000000
--- a/test/lib/ansible_test/_data/requirements/network-integration.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-cryptography
-jinja2
-junit-xml
-ordereddict ; python_version < '2.7' # ansible-test junit callback plugin requirement
-packaging
-paramiko
-pyyaml
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
index c910f106a8..660620dc7b 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.ansible-doc.txt
@@ -1,3 +1,7 @@
-jinja2 # ansible-core requirement
-pyyaml # ansible-core requirement
-packaging # ansible-doc requirement
+jinja2 == 3.0.1 # ansible-core requirement
+pyyaml == 5.4.1 # ansible-core requirement
+packaging == 21.0 # ansible-doc requirement
+
+# dependencies
+MarkupSafe == 2.0.1
+pyparsing == 2.4.7
diff --git a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
index a346a8d93e..cb9f02f873 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.changelog.txt
@@ -1 +1,9 @@
antsibull-changelog == 0.9.0
+
+# dependencies
+pyyaml == 5.4.1
+docutils == 0.17.1
+packaging == 21.0
+pyparsing == 2.4.7
+rstcheck == 3.3.1
+semantic-version == 2.8.5
diff --git a/test/lib/ansible_test/_data/requirements/sanity.import.txt b/test/lib/ansible_test/_data/requirements/sanity.import.txt
deleted file mode 100644
index 17e375ce56..0000000000
--- a/test/lib/ansible_test/_data/requirements/sanity.import.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-pyyaml # required for the collection loader to parse yaml for plugin routing
-virtualenv ; python_version <= '2.7' # virtualenv required on Python 2.x, but on Python 3.x we can use the built-in venv instead
diff --git a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
index 7044777a15..cc530e42c2 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.integration-aliases.txt
@@ -1 +1 @@
-pyyaml # not frozen due to usage outside sanity tests
+pyyaml == 5.4.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.ps1 b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
index 1ea1f8e550..79ee81526f 100755..100644
--- a/test/lib/ansible_test/_data/requirements/sanity.ps1
+++ b/test/lib/ansible_test/_data/requirements/sanity.pslint.ps1
@@ -1,4 +1,3 @@
-#!/usr/bin/env pwsh
param (
[Switch]
$IsContainer
diff --git a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
index 789e270c68..7332d16238 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.pylint.txt
@@ -1,5 +1,5 @@
pylint == 2.9.3
-pyyaml # needed for collection_detail.py
+pyyaml == 5.4.1 # needed for collection_detail.py
# dependencies
astroid == 2.6.6
diff --git a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
index 1eaef006e9..1281a04528 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.runtime-metadata.txt
@@ -1,2 +1,2 @@
-pyyaml # not frozen due to usage outside sanity tests
+pyyaml == 5.4.1
voluptuous == 0.12.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
index 8288b14b73..4b1d5f05d0 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.validate-modules.txt
@@ -1,3 +1,6 @@
-jinja2 # ansible-core requirement
-pyyaml # needed for collection_detail.py
+jinja2 == 3.0.1 # ansible-core requirement
+pyyaml == 5.4.1 # needed for collection_detail.py
voluptuous == 0.12.1
+
+# dependencies
+MarkupSafe == 2.0.1
diff --git a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
index e0eac4e790..6738486394 100644
--- a/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
+++ b/test/lib/ansible_test/_data/requirements/sanity.yamllint.txt
@@ -1,5 +1,5 @@
yamllint == 1.26.0
# dependencies
-pathspec # not frozen since it should not impact test results
-pyyaml # not frozen due to usage outside sanity tests
+pathspec == 0.9.0
+pyyaml == 5.4.1
diff --git a/test/lib/ansible_test/_data/requirements/units.txt b/test/lib/ansible_test/_data/requirements/units.txt
index b237a5c38c..d723a65fc6 100644
--- a/test/lib/ansible_test/_data/requirements/units.txt
+++ b/test/lib/ansible_test/_data/requirements/units.txt
@@ -1,8 +1,5 @@
-cryptography
-jinja2
mock
pytest
pytest-mock
pytest-xdist
-pyyaml
-resolvelib
+pyyaml # required by the collection loader (only needed for collections)
diff --git a/test/lib/ansible_test/_data/requirements/windows-integration.txt b/test/lib/ansible_test/_data/requirements/windows-integration.txt
index 86de35eee8..b3554dea2c 100644
--- a/test/lib/ansible_test/_data/requirements/windows-integration.txt
+++ b/test/lib/ansible_test/_data/requirements/windows-integration.txt
@@ -1,11 +1,5 @@
-cryptography
-jinja2
-junit-xml
ntlm-auth
-ordereddict ; python_version < '2.7' # ansible-test junit callback plugin requirement
requests-ntlm
requests-credssp
-packaging
pypsrp
pywinrm[credssp]
-pyyaml
diff --git a/test/lib/ansible_test/_internal/__init__.py b/test/lib/ansible_test/_internal/__init__.py
index 35f044224c..b9d0e98e5d 100644
--- a/test/lib/ansible_test/_internal/__init__.py
+++ b/test/lib/ansible_test/_internal/__init__.py
@@ -1,3 +1,93 @@
-"""Support code for Ansible testing infrastructure."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+"""Test runner for all Ansible tests."""
+from __future__ import annotations
+
+import os
+import sys
+
+# This import should occur as early as possible.
+# It must occur before subprocess has been imported anywhere in the current process.
+from .init import (
+ CURRENT_RLIMIT_NOFILE,
+)
+
+from .util import (
+ ApplicationError,
+ display,
+ MAXFD,
+)
+
+from .delegation import (
+ delegate,
+)
+
+from .executor import (
+ ApplicationWarning,
+ Delegate,
+ ListTargets,
+)
+
+from .timeout import (
+ configure_timeout,
+)
+
+from .data import (
+ data_context,
+)
+
+from .util_common import (
+ CommonConfig,
+)
+
+from .cli import (
+ parse_args,
+)
+
+
+def main():
+ """Main program function."""
+ try:
+ os.chdir(data_context().content.root)
+ args = parse_args()
+ config = args.config(args) # type: CommonConfig
+ display.verbosity = config.verbosity
+ display.truncate = config.truncate
+ display.redact = config.redact
+ display.color = config.color
+ display.info_stderr = config.info_stderr
+ configure_timeout(config)
+
+ display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE,), verbosity=2)
+ display.info('MAXFD: %d' % MAXFD, verbosity=2)
+
+ delegate_args = None
+ target_names = None
+
+ try:
+ args.func(config)
+ except ListTargets as ex:
+ # save target_names for use once we exit the exception handler
+ target_names = ex.target_names
+ except Delegate as ex:
+ # save delegation args for use once we exit the exception handler
+ delegate_args = (ex.host_state, ex.exclude, ex.require)
+
+ if delegate_args:
+ # noinspection PyTypeChecker
+ delegate(config, *delegate_args)
+
+ if target_names:
+ for target_name in target_names:
+ print(target_name) # info goes to stderr, this should be on stdout
+
+ display.review_warnings()
+ config.success = True
+ except ApplicationWarning as ex:
+ display.warning(u'%s' % ex)
+ sys.exit(0)
+ except ApplicationError as ex:
+ display.error(u'%s' % ex)
+ sys.exit(1)
+ except KeyboardInterrupt:
+ sys.exit(2)
+ except BrokenPipeError:
+ sys.exit(3)
diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py
index 614e8aacb7..a3446743f2 100644
--- a/test/lib/ansible_test/_internal/ansible_util.py
+++ b/test/lib/ansible_test/_internal/ansible_util.py
@@ -1,11 +1,9 @@
"""Miscellaneous utility functions and classes specific to ansible cli tools."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
-
-from . import types as t
+import typing as t
from .constants import (
SOFT_RLIMIT_NOFILE,
@@ -17,8 +15,6 @@ from .io import (
from .util import (
common_environment,
- display,
- find_python,
ApplicationError,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_DATA_ROOT,
@@ -33,7 +29,7 @@ from .util_common import (
create_temp_dir,
run_command,
ResultType,
- intercept_command,
+ intercept_python,
)
from .config import (
@@ -47,7 +43,30 @@ from .data import (
data_context,
)
-CHECK_YAML_VERSIONS = {}
+from .python_requirements import (
+ install_requirements,
+)
+
+from .host_configs import (
+ PythonConfig,
+)
+
+
+def parse_inventory(args, inventory_path): # type: (EnvironmentConfig, str) -> t.Dict[str, t.Any]
+ """Return a dict parsed from the given inventory file."""
+ cmd = ['ansible-inventory', '-i', inventory_path, '--list']
+ env = ansible_environment(args)
+ inventory = json.loads(intercept_python(args, args.controller_python, cmd, env, capture=True, always=True)[0])
+ return inventory
+
+
+def get_hosts(inventory, group_name): # type: (t.Dict[str, t.Any], str) -> t.Dict[str, t.Dict[str, t.Any]]
+ """Return a dict of hosts from the specified group in the given inventory."""
+ hostvars = inventory.get('_meta', {}).get('hostvars', {})
+ group = inventory.get(group_name, {})
+ host_names = group.get('hosts', [])
+ hosts = dict((name, hostvars.get(name, {})) for name in host_names)
+ return hosts
def ansible_environment(args, color=True, ansible_config=None):
@@ -233,41 +252,6 @@ License: GPLv3+
write_text_file(pkg_info_path, pkg_info.lstrip(), create_directories=True)
-def check_pyyaml(args, version, required=True, quiet=False):
- """
- :type args: EnvironmentConfig
- :type version: str
- :type required: bool
- :type quiet: bool
- """
- try:
- return CHECK_YAML_VERSIONS[version]
- except KeyError:
- pass
-
- python = find_python(version)
- stdout, _dummy = run_command(args, [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'yamlcheck.py')],
- capture=True, always=True)
-
- result = json.loads(stdout)
-
- yaml = result['yaml']
- cloader = result['cloader']
-
- if yaml or required:
- # results are cached only if pyyaml is required or present
- # it is assumed that tests will not uninstall/re-install pyyaml -- if they do, those changes will go undetected
- CHECK_YAML_VERSIONS[version] = result
-
- if not quiet:
- if not yaml and required:
- display.warning('PyYAML is not installed for interpreter: %s' % python)
- elif not cloader:
- display.warning('PyYAML will be slow due to installation without libyaml support for interpreter: %s' % python)
-
- return result
-
-
class CollectionDetail:
"""Collection detail."""
def __init__(self): # type: () -> None
@@ -277,16 +261,16 @@ class CollectionDetail:
class CollectionDetailError(ApplicationError):
"""An error occurred retrieving collection detail."""
def __init__(self, reason): # type: (str) -> None
- super(CollectionDetailError, self).__init__('Error collecting collection detail: %s' % reason)
+ super().__init__('Error collecting collection detail: %s' % reason)
self.reason = reason
-def get_collection_detail(args, python): # type: (EnvironmentConfig, str) -> CollectionDetail
+def get_collection_detail(args, python): # type: (EnvironmentConfig, PythonConfig) -> CollectionDetail
"""Return collection detail."""
collection = data_context().content.collection
directory = os.path.join(collection.root, collection.directory)
- stdout = run_command(args, [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'collection_detail.py'), directory], capture=True, always=True)[0]
+ stdout = run_command(args, [python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'collection_detail.py'), directory], capture=True, always=True)[0]
result = json.loads(stdout)
error = result.get('error')
@@ -301,16 +285,23 @@ def get_collection_detail(args, python): # type: (EnvironmentConfig, str) -> Co
return detail
-def run_playbook(args, inventory_path, playbook, run_playbook_vars=None): # type: (CommonConfig, str, str, t.Optional[t.Dict[str, t.Any]]) -> None
+def run_playbook(
+ args, # type: EnvironmentConfig
+ inventory_path, # type: str
+ playbook, # type: str
+ run_playbook_vars=None, # type: t.Optional[t.Dict[str, t.Any]]
+ capture=False, # type: bool
+): # type: (...) -> None
"""Run the specified playbook using the given inventory file and playbook variables."""
playbook_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'playbooks', playbook)
- command = ['ansible-playbook', '-i', inventory_path, playbook_path]
+ cmd = ['ansible-playbook', '-i', inventory_path, playbook_path]
if run_playbook_vars:
- command.extend(['-e', json.dumps(run_playbook_vars)])
+ cmd.extend(['-e', json.dumps(run_playbook_vars)])
if args.verbosity:
- command.append('-%s' % ('v' * args.verbosity))
+ cmd.append('-%s' % ('v' * args.verbosity))
+ install_requirements(args, args.controller_python, ansible=True) # run_playbook()
env = ansible_environment(args)
- intercept_command(args, command, '', env, disable_coverage=True)
+ intercept_python(args, args.controller_python, cmd, env, capture=capture)
diff --git a/test/lib/ansible_test/_internal/become.py b/test/lib/ansible_test/_internal/become.py
new file mode 100644
index 0000000000..dc0a208a62
--- /dev/null
+++ b/test/lib/ansible_test/_internal/become.py
@@ -0,0 +1,52 @@
+"""Become abstraction for interacting with test hosts."""
+from __future__ import annotations
+
+import abc
+import shlex
+import typing as t
+
+
+class Become(metaclass=abc.ABCMeta):
+ """Base class for become implementations."""
+ @property
+ @abc.abstractmethod
+ def method(self): # type: () -> str
+ """The name of the Ansible become plugin that is equivalent to this."""
+
+ @abc.abstractmethod
+ def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
+ """Return the given command, if any, with privilege escalation."""
+
+
+class Su(Become):
+ """Become using 'su'."""
+ @property
+ def method(self): # type: () -> str
+ """The name of the Ansible become plugin that is equivalent to this."""
+ return 'su'
+
+ def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
+ """Return the given command, if any, with privilege escalation."""
+ become = ['su', '-l', 'root']
+
+ if command:
+ become.extend(['-c', ' '.join(shlex.quote(c) for c in command)])
+
+ return become
+
+
+class Sudo(Become):
+ """Become using 'sudo'."""
+ @property
+ def method(self): # type: () -> str
+ """The name of the Ansible become plugin that is equivalent to this."""
+ return 'sudo'
+
+ def prepare_command(self, command): # type: (t.List[str]) -> t.List[str]
+ """Return the given command, if any, with privilege escalation."""
+ become = ['sudo', '-in']
+
+ if command:
+ become.extend(['sh', '-c', ' '.join(shlex.quote(c) for c in command)])
+
+ return become
diff --git a/test/lib/ansible_test/_internal/bootstrap.py b/test/lib/ansible_test/_internal/bootstrap.py
new file mode 100644
index 0000000000..6f675aeb8f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/bootstrap.py
@@ -0,0 +1,91 @@
+"""Bootstrapping for test hosts."""
+from __future__ import annotations
+
+import dataclasses
+import os
+import typing as t
+
+from .io import (
+ read_text_file,
+)
+
+from .util import (
+ ANSIBLE_TEST_TARGET_ROOT,
+)
+
+from .util_common import (
+ ShellScriptTemplate,
+)
+
+from .core_ci import (
+ SshKey,
+)
+
+
+@dataclasses.dataclass
+class Bootstrap:
+ """Base class for bootstrapping systems."""
+ controller: bool
+ python_versions: t.List[str]
+ ssh_key: SshKey
+
+ @property
+ def bootstrap_type(self): # type: () -> str
+ """The bootstrap type to pass to the bootstrapping script."""
+ return self.__class__.__name__.replace('Bootstrap', '').lower()
+
+ def get_variables(self): # type: () -> t.Dict[str, str]
+ """The variables to template in the boostrapping script."""
+ return dict(
+ bootstrap_type=self.bootstrap_type,
+ controller='yes' if self.controller else '',
+ python_versions=self.python_versions,
+ ssh_key_type=self.ssh_key.KEY_TYPE,
+ ssh_private_key=self.ssh_key.key_contents,
+ ssh_public_key=self.ssh_key.pub_contents,
+ )
+
+ def get_script(self): # type: () -> str
+ """Return a shell script to bootstrap the specified host."""
+ path = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'bootstrap.sh')
+ content = read_text_file(path)
+ template = ShellScriptTemplate(content)
+
+ variables = self.get_variables()
+
+ script = template.substitute(**variables)
+
+ return script
+
+
+@dataclasses.dataclass
+class BootstrapDocker(Bootstrap):
+ """Bootstrap docker instances."""
+ def get_variables(self): # type: () -> t.Dict[str, str]
+ """The variables to template in the boostrapping script."""
+ variables = super().get_variables()
+
+ variables.update(
+ platform='',
+ platform_version='',
+ )
+
+ return variables
+
+
+@dataclasses.dataclass
+class BootstrapRemote(Bootstrap):
+ """Bootstrap remote instances."""
+ platform: str
+ platform_version: str
+
+ def get_variables(self): # type: () -> t.Dict[str, str]
+ """The variables to template in the boostrapping script."""
+ variables = super().get_variables()
+
+ variables.update(
+ platform=self.platform,
+ platform_version=self.platform_version,
+ )
+
+ return variables
diff --git a/test/lib/ansible_test/_internal/cache.py b/test/lib/ansible_test/_internal/cache.py
index 85fdbb1f1b..50a6f5e57b 100644
--- a/test/lib/ansible_test/_internal/cache.py
+++ b/test/lib/ansible_test/_internal/cache.py
@@ -1,34 +1,29 @@
"""Cache for commonly shared data that is intended to be immutable."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import typing as t
+
+from .config import (
+ CommonConfig,
+)
+
+TValue = t.TypeVar('TValue')
class CommonCache:
"""Common cache."""
- def __init__(self, args):
- """
- :param args: CommonConfig
- """
+ def __init__(self, args): # type: (CommonConfig) -> None
self.args = args
- def get(self, key, factory):
- """
- :param key: str
- :param factory: () -> any
- :rtype: any
- """
+ def get(self, key, factory): # type: (str, t.Callable[[], TValue]) -> TValue
+ """Return the value from the cache identified by the given key, using the specified factory method if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory()
return self.args.cache[key]
- def get_with_args(self, key, factory):
- """
- :param key: str
- :param factory: (CommonConfig) -> any
- :rtype: any
- """
-
+ def get_with_args(self, key, factory): # type: (str, t.Callable[[CommonConfig], TValue]) -> TValue
+ """Return the value from the cache identified by the given key, using the specified factory method (which accepts args) if it is not found."""
if key not in self.args.cache:
self.args.cache[key] = factory(self.args)
diff --git a/test/lib/ansible_test/_internal/ci/__init__.py b/test/lib/ansible_test/_internal/ci/__init__.py
index 18a097933a..db5ca501f4 100644
--- a/test/lib/ansible_test/_internal/ci/__init__.py
+++ b/test/lib/ansible_test/_internal/ci/__init__.py
@@ -1,15 +1,12 @@
"""Support code for CI environments."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import base64
import json
import os
import tempfile
-
-
-from .. import types as t
+import typing as t
from ..encoding import (
to_bytes,
@@ -27,12 +24,12 @@ from ..config import (
)
from ..util import (
- ABC,
ApplicationError,
display,
get_subclasses,
import_plugins,
raw_command,
+ cache,
)
@@ -40,13 +37,7 @@ class ChangeDetectionNotSupported(ApplicationError):
"""Exception for cases where change detection is not supported."""
-class AuthContext:
- """Context information required for Ansible Core CI authentication."""
- def __init__(self): # type: () -> None
- pass
-
-
-class CIProvider(ABC):
+class CIProvider(metaclass=abc.ABCMeta):
"""Base class for CI provider plugins."""
priority = 500
@@ -78,11 +69,11 @@ class CIProvider(ABC):
"""Initialize change detection."""
@abc.abstractmethod
- def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
+ def supports_core_ci_auth(self): # type: () -> bool
"""Return True if Ansible Core CI is supported."""
@abc.abstractmethod
- def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
+ def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
@abc.abstractmethod
@@ -90,13 +81,9 @@ class CIProvider(ABC):
"""Return details about git in the current environment."""
+@cache
def get_ci_provider(): # type: () -> CIProvider
"""Return a CI provider instance for the current environment."""
- try:
- return get_ci_provider.provider
- except AttributeError:
- pass
-
provider = None
import_plugins('ci')
@@ -111,12 +98,10 @@ def get_ci_provider(): # type: () -> CIProvider
if provider.code:
display.info('Detected CI provider: %s' % provider.name)
- get_ci_provider.provider = provider
-
return provider
-class AuthHelper(ABC):
+class AuthHelper(metaclass=abc.ABCMeta):
"""Public key based authentication helper for Ansible Core CI."""
def sign_request(self, request): # type: (t.Dict[str, t.Any]) -> None
"""Sign the given auth request and make the public key available."""
@@ -154,7 +139,7 @@ class AuthHelper(ABC):
"""Generate a new key pair, publishing the public key and returning the private key."""
-class CryptographyAuthHelper(AuthHelper, ABC): # pylint: disable=abstract-method
+class CryptographyAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""Cryptography based public key based authentication helper for Ansible Core CI."""
def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes
"""Sign the given payload and return the signature, initializing a new key pair if required."""
@@ -199,7 +184,7 @@ class CryptographyAuthHelper(AuthHelper, ABC): # pylint: disable=abstract-metho
return private_key_pem
-class OpenSSLAuthHelper(AuthHelper, ABC): # pylint: disable=abstract-method
+class OpenSSLAuthHelper(AuthHelper, metaclass=abc.ABCMeta):
"""OpenSSL based public key based authentication helper for Ansible Core CI."""
def sign_bytes(self, payload_bytes): # type: (bytes) -> bytes
"""Sign the given payload and return the signature, initializing a new key pair if required."""
diff --git a/test/lib/ansible_test/_internal/ci/azp.py b/test/lib/ansible_test/_internal/ci/azp.py
index 9d569816b4..d5b3999ae3 100644
--- a/test/lib/ansible_test/_internal/ci/azp.py
+++ b/test/lib/ansible_test/_internal/ci/azp.py
@@ -1,12 +1,11 @@
"""Support code for working with Azure Pipelines."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import tempfile
import uuid
-
-from .. import types as t
+import typing as t
+import urllib.parse
from ..encoding import (
to_bytes,
@@ -23,7 +22,6 @@ from ..git import (
from ..http import (
HttpClient,
- urlencode,
)
from ..util import (
@@ -32,7 +30,6 @@ from ..util import (
)
from . import (
- AuthContext,
ChangeDetectionNotSupported,
CIProvider,
CryptographyAuthHelper,
@@ -105,11 +102,11 @@ class AzurePipelines(CIProvider):
return result.paths
- def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
+ def supports_core_ci_auth(self): # type: () -> bool
"""Return True if Ansible Core CI is supported."""
return True
- def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
+ def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
try:
request = dict(
@@ -227,7 +224,7 @@ class AzurePipelinesChanges:
repositoryId='%s/%s' % (self.org, self.project),
)
- url = '%s%s/_apis/build/builds?api-version=6.0&%s' % (self.org_uri, self.project, urlencode(parameters))
+ url = '%s%s/_apis/build/builds?api-version=6.0&%s' % (self.org_uri, self.project, urllib.parse.urlencode(parameters))
http = HttpClient(self.args, always=True)
response = http.get(url)
diff --git a/test/lib/ansible_test/_internal/ci/local.py b/test/lib/ansible_test/_internal/ci/local.py
index 5886601b96..e127753381 100644
--- a/test/lib/ansible_test/_internal/ci/local.py
+++ b/test/lib/ansible_test/_internal/ci/local.py
@@ -1,13 +1,11 @@
"""Support code for working without a supported CI provider."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import platform
import random
import re
-
-from .. import types as t
+import typing as t
from ..config import (
CommonConfig,
@@ -30,7 +28,6 @@ from ..util import (
)
from . import (
- AuthContext,
CIProvider,
)
@@ -119,12 +116,12 @@ class Local(CIProvider):
return sorted(names)
- def supports_core_ci_auth(self, context): # type: (AuthContext) -> bool
+ def supports_core_ci_auth(self): # type: () -> bool
"""Return True if Ansible Core CI is supported."""
path = self._get_aci_key_path()
return os.path.exists(path)
- def prepare_core_ci_auth(self, context): # type: (AuthContext) -> t.Dict[str, t.Any]
+ def prepare_core_ci_auth(self): # type: () -> t.Dict[str, t.Any]
"""Return authentication details for Ansible Core CI."""
path = self._get_aci_key_path()
auth_key = read_text_file(path).strip()
@@ -144,7 +141,8 @@ class Local(CIProvider):
"""Return details about git in the current environment."""
return None # not yet implemented for local
- def _get_aci_key_path(self): # type: () -> str
+ @staticmethod
+ def _get_aci_key_path(): # type: () -> str
path = os.path.expanduser('~/.ansible-core-ci.key')
return path
@@ -154,7 +152,7 @@ class InvalidBranch(ApplicationError):
def __init__(self, branch, reason): # type: (str, str) -> None
message = 'Invalid branch: %s\n%s' % (branch, reason)
- super(InvalidBranch, self).__init__(message)
+ super().__init__(message)
self.branch = branch
diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py
index 4a3daef701..896164fe8e 100644
--- a/test/lib/ansible_test/_internal/classification/__init__.py
+++ b/test/lib/ansible_test/_internal/classification/__init__.py
@@ -1,13 +1,11 @@
"""Classify changes in Ansible code."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import collections
import os
import re
import time
-
-from .. import types as t
+import typing as t
from ..target import (
walk_module_targets,
@@ -160,7 +158,7 @@ def categorize_changes(args, paths, verbose_command=None):
targets.discard('none')
if any(target == 'all' for target in targets):
- commands[command] = set(['all'])
+ commands[command] = {'all'}
commands = dict((c, sorted(targets)) for c, targets in commands.items() if targets)
focused_commands = dict((c, sorted(targets)) for c, targets in focused_commands.items())
@@ -716,11 +714,6 @@ class PathMapper:
if path.startswith('changelogs/'):
return minimal
- if path.startswith('contrib/'):
- return {
- 'units': 'test/units/contrib/'
- }
-
if path.startswith('docs/'):
return minimal
@@ -753,22 +746,6 @@ class PathMapper:
return minimal
if path.startswith('packaging/'):
- if path.startswith('packaging/requirements/'):
- if name.startswith('requirements-') and ext == '.txt':
- component = name.split('-', 1)[1]
-
- candidates = (
- 'cloud/%s/' % component,
- )
-
- for candidate in candidates:
- if candidate in self.integration_targets_by_alias:
- return {
- 'integration': candidate,
- }
-
- return all_tests(self.args) # broad impact, run all tests
-
return minimal
if path.startswith('test/ansible_test/'):
@@ -828,14 +805,6 @@ class PathMapper:
name: 'all',
}
- if name.startswith('integration.cloud.'):
- cloud_target = 'cloud/%s/' % name.split('.')[2]
-
- if cloud_target in self.integration_targets_by_alias:
- return {
- 'integration': cloud_target,
- }
-
if path.startswith('test/lib/ansible_test/_util/controller/sanity/') or path.startswith('test/lib/ansible_test/_util/target/sanity/'):
return {
'sanity': 'all', # test infrastructure, run all sanity checks
diff --git a/test/lib/ansible_test/_internal/classification/common.py b/test/lib/ansible_test/_internal/classification/common.py
new file mode 100644
index 0000000000..b206bee3f9
--- /dev/null
+++ b/test/lib/ansible_test/_internal/classification/common.py
@@ -0,0 +1,29 @@
+"""Common classification code used by multiple languages."""
+from __future__ import annotations
+
+import os
+
+from ..data import (
+ data_context,
+)
+
+
+def resolve_csharp_ps_util(import_name, path):
+ """
+ :type import_name: str
+ :type path: str
+ """
+ if data_context().content.is_ansible or not import_name.startswith('.'):
+ # We don't support relative paths for builtin utils, there's no point.
+ return import_name
+
+ packages = import_name.split('.')
+ module_packages = path.split(os.path.sep)
+
+ for package in packages:
+ if not module_packages or package:
+ break
+ del module_packages[-1]
+
+ return 'ansible_collections.%s%s' % (data_context().content.prefix,
+ '.'.join(module_packages + [p for p in packages if p]))
diff --git a/test/lib/ansible_test/_internal/classification/csharp.py b/test/lib/ansible_test/_internal/classification/csharp.py
index a6229ec0f3..57de2c5e83 100644
--- a/test/lib/ansible_test/_internal/classification/csharp.py
+++ b/test/lib/ansible_test/_internal/classification/csharp.py
@@ -1,9 +1,9 @@
"""Analyze C# import statements."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import re
+import typing as t
from ..io import (
open_text_file,
@@ -13,7 +13,7 @@ from ..util import (
display,
)
-from ..util_common import (
+from .common import (
resolve_csharp_ps_util,
)
@@ -39,7 +39,7 @@ def get_csharp_module_utils_imports(powershell_targets, csharp_targets):
for target in csharp_targets:
imports_by_target_path[target.path] = extract_csharp_module_utils_imports(target.path, module_utils, True)
- imports = dict([(module_util, set()) for module_util in module_utils])
+ imports = {module_util: set() for module_util in module_utils} # type: t.Dict[str, t.Set[str]]
for target_path, modules in imports_by_target_path.items():
for module_util in modules:
diff --git a/test/lib/ansible_test/_internal/classification/powershell.py b/test/lib/ansible_test/_internal/classification/powershell.py
index 67e9efa778..9dbd9d809d 100644
--- a/test/lib/ansible_test/_internal/classification/powershell.py
+++ b/test/lib/ansible_test/_internal/classification/powershell.py
@@ -1,9 +1,9 @@
"""Analyze powershell import statements."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import re
+import typing as t
from ..io import (
read_text_file,
@@ -13,7 +13,7 @@ from ..util import (
display,
)
-from ..util_common import (
+from .common import (
resolve_csharp_ps_util,
)
@@ -35,7 +35,7 @@ def get_powershell_module_utils_imports(powershell_targets):
for target in powershell_targets:
imports_by_target_path[target.path] = extract_powershell_module_utils_imports(target.path, module_utils)
- imports = dict([(module_util, set()) for module_util in module_utils])
+ imports = {module_util: set() for module_util in module_utils} # type: t.Dict[str, t.Set[str]]
for target_path, modules in imports_by_target_path.items():
for module_util in modules:
diff --git a/test/lib/ansible_test/_internal/classification/python.py b/test/lib/ansible_test/_internal/classification/python.py
index 1e3505db5a..d4171d053d 100644
--- a/test/lib/ansible_test/_internal/classification/python.py
+++ b/test/lib/ansible_test/_internal/classification/python.py
@@ -1,12 +1,10 @@
"""Analyze python import statements."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import ast
import os
import re
-
-from .. import types as t
+import typing as t
from ..io import (
read_binary_file,
@@ -22,9 +20,9 @@ from ..data import (
data_context,
)
-VIRTUAL_PACKAGES = set([
+VIRTUAL_PACKAGES = {
'ansible.module_utils.six',
-])
+}
def get_python_module_utils_imports(compile_targets):
@@ -48,9 +46,9 @@ def get_python_module_utils_imports(compile_targets):
display.info('module_utils import: %s%s' % (' ' * depth, import_name), verbosity=4)
if seen is None:
- seen = set([import_name])
+ seen = {import_name}
- results = set([import_name])
+ results = {import_name}
# virtual packages depend on the modules they contain instead of the reverse
if import_name in VIRTUAL_PACKAGES:
@@ -104,7 +102,7 @@ def get_python_module_utils_imports(compile_targets):
display.info('%s inherits import %s via %s' % (target_path, module_util_import, module_util), verbosity=6)
modules.add(module_util_import)
- imports = dict([(module_util, set()) for module_util in module_utils | virtual_utils])
+ imports = {module_util: set() for module_util in module_utils | virtual_utils} # type: t.Dict[str, t.Set[str]]
for target_path, modules in imports_by_target_path.items():
for module_util in modules:
diff --git a/test/lib/ansible_test/_internal/cli.py b/test/lib/ansible_test/_internal/cli.py
deleted file mode 100644
index 2cc78f5236..0000000000
--- a/test/lib/ansible_test/_internal/cli.py
+++ /dev/null
@@ -1,1216 +0,0 @@
-"""Test runner for all Ansible tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import errno
-import os
-import sys
-
-# This import should occur as early as possible.
-# It must occur before subprocess has been imported anywhere in the current process.
-from .init import (
- CURRENT_RLIMIT_NOFILE,
-)
-
-from . import types as t
-
-from .util import (
- ApplicationError,
- display,
- raw_command,
- generate_pip_command,
- read_lines_without_comments,
- MAXFD,
- ANSIBLE_TEST_DATA_ROOT,
- SUPPORTED_PYTHON_VERSIONS,
-)
-
-from .delegation import (
- check_delegation_args,
- delegate,
-)
-
-from .executor import (
- ApplicationWarning,
- Delegate,
- generate_pip_install,
- configure_pypi_proxy,
-)
-
-from .commands.integration.posix import (
- command_posix_integration,
-)
-
-from .commands.integration.network import (
- command_network_integration,
-)
-
-from .commands.integration.windows import (
- command_windows_integration,
-)
-
-from .commands.shell import (
- command_shell,
-)
-
-from .config import (
- PosixIntegrationConfig,
- WindowsIntegrationConfig,
- NetworkIntegrationConfig,
- SanityConfig,
- UnitsConfig,
- ShellConfig,
-)
-
-from .commands.env import (
- EnvConfig,
- command_env,
- configure_timeout,
-)
-
-from .commands.sanity import (
- command_sanity,
- sanity_init,
- sanity_get_tests,
-)
-
-from .commands.units import (
- command_units,
-)
-
-from .target import (
- find_target_completion,
- walk_posix_integration_targets,
- walk_network_integration_targets,
- walk_windows_integration_targets,
- walk_units_targets,
- walk_sanity_targets,
-)
-
-from .commands.integration.cloud import (
- initialize_cloud_plugins,
-)
-
-from .core_ci import (
- AnsibleCoreCI,
-)
-
-from .data import (
- data_context,
-)
-
-from .util_common import (
- get_docker_completion,
- get_network_completion,
- get_remote_completion,
- CommonConfig,
-)
-
-from .commands.coverage.combine import (
- command_coverage_combine,
- CoverageCombineConfig,
-)
-
-from .commands.coverage.erase import (
- command_coverage_erase,
- CoverageEraseConfig,
-)
-
-from .commands.coverage.html import (
- command_coverage_html,
- CoverageHtmlConfig,
-)
-
-from .commands.coverage.report import (
- command_coverage_report,
- CoverageReportConfig,
-)
-
-from .commands.coverage.xml import (
- command_coverage_xml,
- CoverageXmlConfig,
-)
-
-from .commands.coverage.analyze.targets.generate import (
- command_coverage_analyze_targets_generate,
- CoverageAnalyzeTargetsGenerateConfig,
-)
-
-from .commands.coverage.analyze.targets.expand import (
- command_coverage_analyze_targets_expand,
- CoverageAnalyzeTargetsExpandConfig,
-)
-
-from .commands.coverage.analyze.targets.filter import (
- command_coverage_analyze_targets_filter,
- CoverageAnalyzeTargetsFilterConfig,
-)
-
-from .commands.coverage.analyze.targets.combine import (
- command_coverage_analyze_targets_combine,
- CoverageAnalyzeTargetsCombineConfig,
-)
-
-from .commands.coverage.analyze.targets.missing import (
- command_coverage_analyze_targets_missing,
- CoverageAnalyzeTargetsMissingConfig,
-)
-
-from .commands.coverage import (
- COVERAGE_GROUPS,
-)
-
-if t.TYPE_CHECKING:
- import argparse as argparse_module
-
-
-def main():
- """Main program function."""
- try:
- os.chdir(data_context().content.root)
- initialize_cloud_plugins()
- sanity_init()
- args = parse_args()
- config = args.config(args) # type: CommonConfig
- display.verbosity = config.verbosity
- display.truncate = config.truncate
- display.redact = config.redact
- display.color = config.color
- display.info_stderr = config.info_stderr
- check_delegation_args(config)
- configure_timeout(config)
-
- display.info('RLIMIT_NOFILE: %s' % (CURRENT_RLIMIT_NOFILE,), verbosity=2)
- display.info('MAXFD: %d' % MAXFD, verbosity=2)
-
- try:
- configure_pypi_proxy(config)
- args.func(config)
- delegate_args = None
- except Delegate as ex:
- # save delegation args for use once we exit the exception handler
- delegate_args = (ex.exclude, ex.require)
-
- if delegate_args:
- # noinspection PyTypeChecker
- delegate(config, *delegate_args)
-
- display.review_warnings()
- except ApplicationWarning as ex:
- display.warning(u'%s' % ex)
- sys.exit(0)
- except ApplicationError as ex:
- display.error(u'%s' % ex)
- sys.exit(1)
- except KeyboardInterrupt:
- sys.exit(2)
- except IOError as ex:
- if ex.errno == errno.EPIPE:
- sys.exit(3)
- raise
-
-
-def parse_args():
- """Parse command line arguments."""
- try:
- import argparse
- except ImportError:
- if '--requirements' not in sys.argv:
- raise
- # install argparse without using constraints since pip may be too old to support them
- # not using the ansible-test requirements file since this install is for sys.executable rather than the delegated python (which may be different)
- # argparse has no special requirements, so upgrading pip is not required here
- raw_command(generate_pip_install(generate_pip_command(sys.executable), '', packages=['argparse'], use_constraints=False))
- import argparse
-
- try:
- import argcomplete
- except ImportError:
- argcomplete = None
-
- if argcomplete:
- epilog = 'Tab completion available using the "argcomplete" python package.'
- else:
- epilog = 'Install the "argcomplete" python package to enable tab completion.'
-
- def key_value_type(value): # type: (str) -> t.Tuple[str, str]
- """Wrapper around key_value."""
- return key_value(argparse, value)
-
- parser = argparse.ArgumentParser(epilog=epilog)
-
- common = argparse.ArgumentParser(add_help=False)
-
- common.add_argument('-e', '--explain',
- action='store_true',
- help='explain commands that would be executed')
-
- common.add_argument('-v', '--verbose',
- dest='verbosity',
- action='count',
- default=0,
- help='display more output')
-
- common.add_argument('--pypi-proxy',
- action='store_true',
- help=argparse.SUPPRESS) # internal use only
-
- common.add_argument('--pypi-endpoint',
- metavar='URI',
- default=None,
- help=argparse.SUPPRESS) # internal use only
-
- common.add_argument('--color',
- metavar='COLOR',
- nargs='?',
- help='generate color output: %(choices)s',
- choices=('yes', 'no', 'auto'),
- const='yes',
- default='auto')
-
- common.add_argument('--debug',
- action='store_true',
- help='run ansible commands in debug mode')
-
- # noinspection PyTypeChecker
- common.add_argument('--truncate',
- dest='truncate',
- metavar='COLUMNS',
- type=int,
- default=display.columns,
- help='truncate some long output (0=disabled) (default: auto)')
-
- common.add_argument('--redact',
- dest='redact',
- action='store_true',
- default=True,
- help='redact sensitive values in output')
-
- common.add_argument('--no-redact',
- dest='redact',
- action='store_false',
- default=False,
- help='show sensitive values in output')
-
- common.add_argument('--check-python',
- choices=SUPPORTED_PYTHON_VERSIONS,
- help=argparse.SUPPRESS)
-
- test = argparse.ArgumentParser(add_help=False, parents=[common])
-
- test.add_argument('include',
- metavar='TARGET',
- nargs='*',
- help='test the specified target').completer = complete_target
-
- test.add_argument('--include',
- metavar='TARGET',
- action='append',
- help='include the specified target').completer = complete_target
-
- test.add_argument('--exclude',
- metavar='TARGET',
- action='append',
- help='exclude the specified target').completer = complete_target
-
- test.add_argument('--require',
- metavar='TARGET',
- action='append',
- help='require the specified target').completer = complete_target
-
- test.add_argument('--coverage',
- action='store_true',
- help='analyze code coverage when running tests')
-
- test.add_argument('--coverage-label',
- default='',
- help='label to include in coverage output file names')
-
- test.add_argument('--coverage-check',
- action='store_true',
- help='only verify code coverage can be enabled')
-
- test.add_argument('--metadata',
- help=argparse.SUPPRESS)
-
- test.add_argument('--base-branch',
- help='base branch used for change detection')
-
- add_changes(test, argparse)
- add_environments(test, argparse)
-
- integration = argparse.ArgumentParser(add_help=False, parents=[test])
-
- integration.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- integration.add_argument('--start-at',
- metavar='TARGET',
- help='start at the specified target').completer = complete_target
-
- integration.add_argument('--start-at-task',
- metavar='TASK',
- help='start at the specified task')
-
- integration.add_argument('--tags',
- metavar='TAGS',
- help='only run plays and tasks tagged with these values')
-
- integration.add_argument('--skip-tags',
- metavar='TAGS',
- help='only run plays and tasks whose tags do not match these values')
-
- integration.add_argument('--diff',
- action='store_true',
- help='show diff output')
-
- integration.add_argument('--allow-destructive',
- action='store_true',
- help='allow destructive tests')
-
- integration.add_argument('--allow-root',
- action='store_true',
- help='allow tests requiring root when not root')
-
- integration.add_argument('--allow-disabled',
- action='store_true',
- help='allow tests which have been marked as disabled')
-
- integration.add_argument('--allow-unstable',
- action='store_true',
- help='allow tests which have been marked as unstable')
-
- integration.add_argument('--allow-unstable-changed',
- action='store_true',
- help='allow tests which have been marked as unstable when focused changes are detected')
-
- integration.add_argument('--allow-unsupported',
- action='store_true',
- help='allow tests which have been marked as unsupported')
-
- integration.add_argument('--retry-on-error',
- action='store_true',
- help='retry failed test with increased verbosity')
-
- integration.add_argument('--continue-on-error',
- action='store_true',
- help='continue after failed test')
-
- integration.add_argument('--debug-strategy',
- action='store_true',
- help='run test playbooks using the debug strategy')
-
- integration.add_argument('--changed-all-target',
- metavar='TARGET',
- default='all',
- help='target to run when all tests are needed')
-
- integration.add_argument('--changed-all-mode',
- metavar='MODE',
- choices=('default', 'include', 'exclude'),
- help='include/exclude behavior with --changed-all-target: %(choices)s')
-
- integration.add_argument('--list-targets',
- action='store_true',
- help='list matching targets instead of running tests')
-
- integration.add_argument('--no-temp-workdir',
- action='store_true',
- help='do not run tests from a temporary directory (use only for verifying broken tests)')
-
- integration.add_argument('--no-temp-unicode',
- action='store_true',
- help='avoid unicode characters in temporary directory (use only for verifying broken tests)')
-
- subparsers = parser.add_subparsers(metavar='COMMAND')
- subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- posix_integration = subparsers.add_parser('integration',
- parents=[integration],
- help='posix integration tests')
-
- posix_integration.set_defaults(func=command_posix_integration,
- targets=walk_posix_integration_targets,
- config=PosixIntegrationConfig)
-
- add_extra_docker_options(posix_integration)
-
- network_integration = subparsers.add_parser('network-integration',
- parents=[integration],
- help='network integration tests')
-
- network_integration.set_defaults(func=command_network_integration,
- targets=walk_network_integration_targets,
- config=NetworkIntegrationConfig)
-
- add_extra_docker_options(network_integration, integration=False)
-
- network_integration.add_argument('--platform',
- metavar='PLATFORM',
- action='append',
- help='network platform/version').completer = complete_network_platform
-
- network_integration.add_argument('--platform-collection',
- type=key_value_type,
- metavar='PLATFORM=COLLECTION',
- action='append',
- help='collection used to test platform').completer = complete_network_platform_collection
-
- network_integration.add_argument('--platform-connection',
- type=key_value_type,
- metavar='PLATFORM=CONNECTION',
- action='append',
- help='connection used to test platform').completer = complete_network_platform_connection
-
- network_integration.add_argument('--inventory',
- metavar='PATH',
- help='path to inventory used for tests')
-
- network_integration.add_argument('--testcase',
- metavar='TESTCASE',
- help='limit a test to a specified testcase').completer = complete_network_testcase
-
- windows_integration = subparsers.add_parser('windows-integration',
- parents=[integration],
- help='windows integration tests')
-
- windows_integration.set_defaults(func=command_windows_integration,
- targets=walk_windows_integration_targets,
- config=WindowsIntegrationConfig)
-
- add_extra_docker_options(windows_integration, integration=False)
-
- windows_integration.add_argument('--windows',
- metavar='VERSION',
- action='append',
- help='windows version').completer = complete_windows
-
- windows_integration.add_argument('--inventory',
- metavar='PATH',
- help='path to inventory used for tests')
-
- units = subparsers.add_parser('units',
- parents=[test],
- help='unit tests')
-
- units.set_defaults(func=command_units,
- targets=walk_units_targets,
- config=UnitsConfig)
-
- units.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- units.add_argument('--collect-only',
- action='store_true',
- help='collect tests but do not execute them')
-
- # noinspection PyTypeChecker
- units.add_argument('--num-workers',
- type=int,
- help='number of workers to use (default: auto)')
-
- units.add_argument('--requirements-mode',
- choices=('only', 'skip'),
- help=argparse.SUPPRESS)
-
- add_extra_docker_options(units, integration=False)
-
- sanity = subparsers.add_parser('sanity',
- parents=[test],
- help='sanity tests')
-
- sanity.set_defaults(func=command_sanity,
- targets=walk_sanity_targets,
- config=SanityConfig)
-
- sanity.add_argument('--test',
- metavar='TEST',
- action='append',
- choices=[test.name for test in sanity_get_tests()],
- help='tests to run').completer = complete_sanity_test
-
- sanity.add_argument('--skip-test',
- metavar='TEST',
- action='append',
- choices=[test.name for test in sanity_get_tests()],
- help='tests to skip').completer = complete_sanity_test
-
- sanity.add_argument('--allow-disabled',
- action='store_true',
- help='allow tests to run which are disabled by default')
-
- sanity.add_argument('--list-tests',
- action='store_true',
- help='list available tests')
-
- sanity.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- sanity.add_argument('--enable-optional-errors',
- action='store_true',
- help='enable optional errors')
-
- add_lint(sanity)
- add_extra_docker_options(sanity, integration=False)
-
- shell = subparsers.add_parser('shell',
- parents=[common],
- help='open an interactive shell')
-
- shell.add_argument('--python',
- metavar='VERSION',
- choices=SUPPORTED_PYTHON_VERSIONS + ('default',),
- help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
-
- shell.set_defaults(func=command_shell,
- config=ShellConfig)
-
- shell.add_argument('--raw',
- action='store_true',
- help='direct to shell with no setup')
-
- add_environments(shell, argparse)
- add_extra_docker_options(shell)
-
- coverage_common = argparse.ArgumentParser(add_help=False, parents=[common])
-
- add_environments(coverage_common, argparse, isolated_delegation=False)
-
- coverage_common_isolated_delegation = argparse.ArgumentParser(add_help=False, parents=[common])
-
- add_environments(coverage_common_isolated_delegation, argparse)
-
- coverage = subparsers.add_parser('coverage',
- help='code coverage management and reporting')
-
- coverage_subparsers = coverage.add_subparsers(metavar='COMMAND')
- coverage_subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- add_coverage_analyze(coverage_subparsers, coverage_common)
-
- coverage_combine = coverage_subparsers.add_parser('combine',
- parents=[coverage_common_isolated_delegation],
- help='combine coverage data and rewrite remote paths')
-
- coverage_combine.set_defaults(func=command_coverage_combine,
- config=CoverageCombineConfig)
-
- coverage_combine.add_argument('--export',
- help='directory to export combined coverage files to')
-
- add_extra_coverage_options(coverage_combine)
-
- coverage_erase = coverage_subparsers.add_parser('erase',
- parents=[coverage_common],
- help='erase coverage data files')
-
- coverage_erase.set_defaults(func=command_coverage_erase,
- config=CoverageEraseConfig)
-
- coverage_report = coverage_subparsers.add_parser('report',
- parents=[coverage_common_isolated_delegation],
- help='generate console coverage report')
-
- coverage_report.set_defaults(func=command_coverage_report,
- config=CoverageReportConfig)
-
- coverage_report.add_argument('--show-missing',
- action='store_true',
- help='show line numbers of statements not executed')
- coverage_report.add_argument('--include',
- metavar='PAT1,PAT2,...',
- help='include only files whose paths match one of these '
- 'patterns. Accepts shell-style wildcards, which must be '
- 'quoted.')
- coverage_report.add_argument('--omit',
- metavar='PAT1,PAT2,...',
- help='omit files whose paths match one of these patterns. '
- 'Accepts shell-style wildcards, which must be quoted.')
-
- add_extra_coverage_options(coverage_report)
-
- coverage_html = coverage_subparsers.add_parser('html',
- parents=[coverage_common_isolated_delegation],
- help='generate html coverage report')
-
- coverage_html.set_defaults(func=command_coverage_html,
- config=CoverageHtmlConfig)
-
- add_extra_coverage_options(coverage_html)
-
- coverage_xml = coverage_subparsers.add_parser('xml',
- parents=[coverage_common_isolated_delegation],
- help='generate xml coverage report')
-
- coverage_xml.set_defaults(func=command_coverage_xml,
- config=CoverageXmlConfig)
-
- add_extra_coverage_options(coverage_xml)
-
- env = subparsers.add_parser('env',
- parents=[common],
- help='show information about the test environment')
-
- env.set_defaults(func=command_env,
- config=EnvConfig)
-
- env.add_argument('--show',
- action='store_true',
- help='show environment on stdout')
-
- env.add_argument('--dump',
- action='store_true',
- help='dump environment to disk')
-
- env.add_argument('--list-files',
- action='store_true',
- help='list files on stdout')
-
- # noinspection PyTypeChecker
- env.add_argument('--timeout',
- type=int,
- metavar='MINUTES',
- help='timeout for future ansible-test commands (0 clears)')
-
- if argcomplete:
- argcomplete.autocomplete(parser, always_complete_options=False, validator=lambda i, k: True)
-
- args = parser.parse_args()
-
- if args.explain and not args.verbosity:
- args.verbosity = 1
-
- if args.color == 'yes':
- args.color = True
- elif args.color == 'no':
- args.color = False
- else:
- args.color = sys.stdout.isatty()
-
- return args
-
-
-def key_value(argparse, value): # type: (argparse_module, str) -> t.Tuple[str, str]
- """Type parsing and validation for argparse key/value pairs separated by an '=' character."""
- parts = value.split('=')
-
- if len(parts) != 2:
- raise argparse.ArgumentTypeError('"%s" must be in the format "key=value"' % value)
-
- return parts[0], parts[1]
-
-
-# noinspection PyProtectedMember,PyUnresolvedReferences
-def add_coverage_analyze(coverage_subparsers, coverage_common): # type: (argparse_module._SubParsersAction, argparse_module.ArgumentParser) -> None
- """Add the `coverage analyze` subcommand."""
- analyze = coverage_subparsers.add_parser(
- 'analyze',
- help='analyze collected coverage data',
- )
-
- analyze_subparsers = analyze.add_subparsers(metavar='COMMAND')
- analyze_subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- targets = analyze_subparsers.add_parser(
- 'targets',
- help='analyze integration test target coverage',
- )
-
- targets_subparsers = targets.add_subparsers(metavar='COMMAND')
- targets_subparsers.required = True # work-around for python 3 bug which makes subparsers optional
-
- targets_generate = targets_subparsers.add_parser(
- 'generate',
- parents=[coverage_common],
- help='aggregate coverage by integration test target',
- )
-
- targets_generate.set_defaults(
- func=command_coverage_analyze_targets_generate,
- config=CoverageAnalyzeTargetsGenerateConfig,
- )
-
- targets_generate.add_argument(
- 'input_dir',
- nargs='?',
- help='directory to read coverage from',
- )
-
- targets_generate.add_argument(
- 'output_file',
- help='output file for aggregated coverage',
- )
-
- targets_expand = targets_subparsers.add_parser(
- 'expand',
- parents=[coverage_common],
- help='expand target names from integers in aggregated coverage',
- )
-
- targets_expand.set_defaults(
- func=command_coverage_analyze_targets_expand,
- config=CoverageAnalyzeTargetsExpandConfig,
- )
-
- targets_expand.add_argument(
- 'input_file',
- help='input file to read aggregated coverage from',
- )
-
- targets_expand.add_argument(
- 'output_file',
- help='output file to write expanded coverage to',
- )
-
- targets_filter = targets_subparsers.add_parser(
- 'filter',
- parents=[coverage_common],
- help='filter aggregated coverage data',
- )
-
- targets_filter.set_defaults(
- func=command_coverage_analyze_targets_filter,
- config=CoverageAnalyzeTargetsFilterConfig,
- )
-
- targets_filter.add_argument(
- 'input_file',
- help='input file to read aggregated coverage from',
- )
-
- targets_filter.add_argument(
- 'output_file',
- help='output file to write expanded coverage to',
- )
-
- targets_filter.add_argument(
- '--include-target',
- dest='include_targets',
- action='append',
- help='include the specified targets',
- )
-
- targets_filter.add_argument(
- '--exclude-target',
- dest='exclude_targets',
- action='append',
- help='exclude the specified targets',
- )
-
- targets_filter.add_argument(
- '--include-path',
- help='include paths matching the given regex',
- )
-
- targets_filter.add_argument(
- '--exclude-path',
- help='exclude paths matching the given regex',
- )
-
- targets_combine = targets_subparsers.add_parser(
- 'combine',
- parents=[coverage_common],
- help='combine multiple aggregated coverage files',
- )
-
- targets_combine.set_defaults(
- func=command_coverage_analyze_targets_combine,
- config=CoverageAnalyzeTargetsCombineConfig,
- )
-
- targets_combine.add_argument(
- 'input_file',
- nargs='+',
- help='input file to read aggregated coverage from',
- )
-
- targets_combine.add_argument(
- 'output_file',
- help='output file to write aggregated coverage to',
- )
-
- targets_missing = targets_subparsers.add_parser(
- 'missing',
- parents=[coverage_common],
- help='identify coverage in one file missing in another',
- )
-
- targets_missing.set_defaults(
- func=command_coverage_analyze_targets_missing,
- config=CoverageAnalyzeTargetsMissingConfig,
- )
-
- targets_missing.add_argument(
- 'from_file',
- help='input file containing aggregated coverage',
- )
-
- targets_missing.add_argument(
- 'to_file',
- help='input file containing aggregated coverage',
- )
-
- targets_missing.add_argument(
- 'output_file',
- help='output file to write aggregated coverage to',
- )
-
- targets_missing.add_argument(
- '--only-gaps',
- action='store_true',
- help='report only arcs/lines not hit by any target',
- )
-
- targets_missing.add_argument(
- '--only-exists',
- action='store_true',
- help='limit results to files that exist',
- )
-
-
-def add_lint(parser):
- """
- :type parser: argparse.ArgumentParser
- """
- parser.add_argument('--lint',
- action='store_true',
- help='write lint output to stdout, everything else stderr')
-
- parser.add_argument('--junit',
- action='store_true',
- help='write test failures to junit xml files')
-
- parser.add_argument('--failure-ok',
- action='store_true',
- help='exit successfully on failed tests after saving results')
-
-
-def add_changes(parser, argparse):
- """
- :type parser: argparse.ArgumentParser
- :type argparse: argparse
- """
- parser.add_argument('--changed', action='store_true', help='limit targets based on changes')
-
- changes = parser.add_argument_group(title='change detection arguments')
-
- changes.add_argument('--tracked', action='store_true', help=argparse.SUPPRESS)
- changes.add_argument('--untracked', action='store_true', help='include untracked files')
- changes.add_argument('--ignore-committed', dest='committed', action='store_false', help='exclude committed files')
- changes.add_argument('--ignore-staged', dest='staged', action='store_false', help='exclude staged files')
- changes.add_argument('--ignore-unstaged', dest='unstaged', action='store_false', help='exclude unstaged files')
-
- changes.add_argument('--changed-from', metavar='PATH', help=argparse.SUPPRESS)
- changes.add_argument('--changed-path', metavar='PATH', action='append', help=argparse.SUPPRESS)
-
-
-def add_environments(parser, argparse, isolated_delegation=True):
- """
- :type parser: argparse.ArgumentParser
- :type argparse: argparse
- :type isolated_delegation: bool
- """
- parser.add_argument('--requirements',
- action='store_true',
- help='install command requirements')
-
- parser.add_argument('--python-interpreter',
- metavar='PATH',
- default=None,
- help='path to the docker or remote python interpreter')
-
- parser.add_argument('--no-pip-check',
- dest='pip_check',
- default=True,
- action='store_false',
- help='do not run "pip check" to verify requirements')
-
- environments = parser.add_mutually_exclusive_group()
-
- environments.add_argument('--local',
- action='store_true',
- help='run from the local environment')
-
- environments.add_argument('--venv',
- action='store_true',
- help='run from ansible-test managed virtual environments')
-
- venv = parser.add_argument_group(title='venv arguments')
-
- venv.add_argument('--venv-system-site-packages',
- action='store_true',
- help='enable system site packages')
-
- if not isolated_delegation:
- environments.set_defaults(
- containers=None,
- docker=None,
- remote=None,
- remote_stage=None,
- remote_provider=None,
- remote_terminate=None,
- remote_endpoint=None,
- python_interpreter=None,
- )
-
- return
-
- parser.add_argument('--containers',
- help=argparse.SUPPRESS) # internal use only
-
- environments.add_argument('--docker',
- metavar='IMAGE',
- nargs='?',
- default=None,
- const='default',
- help='run from a docker container').completer = complete_docker
-
- environments.add_argument('--remote',
- metavar='PLATFORM',
- default=None,
- help='run from a remote instance').completer = complete_remote_shell if parser.prog.endswith(' shell') else complete_remote
-
- remote = parser.add_argument_group(title='remote arguments')
-
- remote.add_argument('--remote-stage',
- metavar='STAGE',
- help='remote stage to use: prod, dev',
- default='prod').completer = complete_remote_stage
-
- remote.add_argument('--remote-provider',
- metavar='PROVIDER',
- help='remote provider to use: %(choices)s',
- choices=['default'] + sorted(AnsibleCoreCI.PROVIDERS.keys()),
- default='default')
-
- remote.add_argument('--remote-endpoint',
- metavar='ENDPOINT',
- help='remote provisioning endpoint to use (default: auto)',
- default=None)
-
- remote.add_argument('--remote-terminate',
- metavar='WHEN',
- help='terminate remote instance: %(choices)s (default: %(default)s)',
- choices=['never', 'always', 'success'],
- default='never')
-
-
-def add_extra_coverage_options(parser):
- """
- :type parser: argparse.ArgumentParser
- """
- parser.add_argument('--group-by',
- metavar='GROUP',
- action='append',
- choices=COVERAGE_GROUPS,
- help='group output by: %s' % ', '.join(COVERAGE_GROUPS))
-
- parser.add_argument('--all',
- action='store_true',
- help='include all python/powershell source files')
-
- parser.add_argument('--stub',
- action='store_true',
- help='generate empty report of all python/powershell source files')
-
-
-def add_extra_docker_options(parser, integration=True):
- """
- :type parser: argparse.ArgumentParser
- :type integration: bool
- """
- docker = parser.add_argument_group(title='docker arguments')
-
- docker.add_argument('--docker-no-pull',
- action='store_false',
- dest='docker_pull',
- help='do not explicitly pull the latest docker images')
-
- if data_context().content.is_ansible:
- docker.add_argument('--docker-keep-git',
- action='store_true',
- help='transfer git related files into the docker container')
- else:
- docker.set_defaults(
- docker_keep_git=False,
- )
-
- docker.add_argument('--docker-seccomp',
- metavar='SC',
- choices=('default', 'unconfined'),
- default=None,
- help='set seccomp confinement for the test container: %(choices)s')
-
- docker.add_argument('--docker-terminate',
- metavar='WHEN',
- help='terminate docker container: %(choices)s (default: %(default)s)',
- choices=['never', 'always', 'success'],
- default='always')
-
- if not integration:
- return
-
- docker.add_argument('--docker-privileged',
- action='store_true',
- help='run docker container in privileged mode')
-
- docker.add_argument('--docker-network',
- help='run using the specified docker network')
-
- # noinspection PyTypeChecker
- docker.add_argument('--docker-memory',
- help='memory limit for docker in bytes', type=int)
-
-
-# noinspection PyUnusedLocal
-def complete_remote_stage(prefix, parsed_args, **_): # pylint: disable=unused-argument
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- return [stage for stage in ('prod', 'dev') if stage.startswith(prefix)]
-
-
-def complete_target(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- return find_target_completion(parsed_args.targets, prefix)
-
-
-# noinspection PyUnusedLocal
-def complete_remote(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- images = sorted(get_remote_completion().keys())
-
- return [i for i in images if i.startswith(prefix)]
-
-
-# noinspection PyUnusedLocal
-def complete_remote_shell(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- images = sorted(get_remote_completion().keys())
-
- windows_completion_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt')
- images.extend(["windows/%s" % i for i in read_lines_without_comments(windows_completion_path, remove_blank_lines=True)])
-
- return [i for i in images if i.startswith(prefix)]
-
-
-# noinspection PyUnusedLocal
-def complete_docker(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- images = sorted(get_docker_completion().keys())
-
- return [i for i in images if i.startswith(prefix)]
-
-
-def complete_windows(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- images = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', 'windows.txt'), remove_blank_lines=True)
-
- return [i for i in images if i.startswith(prefix) and (not parsed_args.windows or i not in parsed_args.windows)]
-
-
-def complete_network_platform(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- images = sorted(get_network_completion())
-
- return [i for i in images if i.startswith(prefix) and (not parsed_args.platform or i not in parsed_args.platform)]
-
-
-def complete_network_platform_collection(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- left = prefix.split('=')[0]
- images = sorted(set(image.split('/')[0] for image in get_network_completion()))
-
- return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_collection or i not in [x[0] for x in parsed_args.platform_collection])]
-
-
-def complete_network_platform_connection(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- left = prefix.split('=')[0]
- images = sorted(set(image.split('/')[0] for image in get_network_completion()))
-
- return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])]
-
-
-def complete_network_testcase(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- testcases = []
-
- # since testcases are module specific, don't autocomplete if more than one
- # module is specidied
- if len(parsed_args.include) != 1:
- return []
-
- test_dir = os.path.join(data_context().content.integration_targets_path, parsed_args.include[0], 'tests')
- connection_dirs = data_context().content.get_dirs(test_dir)
-
- for connection_dir in connection_dirs:
- for testcase in [os.path.basename(path) for path in data_context().content.get_files(connection_dir)]:
- if testcase.startswith(prefix):
- testcases.append(testcase.split('.')[0])
-
- return testcases
-
-
-# noinspection PyUnusedLocal
-def complete_sanity_test(prefix, parsed_args, **_):
- """
- :type prefix: unicode
- :type parsed_args: any
- :rtype: list[str]
- """
- del parsed_args
-
- tests = sorted(test.name for test in sanity_get_tests())
-
- return [i for i in tests if i.startswith(prefix)]
diff --git a/test/lib/ansible_test/_internal/cli/__init__.py b/test/lib/ansible_test/_internal/cli/__init__.py
new file mode 100644
index 0000000000..21c45b6e32
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/__init__.py
@@ -0,0 +1,55 @@
+"""Command line parsing."""
+from __future__ import annotations
+
+import argparse
+import os
+import sys
+
+from .argparsing import (
+ CompositeActionCompletionFinder,
+)
+
+from .commands import (
+ do_commands,
+)
+
+
+from .compat import (
+ HostSettings,
+ convert_legacy_args,
+)
+
+
+def parse_args(): # type: () -> argparse.Namespace
+ """Parse command line arguments."""
+ completer = CompositeActionCompletionFinder()
+
+ if completer.enabled:
+ epilog = 'Tab completion available using the "argcomplete" python package.'
+ else:
+ epilog = 'Install the "argcomplete" python package to enable tab completion.'
+
+ parser = argparse.ArgumentParser(epilog=epilog)
+
+ do_commands(parser, completer)
+
+ completer(
+ parser,
+ always_complete_options=False,
+ )
+
+ argv = sys.argv[1:]
+ args = parser.parse_args(argv)
+
+ if args.explain and not args.verbosity:
+ args.verbosity = 1
+
+ if args.no_environment:
+ pass
+ elif args.host_path:
+ args.host_settings = HostSettings.deserialize(os.path.join(args.host_path, 'settings.dat'))
+ else:
+ args.host_settings = convert_legacy_args(argv, args, args.target_mode)
+ args.host_settings.apply_defaults()
+
+ return args
diff --git a/test/lib/ansible_test/_internal/cli/actions.py b/test/lib/ansible_test/_internal/cli/actions.py
new file mode 100644
index 0000000000..e22a7b0e59
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/actions.py
@@ -0,0 +1,90 @@
+"""Actions for handling composite arguments with argparse."""
+from __future__ import annotations
+
+from .argparsing import (
+ CompositeAction,
+ NamespaceParser,
+)
+
+from .parsers import (
+ DelegatedControllerParser,
+ NetworkSshTargetParser,
+ NetworkTargetParser,
+ OriginControllerParser,
+ PosixSshTargetParser,
+ PosixTargetParser,
+ SanityPythonTargetParser,
+ UnitsPythonTargetParser,
+ WindowsSshTargetParser,
+ WindowsTargetParser,
+)
+
+
+class OriginControllerAction(CompositeAction):
+ """Composite action parser for the controller when the only option is `origin`."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return OriginControllerParser()
+
+
+class DelegatedControllerAction(CompositeAction):
+ """Composite action parser for the controller when delegation is supported."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return DelegatedControllerParser()
+
+
+class PosixTargetAction(CompositeAction):
+ """Composite action parser for a POSIX target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return PosixTargetParser()
+
+
+class WindowsTargetAction(CompositeAction):
+ """Composite action parser for a Windows target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return WindowsTargetParser()
+
+
+class NetworkTargetAction(CompositeAction):
+ """Composite action parser for a network target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return NetworkTargetParser()
+
+
+class SanityPythonTargetAction(CompositeAction):
+ """Composite action parser for a sanity target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return SanityPythonTargetParser()
+
+
+class UnitsPythonTargetAction(CompositeAction):
+ """Composite action parser for a units target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return UnitsPythonTargetParser()
+
+
+class PosixSshTargetAction(CompositeAction):
+ """Composite action parser for a POSIX SSH target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return PosixSshTargetParser()
+
+
+class WindowsSshTargetAction(CompositeAction):
+ """Composite action parser for a Windows SSH target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return WindowsSshTargetParser()
+
+
+class NetworkSshTargetAction(CompositeAction):
+ """Composite action parser for a network SSH target."""
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+ return NetworkSshTargetParser()
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/__init__.py b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
new file mode 100644
index 0000000000..8a087ebf8f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/__init__.py
@@ -0,0 +1,263 @@
+"""Completion finder which brings together custom options and completion logic."""
+from __future__ import annotations
+
+import abc
+import argparse
+import os
+import re
+import typing as t
+
+from .argcompletion import (
+ OptionCompletionFinder,
+ get_comp_type,
+ register_safe_action,
+ warn,
+)
+
+from .parsers import (
+ Completion,
+ CompletionError,
+ CompletionSuccess,
+ CompletionUnavailable,
+ DocumentationState,
+ NamespaceParser,
+ Parser,
+ ParserError,
+ ParserMode,
+ ParserState,
+)
+
+
+class RegisteredCompletionFinder(OptionCompletionFinder):
+ """
+ Custom option completion finder for argcomplete which allows completion results to be registered.
+ These registered completions, if provided, are used to filter the final completion results.
+ This works around a known bug: https://github.com/kislyuk/argcomplete/issues/221
+ """
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+        self.registered_completions = None  # type: t.Optional[t.List[str]]
+
+ def completer(
+ self,
+ prefix, # type: str
+ action, # type: argparse.Action
+ parsed_args, # type: argparse.Namespace
+ **kwargs,
+ ): # type: (...) -> t.List[str]
+ """
+ Return a list of completions for the specified prefix and action.
+ Use this as the completer function for argcomplete.
+ """
+ kwargs.clear()
+ del kwargs
+
+ completions = self.get_completions(prefix, action, parsed_args)
+
+ if action.nargs and not isinstance(action.nargs, int):
+ # prevent argcomplete from including unrelated arguments in the completion results
+ self.registered_completions = completions
+
+ return completions
+
+ @abc.abstractmethod
+ def get_completions(
+ self,
+ prefix, # type: str
+ action, # type: argparse.Action
+ parsed_args, # type: argparse.Namespace
+ ): # type: (...) -> t.List[str]
+ """
+ Return a list of completions for the specified prefix and action.
+ Called by the complete function.
+ """
+
+ def quote_completions(self, completions, cword_prequote, last_wordbreak_pos):
+ """Modify completion results before returning them."""
+ if self.registered_completions is not None:
+ # If one of the completion handlers registered their results, only allow those exact results to be returned.
+ # This prevents argcomplete from adding results from other completers when they are known to be invalid.
+ allowed_completions = set(self.registered_completions)
+ completions = [completion for completion in completions if completion in allowed_completions]
+
+ return super().quote_completions(completions, cword_prequote, last_wordbreak_pos)
+
+
+class CompositeAction(argparse.Action, metaclass=abc.ABCMeta):
+ """Base class for actions that parse composite arguments."""
+ documentation_state = {} # type: t.Dict[t.Type[CompositeAction], DocumentationState]
+
+ # noinspection PyUnusedLocal
+ def __init__(
+ self,
+ *args,
+ dest, # type: str
+ **kwargs,
+ ):
+ del dest
+
+ self.definition = self.create_parser()
+ self.documentation_state[type(self)] = documentation_state = DocumentationState()
+ self.definition.document(documentation_state)
+
+ super().__init__(*args, dest=self.definition.dest, **kwargs)
+
+ register_safe_action(type(self))
+
+ @abc.abstractmethod
+ def create_parser(self): # type: () -> NamespaceParser
+ """Return a namespace parser to parse the argument associated with this action."""
+
+ def __call__(
+ self,
+ parser,
+ namespace,
+ values,
+ option_string=None,
+ ):
+ state = ParserState(mode=ParserMode.PARSE, namespaces=[namespace], remainder=values)
+
+ try:
+ self.definition.parse(state)
+ except ParserError as ex:
+ error = str(ex)
+ except CompletionError as ex:
+ error = ex.message
+ else:
+ return
+
+ if get_comp_type():
+ # FUTURE: It may be possible to enhance error handling by surfacing this error message during downstream completion.
+ return # ignore parse errors during completion to avoid breaking downstream completion
+
+ raise argparse.ArgumentError(self, error)
+
+
+class CompositeActionCompletionFinder(RegisteredCompletionFinder):
+ """Completion finder with support for composite argument parsing."""
+ def get_completions(
+ self,
+ prefix, # type: str
+ action, # type: CompositeAction
+ parsed_args, # type: argparse.Namespace
+ ): # type: (...) -> t.List[str]
+ """Return a list of completions appropriate for the given prefix and action, taking into account the arguments that have already been parsed."""
+ state = ParserState(
+ mode=ParserMode.LIST if self.list_mode else ParserMode.COMPLETE,
+ remainder=prefix,
+ namespaces=[parsed_args],
+ )
+
+ answer = complete(action.definition, state)
+
+ completions = []
+
+ if isinstance(answer, CompletionSuccess):
+ self.disable_completion_mangling = answer.preserve
+ completions = answer.completions
+
+ if isinstance(answer, CompletionError):
+ warn(answer.message)
+
+ return completions
+
+
+def detect_file_listing(value, mode): # type: (str, ParserMode) -> bool
+ """
+ Return True if Bash will show a file listing and redraw the prompt, otherwise return False.
+
+ If there are no list results, a file listing will be shown if the value after the last `=` or `:` character:
+
+ - is empty
+ - matches a full path
+ - matches a partial path
+
+ Otherwise Bash will play the bell sound and display nothing.
+
+ see: https://github.com/kislyuk/argcomplete/issues/328
+ see: https://github.com/kislyuk/argcomplete/pull/284
+ """
+ listing = False
+
+ if mode == ParserMode.LIST:
+ right = re.split('[=:]', value)[-1]
+ listing = not right or os.path.exists(right)
+
+ if not listing:
+ directory = os.path.dirname(right)
+
+ # noinspection PyBroadException
+ try:
+ filenames = os.listdir(directory or '.')
+ except Exception: # pylint: disable=broad-except
+ pass
+ else:
+ listing = any(filename.startswith(right) for filename in filenames)
+
+ return listing
+
+
+def detect_false_file_completion(value, mode): # type: (str, ParserMode) -> bool
+ """
+ Return True if Bash will provide an incorrect file completion, otherwise return False.
+
+ If there are no completion results, a filename will be automatically completed if the value after the last `=` or `:` character:
+
+ - matches exactly one partial path
+
+ Otherwise Bash will play the bell sound and display nothing.
+
+ see: https://github.com/kislyuk/argcomplete/issues/328
+ see: https://github.com/kislyuk/argcomplete/pull/284
+ """
+ completion = False
+
+ if mode == ParserMode.COMPLETE:
+ completion = True
+
+ right = re.split('[=:]', value)[-1]
+ directory, prefix = os.path.split(right)
+
+ # noinspection PyBroadException
+ try:
+ filenames = os.listdir(directory or '.')
+ except Exception: # pylint: disable=broad-except
+ pass
+ else:
+ matches = [filename for filename in filenames if filename.startswith(prefix)]
+ completion = len(matches) == 1
+
+ return completion
+
+
+def complete(
+ completer, # type: Parser
+ state, # type: ParserState
+): # type: (...) -> Completion
+ """Perform argument completion using the given completer and return the completion result."""
+ value = state.remainder
+
+ try:
+ completer.parse(state)
+ raise ParserError('completion expected')
+ except CompletionUnavailable as ex:
+ if detect_file_listing(value, state.mode):
+ # Displaying a warning before the file listing informs the user it is invalid. Bash will redraw the prompt after the list.
+ # If the file listing is not shown, a warning could be helpful, but would introduce noise on the terminal since the prompt is not redrawn.
+ answer = CompletionError(ex.message)
+ elif detect_false_file_completion(value, state.mode):
+            # When the current prefix provides no matches, but matches a single file on disk, Bash will perform an incorrect completion.
+ # Returning multiple invalid matches instead of no matches will prevent Bash from using its own completion logic in this case.
+ answer = CompletionSuccess(
+ list_mode=True, # abuse list mode to enable preservation of the literal results
+ consumed='',
+ continuation='',
+ matches=['completion', 'invalid']
+ )
+ else:
+ answer = ex
+ except Completion as ex:
+ answer = ex
+
+ return answer
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/actions.py b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
new file mode 100644
index 0000000000..c2b573e639
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/actions.py
@@ -0,0 +1,18 @@
+"""Actions for argparse."""
+from __future__ import annotations
+
+import argparse
+import enum
+import typing as t
+
+
+class EnumAction(argparse.Action):
+ """Parse an enum using the lowercases enum names."""
+ def __init__(self, **kwargs): # type: (t.Dict[str, t.Any]) -> None
+ self.enum_type = kwargs.pop('type', None) # type: t.Type[enum.Enum]
+ kwargs.setdefault('choices', tuple(e.name.lower() for e in self.enum_type))
+ super().__init__(**kwargs)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ value = self.enum_type[values.upper()]
+ setattr(namespace, self.dest, value)
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py
new file mode 100644
index 0000000000..ca502c530a
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/argcompletion.py
@@ -0,0 +1,124 @@
+"""Wrapper around argcomplete providing bug fixes and additional features."""
+from __future__ import annotations
+
+import argparse
+import enum
+import os
+import typing as t
+
+
+class Substitute:
+ """Substitute for missing class which accepts all arguments."""
+ def __init__(self, *args, **kwargs):
+ pass
+
+
+try:
+ import argcomplete
+
+ from argcomplete import (
+ CompletionFinder,
+ default_validator,
+ )
+
+ warn = argcomplete.warn # pylint: disable=invalid-name
+except ImportError:
+ argcomplete = None
+
+ CompletionFinder = Substitute
+ default_validator = Substitute # pylint: disable=invalid-name
+ warn = Substitute # pylint: disable=invalid-name
+
+
+class CompType(enum.Enum):
+ """
+ Bash COMP_TYPE argument completion types.
+ For documentation, see: https://www.gnu.org/software/bash/manual/html_node/Bash-Variables.html#index-COMP_005fTYPE
+ """
+ COMPLETION = '\t'
+ """
+ Standard completion, typically triggered by a single tab.
+ """
+ MENU_COMPLETION = '%'
+ """
+ Menu completion, which cycles through each completion instead of showing a list.
+ For help using this feature, see: https://stackoverflow.com/questions/12044574/getting-complete-and-menu-complete-to-work-together
+ """
+ LIST = '?'
+ """
+ Standard list, typically triggered by a double tab.
+ """
+ LIST_AMBIGUOUS = '!'
+ """
+ Listing with `show-all-if-ambiguous` set.
+ For documentation, see https://www.gnu.org/software/bash/manual/html_node/Readline-Init-File-Syntax.html#index-show_002dall_002dif_002dambiguous
+ For additional details, see: https://unix.stackexchange.com/questions/614123/explanation-of-bash-completion-comp-type
+ """
+ LIST_UNMODIFIED = '@'
+ """
+ Listing with `show-all-if-unmodified` set.
+ For documentation, see https://www.gnu.org/software/bash/manual/html_node/Readline-Init-File-Syntax.html#index-show_002dall_002dif_002dunmodified
+ For additional details, see: https://unix.stackexchange.com/questions/614123/explanation-of-bash-completion-comp-type
+ """
+
+ @property
+ def list_mode(self): # type: () -> bool
+ """True if completion is running in list mode, otherwise False."""
+ return self in (CompType.LIST, CompType.LIST_AMBIGUOUS, CompType.LIST_UNMODIFIED)
+
+
+def register_safe_action(action_type): # type: (t.Type[argparse.Action]) -> None
+ """Register the given action as a safe action for argcomplete to use during completion if it is not already registered."""
+ if argcomplete and action_type not in argcomplete.safe_actions:
+ argcomplete.safe_actions += (action_type,)
+
+
+def get_comp_type(): # type: () -> t.Optional[CompType]
+ """Parse the COMP_TYPE environment variable (if present) and return the associated CompType enum value."""
+ value = os.environ.get('COMP_TYPE')
+ comp_type = CompType(chr(int(value))) if value else None
+ return comp_type
+
+
+class OptionCompletionFinder(CompletionFinder):
+ """
+ Custom completion finder for argcomplete.
+ It provides support for running completion in list mode, which argcomplete natively handles the same as standard completion.
+ """
+ enabled = bool(argcomplete)
+
+ def __init__(self, *args, validator=None, **kwargs):
+ if validator:
+ raise ValueError()
+
+ self.comp_type = get_comp_type()
+ self.list_mode = self.comp_type.list_mode if self.comp_type else False
+ self.disable_completion_mangling = False
+
+ finder = self
+
+ def custom_validator(completion, prefix):
+ """Completion validator used to optionally bypass validation."""
+ if finder.disable_completion_mangling:
+ return True
+
+ return default_validator(completion, prefix)
+
+ super().__init__(
+ *args,
+ validator=custom_validator,
+ **kwargs,
+ )
+
+ def __call__(self, *args, **kwargs):
+ if self.enabled:
+ super().__call__(*args, **kwargs)
+
+ def quote_completions(self, completions, cword_prequote, last_wordbreak_pos):
+ """Intercept default quoting behavior to optionally block mangling of completion entries."""
+ if self.disable_completion_mangling:
+ # Word breaks have already been handled when generating completions, don't mangle them further.
+ # This is needed in many cases when returning completion lists which lack the existing completion prefix.
+ last_wordbreak_pos = None
+
+ return super().quote_completions(completions, cword_prequote, last_wordbreak_pos)
diff --git a/test/lib/ansible_test/_internal/cli/argparsing/parsers.py b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
new file mode 100644
index 0000000000..fe80a68e5d
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/argparsing/parsers.py
@@ -0,0 +1,581 @@
+"""General purpose composite argument parsing and completion."""
+from __future__ import annotations
+
+import abc
+import contextlib
+import dataclasses
+import enum
+import os
+import re
+import typing as t
+
+# NOTE: When choosing delimiters, take into account Bash and argcomplete behavior.
+#
+# Recommended characters for assignment and/or continuation: `/` `:` `=`
+#
+# The recommended assignment_character list is due to how argcomplete handles continuation characters.
+# see: https://github.com/kislyuk/argcomplete/blob/5a20d6165fbb4d4d58559378919b05964870cc16/argcomplete/__init__.py#L557-L558
+
+PAIR_DELIMITER = ','
+ASSIGNMENT_DELIMITER = '='
+PATH_DELIMITER = '/'
+
+
+@dataclasses.dataclass(frozen=True)
+class Completion(Exception):
+ """Base class for argument completion results."""
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionUnavailable(Completion):
+ """Argument completion unavailable."""
+ message: str = 'No completions available.'
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionError(Completion):
+ """Argument completion error."""
+ message: t.Optional[str] = None
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionSuccess(Completion):
+ """Successful argument completion result."""
+ list_mode: bool
+ consumed: str
+ continuation: str
+ matches: t.List[str] = dataclasses.field(default_factory=list)
+
+ @property
+ def preserve(self): # type: () -> bool
+ """
+ True if argcomplete should not mangle completion values, otherwise False.
+ Only used when more than one completion exists to avoid overwriting the word undergoing completion.
+ """
+ return len(self.matches) > 1 and self.list_mode
+
+ @property
+ def completions(self): # type: () -> t.List[str]
+ """List of completion values to return to argcomplete."""
+ completions = self.matches
+ continuation = '' if self.list_mode else self.continuation
+
+ if not self.preserve:
+ # include the existing prefix to avoid rewriting the word undergoing completion
+ completions = [f'{self.consumed}{completion}{continuation}' for completion in completions]
+
+ return completions
+
+
+class ParserMode(enum.Enum):
+ """Mode the parser is operating in."""
+ PARSE = enum.auto()
+ COMPLETE = enum.auto()
+ LIST = enum.auto()
+
+
+class ParserError(Exception):
+ """Base class for all parsing exceptions."""
+
+
+@dataclasses.dataclass
+class ParserBoundary:
+ """Boundary details for parsing composite input."""
+ delimiters: str
+ required: bool
+ match: t.Optional[str] = None
+ ready: bool = True
+
+
+@dataclasses.dataclass
+class ParserState:
+ """State of the composite argument parser."""
+ mode: ParserMode
+ remainder: str = ''
+ consumed: str = ''
+ boundaries: t.List[ParserBoundary] = dataclasses.field(default_factory=list)
+ namespaces: t.List[t.Any] = dataclasses.field(default_factory=list)
+ parts: t.List[str] = dataclasses.field(default_factory=list)
+
+ @property
+ def incomplete(self): # type: () -> bool
+ """True if parsing is incomplete (unparsed input remains), otherwise False."""
+ return self.remainder is not None
+
+ def match(self, value, choices): # type: (str, t.List[str]) -> bool
+ """Return True if the given value matches the provided choices, taking into account parsing boundaries, otherwise return False."""
+ if self.current_boundary:
+ delimiters, delimiter = self.current_boundary.delimiters, self.current_boundary.match
+ else:
+ delimiters, delimiter = '', None
+
+ for choice in choices:
+ if choice.rstrip(delimiters) == choice:
+ # choice is not delimited
+ if value == choice:
+ return True # value matched
+ else:
+ # choice is delimited
+ if f'{value}{delimiter}' == choice:
+ return True # value and delimiter matched
+
+ return False
+
+ def read(self): # type: () -> str
+ """Read and return the next input segment, taking into account parsing boundaries."""
+ delimiters = "".join(boundary.delimiters for boundary in self.boundaries)
+
+ if delimiters:
+ pattern = '([' + re.escape(delimiters) + '])'
+ regex = re.compile(pattern)
+ parts = regex.split(self.remainder, 1)
+ else:
+ parts = [self.remainder]
+
+ if len(parts) > 1:
+ value, delimiter, remainder = parts
+ else:
+ value, delimiter, remainder = parts[0], None, None
+
+ for boundary in reversed(self.boundaries):
+ if delimiter and delimiter in boundary.delimiters:
+ boundary.match = delimiter
+ self.consumed += value + delimiter
+ break
+
+ boundary.match = None
+ boundary.ready = False
+
+ if boundary.required:
+ break
+
+ self.remainder = remainder
+
+ return value
+
+ @property
+ def root_namespace(self): # type: () -> t.Any
+ """The root namespace."""
+ return self.namespaces[0]
+
+ @property
+ def current_namespace(self): # type: () -> t.Any
+ """The current namespace."""
+ return self.namespaces[-1]
+
+ @property
+ def current_boundary(self): # type: () -> t.Optional[ParserBoundary]
+ """The current parser boundary, if any, otherwise None."""
+ return self.boundaries[-1] if self.boundaries else None
+
+ def set_namespace(self, namespace): # type: (t.Any) -> None
+ """Set the current namespace."""
+ self.namespaces.append(namespace)
+
+ @contextlib.contextmanager
+ def delimit(self, delimiters, required=True): # type: (str, bool) -> t.ContextManager[ParserBoundary]
+ """Context manager for delimiting parsing of input."""
+ boundary = ParserBoundary(delimiters=delimiters, required=required)
+
+ self.boundaries.append(boundary)
+
+ try:
+ yield boundary
+ finally:
+ self.boundaries.pop()
+
+ if boundary.required and not boundary.match:
+ raise ParserError('required delimiter not found, hit up-level delimiter or end of input instead')
+
+
+@dataclasses.dataclass
+class DocumentationState:
+ """State of the composite argument parser's generated documentation."""
+ sections: t.Dict[str, str] = dataclasses.field(default_factory=dict)
+
+
+class Parser(metaclass=abc.ABCMeta):
+ """Base class for all composite argument parsers."""
+ @abc.abstractmethod
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ raise Exception(f'Undocumented parser: {type(self)}')
+
+
+class MatchConditions(enum.Flag):
+ """Acceptable condition(s) for matching user input to available choices."""
+ CHOICE = enum.auto()
+ """Match any choice."""
+ ANY = enum.auto()
+ """Match any non-empty string."""
+ NOTHING = enum.auto()
+ """Match an empty string which is not followed by a boundary match."""
+
+
+class DynamicChoicesParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers which use a list of choices that can be generated during completion."""
+ def __init__(self, conditions=MatchConditions.CHOICE): # type: (MatchConditions) -> None
+ self.conditions = conditions
+
+ @abc.abstractmethod
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+
+ def no_completion_match(self, value): # type: (str) -> CompletionUnavailable # pylint: disable=unused-argument
+ """Return an instance of CompletionUnavailable when no match was found for the given value."""
+ return CompletionUnavailable()
+
+ def no_choices_available(self, value): # type: (str) -> ParserError # pylint: disable=unused-argument
+ """Return an instance of ParserError when parsing fails and no choices are available."""
+ return ParserError('No choices available.')
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = state.read()
+ choices = self.get_choices(value)
+
+ if state.mode == ParserMode.PARSE or state.incomplete:
+ if self.conditions & MatchConditions.CHOICE and state.match(value, choices):
+ return value
+
+ if self.conditions & MatchConditions.ANY and value:
+ return value
+
+ if self.conditions & MatchConditions.NOTHING and not value and state.current_boundary and not state.current_boundary.match:
+ return value
+
+ if state.mode == ParserMode.PARSE:
+ if choices:
+ raise ParserError(f'"{value}" not in: {", ".join(choices)}')
+
+ raise self.no_choices_available(value)
+
+ raise CompletionUnavailable()
+
+ matches = [choice for choice in choices if choice.startswith(value)]
+
+ if not matches:
+ raise self.no_completion_match(value)
+
+ continuation = state.current_boundary.delimiters if state.current_boundary and state.current_boundary.required else ''
+
+ raise CompletionSuccess(
+ list_mode=state.mode == ParserMode.LIST,
+ consumed=state.consumed,
+ continuation=continuation,
+ matches=matches,
+ )
+
+
+class ChoicesParser(DynamicChoicesParser):
+ """Composite argument parser which relies on a static list of choices."""
+ def __init__(self, choices, conditions=MatchConditions.CHOICE): # type: (t.List[str], MatchConditions) -> None
+ self.choices = choices
+
+ super().__init__(conditions=conditions)
+
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+ return self.choices
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return '|'.join(self.choices)
+
+
+class IntegerParser(DynamicChoicesParser):
+ """Composite argument parser for integers."""
+ PATTERN = re.compile('^[1-9][0-9]*$')
+
+ def __init__(self, maximum=None): # type: (t.Optional[int]) -> None
+ self.maximum = maximum
+
+ super().__init__()
+
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+ if not value:
+ numbers = list(range(1, 10))
+ elif self.PATTERN.search(value):
+ int_prefix = int(value)
+ base = int_prefix * 10
+ numbers = [int_prefix] + [base + i for i in range(0, 10)]
+ else:
+ numbers = []
+
+ # NOTE: the minimum is currently fixed at 1
+
+ if self.maximum is not None:
+ numbers = [n for n in numbers if n <= self.maximum]
+
+ return [str(n) for n in numbers]
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state)
+ return int(value)
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return '{integer}'
+
+
+class BooleanParser(ChoicesParser):
+ """Composite argument parser for boolean (yes/no) values."""
+ def __init__(self):
+ super().__init__(['yes', 'no'])
+
+ def parse(self, state): # type: (ParserState) -> bool
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state)
+ return value == 'yes'
+
+
+class AnyParser(ChoicesParser):
+ """Composite argument parser which accepts any input value."""
+ def __init__(self, nothing=False, no_match_message=None): # type: (bool, t.Optional[str]) -> None
+ self.no_match_message = no_match_message
+
+ conditions = MatchConditions.ANY
+
+ if nothing:
+ conditions |= MatchConditions.NOTHING
+
+ super().__init__([], conditions=conditions)
+
+ def no_completion_match(self, value): # type: (str) -> CompletionUnavailable
+ """Return an instance of CompletionUnavailable when no match was found for the given value."""
+ if self.no_match_message:
+ return CompletionUnavailable(message=self.no_match_message)
+
+ return super().no_completion_match(value)
+
+ def no_choices_available(self, value): # type: (str) -> ParserError
+ """Return an instance of ParserError when parsing fails and no choices are available."""
+ if self.no_match_message:
+ return ParserError(self.no_match_message)
+
+ return super().no_choices_available(value)
+
+
+class RelativePathNameParser(DynamicChoicesParser):
+ """Composite argument parser for relative path names."""
+ RELATIVE_NAMES = ['.', '..']
+
+ def __init__(self, choices): # type: (t.List[str]) -> None
+ self.choices = choices
+
+ super().__init__()
+
+ def get_choices(self, value): # type: (str) -> t.List[str]
+ """Return a list of valid choices based on the given input value."""
+ choices = list(self.choices)
+
+ if value in self.RELATIVE_NAMES:
+ # complete relative names, but avoid suggesting them unless the current name is relative
+ # unfortunately this will be sorted in reverse of what bash presents ("../ ./" instead of "./ ../")
+ choices.extend(f'{item}{PATH_DELIMITER}' for item in self.RELATIVE_NAMES)
+
+ return choices
+
+
+class FileParser(Parser):
+ """Composite argument parser for absolute or relative file paths."""
+ def parse(self, state): # type: (ParserState) -> str
+ """Parse the input from the given state and return the result."""
+ if state.mode == ParserMode.PARSE:
+ path = AnyParser().parse(state)
+
+ if not os.path.isfile(path):
+ raise ParserError(f'Not a file: {path}')
+ else:
+ path = ''
+
+ with state.delimit(PATH_DELIMITER, required=False) as boundary:
+ while boundary.ready:
+ directory = path or '.'
+
+ try:
+ with os.scandir(directory) as scan: # type: t.Iterator[os.DirEntry]
+ choices = [f'{item.name}{PATH_DELIMITER}' if item.is_dir() else item.name for item in scan]
+ except OSError:
+ choices = []
+
+ if not path:
+ choices.append(PATH_DELIMITER) # allow absolute paths
+ choices.append('../') # suggest relative paths
+
+ part = RelativePathNameParser(choices).parse(state)
+ path += f'{part}{boundary.match or ""}'
+
+ return path
+
+
+class AbsolutePathParser(Parser):
+ """Composite argument parser for absolute file paths. Paths are only verified for proper syntax, not for existence."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ path = ''
+
+ with state.delimit(PATH_DELIMITER, required=False) as boundary:
+ while boundary.ready:
+ if path:
+ path += AnyParser(nothing=True).parse(state)
+ else:
+ path += ChoicesParser([PATH_DELIMITER]).parse(state)
+
+ path += (boundary.match or '')
+
+ return path
+
+
+class NamespaceParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers that store their results in a namespace."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = state.current_namespace
+ current = getattr(namespace, self.dest)
+
+ if current and self.limit_one:
+ if state.mode == ParserMode.PARSE:
+ raise ParserError('Option cannot be specified more than once.')
+
+ raise CompletionError('Option cannot be specified more than once.')
+
+ value = self.get_value(state)
+
+ if self.use_list:
+ if not current:
+ current = []
+ setattr(namespace, self.dest, current)
+
+ current.append(value)
+ else:
+ setattr(namespace, self.dest, value)
+
+ return value
+
+ def get_value(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result, without storing the result in the namespace."""
+ return super().parse(state)
+
+ @property
+ def use_list(self): # type: () -> bool
+ """True if the destination is a list, otherwise False."""
+ return False
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return not self.use_list
+
+ @property
+ @abc.abstractmethod
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+
+
+class NamespaceWrappedParser(NamespaceParser):
+ """Composite argument parser that wraps a non-namespace parser and stores the result in a namespace."""
+ def __init__(self, dest, parser): # type: (str, Parser) -> None
+ self._dest = dest
+ self.parser = parser
+
+ def get_value(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result, without storing the result in the namespace."""
+ return self.parser.parse(state)
+
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return self._dest
+
+
+class KeyValueParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for key/value composite argument parsers."""
+ @abc.abstractmethod
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of key names and value parsers."""
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = state.current_namespace
+ parsers = self.get_parsers(state)
+ keys = list(parsers)
+
+ with state.delimit(PAIR_DELIMITER, required=False) as pair:
+ while pair.ready:
+ with state.delimit(ASSIGNMENT_DELIMITER):
+ key = ChoicesParser(keys).parse(state)
+
+ value = parsers[key].parse(state)
+
+ setattr(namespace, key, value)
+
+ keys.remove(key)
+
+ return namespace
+
+
+class PairParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers consisting of a left and right argument parser, with input separated by a delimiter."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = self.create_namespace()
+
+ state.set_namespace(namespace)
+
+ with state.delimit(self.delimiter, self.required) as boundary:
+ choice = self.get_left_parser(state).parse(state)
+
+ if boundary.match:
+ self.get_right_parser(choice).parse(state)
+
+ return namespace
+
+ @property
+ def required(self): # type: () -> bool
+ """True if the delimiter (and thus right parser) is required, otherwise False."""
+ return False
+
+ @property
+ def delimiter(self): # type: () -> str
+ """The delimiter to use between the left and right parser."""
+ return PAIR_DELIMITER
+
+ @abc.abstractmethod
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+
+ @abc.abstractmethod
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+
+ @abc.abstractmethod
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+
+
+class TypeParser(Parser, metaclass=abc.ABCMeta):
+ """Base class for composite argument parsers which parse a type name, a colon and then parse results based on the type given by the type name."""
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser] # pylint: disable=unused-argument
+ """Return a dictionary of type names and type parsers."""
+ return self.get_stateless_parsers()
+
+ @abc.abstractmethod
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ parsers = self.get_parsers(state)
+
+ with state.delimit(':'):
+ key = ChoicesParser(list(parsers)).parse(state)
+
+ value = parsers[key].parse(state)
+
+ return value
diff --git a/test/lib/ansible_test/_internal/cli/commands/__init__.py b/test/lib/ansible_test/_internal/cli/commands/__init__.py
new file mode 100644
index 0000000000..5cd37f4f91
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/__init__.py
@@ -0,0 +1,240 @@
+"""Command line parsing for all commands."""
+from __future__ import annotations
+
+import argparse
+import functools
+import sys
+
+from ...util import (
+ display,
+)
+
+from ..completers import (
+ complete_target,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .coverage import (
+ do_coverage,
+)
+
+from .env import (
+ do_env,
+)
+
+from .integration import (
+ do_integration,
+)
+
+from .sanity import (
+ do_sanity,
+)
+
+from .shell import (
+ do_shell,
+)
+
+from .units import (
+ do_units,
+)
+
+
+def do_commands(
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all commands."""
+ common = argparse.ArgumentParser(add_help=False)
+
+ common.add_argument(
+ '-e',
+ '--explain',
+ action='store_true',
+ help='explain commands that would be executed',
+ )
+
+ common.add_argument(
+ '-v',
+ '--verbose',
+ dest='verbosity',
+ action='count',
+ default=0,
+ help='display more output',
+ )
+
+ common.add_argument(
+ '--color',
+ metavar='COLOR',
+ nargs='?',
+ help='generate color output: yes, no, auto',
+ const='yes',
+ default='auto',
+ type=color,
+ )
+
+ common.add_argument(
+ '--debug',
+ action='store_true',
+ help='run ansible commands in debug mode',
+ )
+
+ common.add_argument(
+ '--truncate',
+ dest='truncate',
+ metavar='COLUMNS',
+ type=int,
+ default=display.columns,
+ help='truncate some long output (0=disabled) (default: auto)',
+ )
+
+ common.add_argument(
+ '--redact',
+ dest='redact',
+ action='store_true',
+ default=True,
+ help=argparse.SUPPRESS, # kept for backwards compatibility, but no point in advertising since it's the default
+ )
+
+ common.add_argument(
+ '--no-redact',
+ dest='redact',
+ action='store_false',
+ default=False,
+ help='show sensitive values in output',
+ )
+
+ test = argparse.ArgumentParser(add_help=False, parents=[common])
+
+ testing = test.add_argument_group(title='common testing arguments')
+
+ testing.add_argument(
+ 'include',
+ metavar='TARGET',
+ nargs='*',
+ help='test the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--include',
+ metavar='TARGET',
+ action='append',
+ help='include the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--exclude',
+ metavar='TARGET',
+ action='append',
+ help='exclude the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--require',
+ metavar='TARGET',
+ action='append',
+ help='require the specified target',
+ ).completer = functools.partial(complete_target, completer)
+
+ testing.add_argument(
+ '--coverage',
+ action='store_true',
+ help='analyze code coverage when running tests',
+ )
+
+ testing.add_argument(
+ '--coverage-check',
+ action='store_true',
+ help='only verify code coverage can be enabled',
+ )
+
+ testing.add_argument(
+ '--metadata',
+ help=argparse.SUPPRESS,
+ )
+
+ testing.add_argument(
+ '--base-branch',
+ metavar='BRANCH',
+ help='base branch used for change detection',
+ )
+
+ testing.add_argument(
+ '--changed',
+ action='store_true',
+ help='limit targets based on changes',
+ )
+
+ changes = test.add_argument_group(title='change detection arguments')
+
+ changes.add_argument(
+ '--tracked',
+ action='store_true',
+ help=argparse.SUPPRESS,
+ )
+
+ changes.add_argument(
+ '--untracked',
+ action='store_true',
+ help='include untracked files',
+ )
+
+ changes.add_argument(
+ '--ignore-committed',
+ dest='committed',
+ action='store_false',
+ help='exclude committed files',
+ )
+
+ changes.add_argument(
+ '--ignore-staged',
+ dest='staged',
+ action='store_false',
+ help='exclude staged files',
+ )
+
+ changes.add_argument(
+ '--ignore-unstaged',
+ dest='unstaged',
+ action='store_false',
+ help='exclude unstaged files',
+ )
+
+ changes.add_argument(
+ '--changed-from',
+ metavar='PATH',
+ help=argparse.SUPPRESS,
+ )
+
+ changes.add_argument(
+ '--changed-path',
+ metavar='PATH',
+ action='append',
+ help=argparse.SUPPRESS,
+ )
+
+ subparsers = parent.add_subparsers(metavar='COMMAND', required=True)
+
+ do_coverage(subparsers, common, completer)
+ do_env(subparsers, common, completer)
+ do_shell(subparsers, common, completer)
+
+ do_integration(subparsers, test, completer)
+ do_sanity(subparsers, test, completer)
+ do_units(subparsers, test, completer)
+
+
+def color(value): # type: (str) -> bool
+ """Strict converter for color option."""
+ if value == 'yes':
+ return True
+
+ if value == 'no':
+ return False
+
+ if value == 'auto':
+ return sys.stdout.isatty()
+
+ raise argparse.ArgumentTypeError(f"invalid choice: '{value}' (choose from 'yes', 'no', 'auto')")
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py
new file mode 100644
index 0000000000..a57ed126ce
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/__init__.py
@@ -0,0 +1,85 @@
+"""Command line parsing for all `coverage` commands."""
+from __future__ import annotations
+
+import argparse
+
+from ....commands.coverage import (
+ COVERAGE_GROUPS,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .analyze import (
+ do_analyze,
+)
+
+from .combine import (
+ do_combine,
+)
+
+from .erase import (
+ do_erase,
+)
+
+from .html import (
+ do_html,
+)
+
+from .report import (
+ do_report,
+)
+
+from .xml import (
+ do_xml,
+)
+
+
+def do_coverage(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all `coverage` commands."""
+ coverage_common = argparse.ArgumentParser(add_help=False, parents=[parent])
+
+ parser = subparsers.add_parser(
+ 'coverage',
+ help='code coverage management and reporting',
+ )
+
+ coverage_subparsers = parser.add_subparsers(metavar='COMMAND', required=True)
+
+ do_analyze(coverage_subparsers, coverage_common, completer)
+ do_erase(coverage_subparsers, coverage_common, completer)
+
+ do_combine(coverage_subparsers, parent, add_coverage_common, completer)
+ do_report(coverage_subparsers, parent, add_coverage_common, completer)
+ do_html(coverage_subparsers, parent, add_coverage_common, completer)
+ do_xml(coverage_subparsers, parent, add_coverage_common, completer)
+
+
+def add_coverage_common(
+ parser, # type: argparse.ArgumentParser
+):
+ """Add common coverage arguments."""
+ parser.add_argument(
+ '--group-by',
+ metavar='GROUP',
+ action='append',
+ choices=COVERAGE_GROUPS,
+ help='group output by: %s' % ', '.join(COVERAGE_GROUPS),
+ )
+
+ parser.add_argument(
+ '--all',
+ action='store_true',
+ help='include all python/powershell source files',
+ )
+
+ parser.add_argument(
+ '--stub',
+ action='store_true',
+ help='generate empty report of all python/powershell source files',
+ )
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py
new file mode 100644
index 0000000000..0f4568dcad
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/__init__.py
@@ -0,0 +1,28 @@
+"""Command line parsing for all `coverage analyze` commands."""
+from __future__ import annotations
+
+import argparse
+
+from .targets import (
+ do_targets,
+)
+
+from ....environments import (
+ CompositeActionCompletionFinder,
+)
+
+
+def do_analyze(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all `coverage analyze` commands."""
+ parser = subparsers.add_parser(
+ 'analyze',
+ help='analyze collected coverage data',
+ ) # type: argparse.ArgumentParser
+
+ analyze_subparsers = parser.add_subparsers(metavar='COMMAND', required=True)
+
+ do_targets(analyze_subparsers, parent, completer)
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py
new file mode 100644
index 0000000000..c572b3bbdf
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/__init__.py
@@ -0,0 +1,48 @@
+"""Command line parsing for all `coverage analyze targets` commands."""
+from __future__ import annotations
+
+import argparse
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .combine import (
+ do_combine,
+)
+
+from .expand import (
+ do_expand,
+)
+
+from .filter import (
+ do_filter,
+)
+
+from .generate import (
+ do_generate,
+)
+
+from .missing import (
+ do_missing,
+)
+
+
+def do_targets(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for all `coverage analyze targets` commands."""
+ targets = subparsers.add_parser(
+ 'targets',
+ help='analyze integration test target coverage',
+ )
+
+ targets_subparsers = targets.add_subparsers(metavar='COMMAND', required=True)
+
+ do_generate(targets_subparsers, parent, completer)
+ do_expand(targets_subparsers, parent, completer)
+ do_filter(targets_subparsers, parent, completer)
+ do_combine(targets_subparsers, parent, completer)
+ do_missing(targets_subparsers, parent, completer)
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py
new file mode 100644
index 0000000000..c5b666f65b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/combine.py
@@ -0,0 +1,49 @@
+"""Command line parsing for the `coverage analyze targets combine` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.combine import (
+ command_coverage_analyze_targets_combine,
+ CoverageAnalyzeTargetsCombineConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_combine(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets combine` command."""
+ parser = subparsers.add_parser(
+ 'combine',
+ parents=[parent],
+ help='combine multiple aggregated coverage files',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_combine,
+ config=CoverageAnalyzeTargetsCombineConfig,
+ )
+
+ targets_combine = parser.add_argument_group('coverage arguments')
+
+ targets_combine.add_argument(
+ 'input_file',
+ nargs='+',
+ help='input file to read aggregated coverage from',
+ )
+
+ targets_combine.add_argument(
+ 'output_file',
+ help='output file to write aggregated coverage to',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets combine
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py
new file mode 100644
index 0000000000..ec74cab697
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/expand.py
@@ -0,0 +1,48 @@
+"""Command line parsing for the `coverage analyze targets expand` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.expand import (
+ command_coverage_analyze_targets_expand,
+ CoverageAnalyzeTargetsExpandConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_expand(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets expand` command."""
+ parser = subparsers.add_parser(
+ 'expand',
+ parents=[parent],
+ help='expand target names from integers in aggregated coverage',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_expand,
+ config=CoverageAnalyzeTargetsExpandConfig,
+ )
+
+ targets_expand = parser.add_argument_group(title='coverage arguments')
+
+ targets_expand.add_argument(
+ 'input_file',
+ help='input file to read aggregated coverage from',
+ )
+
+ targets_expand.add_argument(
+ 'output_file',
+ help='output file to write expanded coverage to',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets expand
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py
new file mode 100644
index 0000000000..b746fe7b72
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/filter.py
@@ -0,0 +1,76 @@
+"""Command line parsing for the `coverage analyze targets filter` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.filter import (
+ command_coverage_analyze_targets_filter,
+ CoverageAnalyzeTargetsFilterConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_filter(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets filter` command."""
+ parser = subparsers.add_parser(
+ 'filter',
+ parents=[parent],
+ help='filter aggregated coverage data',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_filter,
+ config=CoverageAnalyzeTargetsFilterConfig,
+ )
+
+ targets_filter = parser.add_argument_group(title='coverage arguments')
+
+ targets_filter.add_argument(
+ 'input_file',
+ help='input file to read aggregated coverage from',
+ )
+
+ targets_filter.add_argument(
+ 'output_file',
+ help='output file to write expanded coverage to',
+ )
+
+ targets_filter.add_argument(
+ '--include-target',
+ metavar='TGT',
+ dest='include_targets',
+ action='append',
+ help='include the specified targets',
+ )
+
+ targets_filter.add_argument(
+ '--exclude-target',
+ metavar='TGT',
+ dest='exclude_targets',
+ action='append',
+ help='exclude the specified targets',
+ )
+
+ targets_filter.add_argument(
+ '--include-path',
+ metavar='REGEX',
+ help='include paths matching the given regex',
+ )
+
+ targets_filter.add_argument(
+ '--exclude-path',
+ metavar='REGEX',
+ help='exclude paths matching the given regex',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets filter
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py
new file mode 100644
index 0000000000..ed7be95d14
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/generate.py
@@ -0,0 +1,49 @@
+"""Command line parsing for the `coverage analyze targets generate` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.generate import (
+ command_coverage_analyze_targets_generate,
+ CoverageAnalyzeTargetsGenerateConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_generate(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets generate` command."""
+ parser = subparsers.add_parser(
+ 'generate',
+ parents=[parent],
+ help='aggregate coverage by integration test target',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_generate,
+ config=CoverageAnalyzeTargetsGenerateConfig,
+ )
+
+ targets_generate = parser.add_argument_group(title='coverage arguments')
+
+ targets_generate.add_argument(
+ 'input_dir',
+ nargs='?',
+ help='directory to read coverage from',
+ )
+
+ targets_generate.add_argument(
+ 'output_file',
+ help='output file for aggregated coverage',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets generate
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py
new file mode 100644
index 0000000000..45db16e00a
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/analyze/targets/missing.py
@@ -0,0 +1,65 @@
+"""Command line parsing for the `coverage analyze targets missing` command."""
+from __future__ import annotations
+
+import argparse
+
+from ......commands.coverage.analyze.targets.missing import (
+ command_coverage_analyze_targets_missing,
+ CoverageAnalyzeTargetsMissingConfig,
+)
+
+from .....environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_missing(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `coverage analyze targets missing` command."""
+ parser = subparsers.add_parser(
+ 'missing',
+ parents=[parent],
+ help='identify coverage in one file missing in another',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_analyze_targets_missing,
+ config=CoverageAnalyzeTargetsMissingConfig,
+ )
+
+ targets_missing = parser.add_argument_group(title='coverage arguments')
+
+ targets_missing.add_argument(
+ 'from_file',
+ help='input file containing aggregated coverage',
+ )
+
+ targets_missing.add_argument(
+ 'to_file',
+ help='input file containing aggregated coverage',
+ )
+
+ targets_missing.add_argument(
+ 'output_file',
+ help='output file to write aggregated coverage to',
+ )
+
+ targets_missing.add_argument(
+ '--only-gaps',
+ action='store_true',
+ help='report only arcs/lines not hit by any target',
+ )
+
+ targets_missing.add_argument(
+ '--only-exists',
+ action='store_true',
+ help='limit results to files that exist',
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage analyze targets missing
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py b/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py
new file mode 100644
index 0000000000..fd4b0003aa
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/combine.py
@@ -0,0 +1,48 @@
+"""Command line parsing for the `coverage combine` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.combine import (
+ command_coverage_combine,
+ CoverageCombineConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_combine(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage combine` command."""
+ parser = subparsers.add_parser(
+ 'combine',
+ parents=[parent],
+ help='combine coverage data and rewrite remote paths',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_combine,
+ config=CoverageCombineConfig,
+ )
+
+ coverage_combine = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='coverage arguments'))
+
+ add_coverage_common(coverage_combine)
+
+ coverage_combine.add_argument(
+ '--export',
+ metavar='DIR',
+ help='directory to export combined coverage files to',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage combine
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py b/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py
new file mode 100644
index 0000000000..31432849bf
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/erase.py
@@ -0,0 +1,36 @@
+"""Command line parsing for the `coverage erase` command."""
+from __future__ import annotations
+
+import argparse
+
+from ....commands.coverage.erase import (
+ command_coverage_erase,
+ CoverageEraseConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_erase(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage erase` command."""
+ parser = subparsers.add_parser(
+ 'erase',
+ parents=[parent],
+ help='erase coverage data files',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_erase,
+ config=CoverageEraseConfig,
+ )
+
+ add_environments(parser, completer, ControllerMode.ORIGIN, TargetMode.NO_TARGETS) # coverage erase
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/html.py b/test/lib/ansible_test/_internal/cli/commands/coverage/html.py
new file mode 100644
index 0000000000..e4b023ffd1
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/html.py
@@ -0,0 +1,42 @@
+"""Command line parsing for the `coverage html` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.html import (
+ command_coverage_html,
+ CoverageHtmlConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_html(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage html` command."""
+ parser = subparsers.add_parser(
+ 'html',
+ parents=[parent],
+ help='generate html coverage report',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_html,
+ config=CoverageHtmlConfig,
+ )
+
+ coverage_combine = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='coverage arguments'))
+
+ add_coverage_common(coverage_combine)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage html
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/report.py b/test/lib/ansible_test/_internal/cli/commands/coverage/report.py
new file mode 100644
index 0000000000..af5950b3a9
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/report.py
@@ -0,0 +1,60 @@
+"""Command line parsing for the `coverage report` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.report import (
+ command_coverage_report,
+ CoverageReportConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_report(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage report` command."""
+ parser = subparsers.add_parser(
+ 'report',
+ parents=[parent],
+ help='generate console coverage report',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_report,
+ config=CoverageReportConfig,
+ )
+
+ coverage_report = t.cast(argparse.ArgumentParser, parser.add_argument_group('coverage arguments'))
+
+ add_coverage_common(coverage_report)
+
+ coverage_report.add_argument(
+ '--show-missing',
+ action='store_true',
+ help='show line numbers of statements not executed',
+ )
+
+ coverage_report.add_argument(
+ '--include',
+ metavar='PAT[,...]',
+ help='only include paths that match a pattern (accepts quoted shell wildcards)',
+ )
+
+ coverage_report.add_argument(
+ '--omit',
+ metavar='PAT[,...]',
+ help='omit paths that match a pattern (accepts quoted shell wildcards)',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage report
diff --git a/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py b/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py
new file mode 100644
index 0000000000..5079c8f74a
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/coverage/xml.py
@@ -0,0 +1,42 @@
+"""Command line parsing for the `coverage xml` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.coverage.xml import (
+ command_coverage_xml,
+ CoverageXmlConfig,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_xml(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_coverage_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+): # type: (...) -> None
+ """Command line parsing for the `coverage xml` command."""
+ parser = subparsers.add_parser(
+ 'xml',
+ parents=[parent],
+ help='generate xml coverage report',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_coverage_xml,
+ config=CoverageXmlConfig,
+ )
+
+ coverage_combine = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='coverage arguments'))
+
+ add_coverage_common(coverage_combine)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NO_TARGETS) # coverage xml
diff --git a/test/lib/ansible_test/_internal/cli/commands/env.py b/test/lib/ansible_test/_internal/cli/commands/env.py
new file mode 100644
index 0000000000..53437a1f96
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/env.py
@@ -0,0 +1,63 @@
+"""Command line parsing for the `env` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...commands.env import (
+ EnvConfig,
+ command_env,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_env(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `env` command."""
+ parser = subparsers.add_parser(
+ 'env',
+ parents=[parent],
+ help='show information about the test environment',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_env,
+ config=EnvConfig,
+ )
+
+ env = parser.add_argument_group(title='env arguments')
+
+ env.add_argument(
+ '--show',
+ action='store_true',
+ help='show environment on stdout',
+ )
+
+ env.add_argument(
+ '--dump',
+ action='store_true',
+ help='dump environment to disk',
+ )
+
+ env.add_argument(
+ '--list-files',
+ action='store_true',
+ help='list files on stdout',
+ )
+
+ env.add_argument(
+ '--timeout',
+ type=int,
+ metavar='MINUTES',
+ help='timeout for future ansible-test commands (0 clears)',
+ )
+
+ add_environments(parser, completer, ControllerMode.NO_DELEGATION, TargetMode.NO_TARGETS) # env
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py
new file mode 100644
index 0000000000..f79fb1cfc2
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/__init__.py
@@ -0,0 +1,161 @@
+"""Command line parsing for all integration commands."""
+from __future__ import annotations
+
+import argparse
+
+from ...completers import (
+ complete_target,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+)
+
+from .network import (
+ do_network_integration,
+)
+
+from .posix import (
+ do_posix_integration,
+)
+
+from .windows import (
+ do_windows_integration,
+)
+
+
+def do_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for all integration commands."""
+ parser = argparse.ArgumentParser(
+ add_help=False,
+ parents=[parent],
+ ) # type: argparse.ArgumentParser
+
+ do_posix_integration(subparsers, parser, add_integration_common, completer)
+ do_network_integration(subparsers, parser, add_integration_common, completer)
+ do_windows_integration(subparsers, parser, add_integration_common, completer)
+
+
+def add_integration_common(
+ parser, # type: argparse.ArgumentParser
+):
+ """Add common integration arguments."""
+ parser.add_argument(
+ '--start-at',
+ metavar='TARGET',
+ help='start at the specified target',
+ ).completer = complete_target
+
+ parser.add_argument(
+ '--start-at-task',
+ metavar='TASK',
+ help='start at the specified task',
+ )
+
+ parser.add_argument(
+ '--tags',
+ metavar='TAGS',
+ help='only run plays and tasks tagged with these values',
+ )
+
+ parser.add_argument(
+ '--skip-tags',
+ metavar='TAGS',
+ help='only run plays and tasks whose tags do not match these values',
+ )
+
+ parser.add_argument(
+ '--diff',
+ action='store_true',
+ help='show diff output',
+ )
+
+ parser.add_argument(
+ '--allow-destructive',
+ action='store_true',
+ help='allow destructive tests',
+ )
+
+ parser.add_argument(
+ '--allow-root',
+ action='store_true',
+ help='allow tests requiring root when not root',
+ )
+
+ parser.add_argument(
+ '--allow-disabled',
+ action='store_true',
+ help='allow tests which have been marked as disabled',
+ )
+
+ parser.add_argument(
+ '--allow-unstable',
+ action='store_true',
+ help='allow tests which have been marked as unstable',
+ )
+
+ parser.add_argument(
+ '--allow-unstable-changed',
+ action='store_true',
+ help='allow tests which have been marked as unstable when focused changes are detected',
+ )
+
+ parser.add_argument(
+ '--allow-unsupported',
+ action='store_true',
+ help='allow tests which have been marked as unsupported',
+ )
+
+ parser.add_argument(
+ '--retry-on-error',
+ action='store_true',
+ help='retry failed test with increased verbosity',
+ )
+
+ parser.add_argument(
+ '--continue-on-error',
+ action='store_true',
+ help='continue after failed test',
+ )
+
+ parser.add_argument(
+ '--debug-strategy',
+ action='store_true',
+ help='run test playbooks using the debug strategy',
+ )
+
+ parser.add_argument(
+ '--changed-all-target',
+ metavar='TARGET',
+ default='all',
+ help='target to run when all tests are needed',
+ )
+
+ parser.add_argument(
+ '--changed-all-mode',
+ metavar='MODE',
+ choices=('default', 'include', 'exclude'),
+ help='include/exclude behavior with --changed-all-target: %(choices)s',
+ )
+
+ parser.add_argument(
+ '--list-targets',
+ action='store_true',
+ help='list matching targets instead of running tests',
+ )
+
+ parser.add_argument(
+ '--no-temp-workdir',
+ action='store_true',
+ help='do not run tests from a temporary directory (use only for verifying broken tests)',
+ )
+
+ parser.add_argument(
+ '--no-temp-unicode',
+ action='store_true',
+ help='avoid unicode characters in temporary directory (use only for verifying broken tests)',
+ )
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/network.py b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
new file mode 100644
index 0000000000..d070afda9b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/network.py
@@ -0,0 +1,81 @@
+"""Command line parsing for the `network-integration` command."""
+from __future__ import annotations
+
+import argparse
+import os
+import typing as t
+
+from ....commands.integration.network import (
+ command_network_integration,
+)
+
+from ....config import (
+ NetworkIntegrationConfig,
+)
+
+from ....target import (
+ walk_network_integration_targets,
+)
+
+from ....data import (
+ data_context,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_network_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_integration_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `network-integration` command."""
+ parser = subparsers.add_parser(
+ 'network-integration',
+ parents=[parent],
+ help='network integration tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_network_integration,
+ targets_func=walk_network_integration_targets,
+ config=NetworkIntegrationConfig)
+
+ network_integration = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='network integration test arguments'))
+
+ add_integration_common(network_integration)
+
+ network_integration.add_argument(
+ '--testcase',
+ metavar='TESTCASE',
+ help='limit a test to a specified testcase',
+ ).completer = complete_network_testcase
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.NETWORK_INTEGRATION) # network-integration
+
+
+def complete_network_testcase(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of test cases matching the given prefix if only one target was parsed from the command line, otherwise return an empty list."""
+ testcases = []
+
+ # since testcases are module specific, don't autocomplete if more than one
+ # module is specified
+ if len(parsed_args.include) != 1:
+ return []
+
+ target = parsed_args.include[0]
+ test_dir = os.path.join(data_context().content.integration_targets_path, target, 'tests')
+ connection_dirs = data_context().content.get_dirs(test_dir)
+
+ for connection_dir in connection_dirs:
+ for testcase in [os.path.basename(path) for path in data_context().content.get_files(connection_dir)]:
+ if testcase.startswith(prefix):
+ testcases.append(testcase.split('.', 1)[0])
+
+ return testcases
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/posix.py b/test/lib/ansible_test/_internal/cli/commands/integration/posix.py
new file mode 100644
index 0000000000..01d906b270
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/posix.py
@@ -0,0 +1,50 @@
+"""Command line parsing for the `integration` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.integration.posix import (
+ command_posix_integration,
+)
+
+from ....config import (
+ PosixIntegrationConfig,
+)
+
+from ....target import (
+ walk_posix_integration_targets,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_posix_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_integration_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `integration` command."""
+ parser = subparsers.add_parser(
+ 'integration',
+ parents=[parent],
+ help='posix integration tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_posix_integration,
+ targets_func=walk_posix_integration_targets,
+ config=PosixIntegrationConfig,
+ )
+
+ posix_integration = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='integration test arguments'))
+
+ add_integration_common(posix_integration)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.POSIX_INTEGRATION) # integration
diff --git a/test/lib/ansible_test/_internal/cli/commands/integration/windows.py b/test/lib/ansible_test/_internal/cli/commands/integration/windows.py
new file mode 100644
index 0000000000..6fef9334c9
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/integration/windows.py
@@ -0,0 +1,50 @@
+"""Command line parsing for the `windows-integration` command."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ....commands.integration.windows import (
+ command_windows_integration,
+)
+
+from ....config import (
+ WindowsIntegrationConfig,
+)
+
+from ....target import (
+ walk_windows_integration_targets,
+)
+
+from ...environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_windows_integration(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ add_integration_common, # type: t.Callable[[argparse.ArgumentParser], None]
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `windows-integration` command."""
+ parser = subparsers.add_parser(
+ 'windows-integration',
+ parents=[parent],
+ help='windows integration tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_windows_integration,
+ targets_func=walk_windows_integration_targets,
+ config=WindowsIntegrationConfig,
+ )
+
+ windows_integration = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='windows integration test arguments'))
+
+ add_integration_common(windows_integration)
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.WINDOWS_INTEGRATION) # windows-integration
diff --git a/test/lib/ansible_test/_internal/cli/commands/sanity.py b/test/lib/ansible_test/_internal/cli/commands/sanity.py
new file mode 100644
index 0000000000..9e4ab11b3f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/sanity.py
@@ -0,0 +1,113 @@
+"""Command line parsing for the `sanity` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...config import (
+ SanityConfig,
+)
+
+from ...commands.sanity import (
+ command_sanity,
+ sanity_get_tests,
+)
+
+from ...target import (
+ walk_sanity_targets,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_sanity(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `sanity` command."""
+ parser = subparsers.add_parser(
+ 'sanity',
+ parents=[parent],
+ help='sanity tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_sanity,
+ targets_func=walk_sanity_targets,
+ config=SanityConfig)
+
+ sanity = parser.add_argument_group(title='sanity test arguments')
+
+ sanity.add_argument(
+ '--test',
+ metavar='TEST',
+ action='append',
+ choices=[test.name for test in sanity_get_tests()],
+ help='tests to run',
+ )
+
+ sanity.add_argument(
+ '--skip-test',
+ metavar='TEST',
+ action='append',
+ choices=[test.name for test in sanity_get_tests()],
+ help='tests to skip',
+ )
+
+ sanity.add_argument(
+ '--allow-disabled',
+ action='store_true',
+ help='allow tests to run which are disabled by default',
+ )
+
+ sanity.add_argument(
+ '--list-tests',
+ action='store_true',
+ help='list available tests',
+ )
+
+ sanity.add_argument(
+ '--enable-optional-errors',
+ action='store_true',
+ help='enable optional errors',
+ )
+
+ if data_context().content.is_ansible:
+ sanity.add_argument(
+ '--keep-git',
+ action='store_true',
+ help='transfer git related files to the remote host/container',
+ )
+ else:
+ sanity.set_defaults(
+ keep_git=False,
+ )
+
+ sanity.add_argument(
+ '--lint',
+ action='store_true',
+ help='write lint output to stdout, everything else stderr',
+ )
+
+ sanity.add_argument(
+ '--junit',
+ action='store_true',
+ help='write test failures to junit xml files',
+ )
+
+ sanity.add_argument(
+ '--failure-ok',
+ action='store_true',
+ help='exit successfully on failed tests after saving results',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.SANITY) # sanity
diff --git a/test/lib/ansible_test/_internal/cli/commands/shell.py b/test/lib/ansible_test/_internal/cli/commands/shell.py
new file mode 100644
index 0000000000..301ff70e90
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/shell.py
@@ -0,0 +1,47 @@
+"""Command line parsing for the `shell` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...commands.shell import (
+ command_shell,
+)
+
+from ...config import (
+ ShellConfig,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_shell(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `shell` command."""
+ parser = subparsers.add_parser(
+ 'shell',
+ parents=[parent],
+ help='open an interactive shell',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_shell,
+ config=ShellConfig,
+ )
+
+ shell = parser.add_argument_group(title='shell arguments')
+
+ shell.add_argument(
+ '--raw',
+ action='store_true',
+ help='direct to shell with no setup',
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.SHELL) # shell
diff --git a/test/lib/ansible_test/_internal/cli/commands/units.py b/test/lib/ansible_test/_internal/cli/commands/units.py
new file mode 100644
index 0000000000..fdbbbc494d
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/commands/units.py
@@ -0,0 +1,65 @@
+"""Command line parsing for the `units` command."""
+from __future__ import annotations
+
+import argparse
+
+from ...config import (
+ UnitsConfig,
+)
+
+from ...commands.units import (
+ command_units,
+)
+
+from ...target import (
+ walk_units_targets,
+)
+
+from ..environments import (
+ CompositeActionCompletionFinder,
+ ControllerMode,
+ TargetMode,
+ add_environments,
+)
+
+
+def do_units(
+ subparsers,
+ parent, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+):
+ """Command line parsing for the `units` command."""
+ parser = subparsers.add_parser(
+ 'units',
+ parents=[parent],
+ help='unit tests',
+ ) # type: argparse.ArgumentParser
+
+ parser.set_defaults(
+ func=command_units,
+ targets_func=walk_units_targets,
+ config=UnitsConfig,
+ )
+
+ units = parser.add_argument_group(title='unit test arguments')
+
+ units.add_argument(
+ '--collect-only',
+ action='store_true',
+ help='collect tests but do not execute them',
+ )
+
+ units.add_argument(
+ '--num-workers',
+ metavar='INT',
+ type=int,
+ help='number of workers to use (default: auto)',
+ )
+
+ units.add_argument(
+ '--requirements-mode',
+ choices=('only', 'skip'),
+ help=argparse.SUPPRESS,
+ )
+
+ add_environments(parser, completer, ControllerMode.DELEGATED, TargetMode.UNITS) # units
diff --git a/test/lib/ansible_test/_internal/cli/compat.py b/test/lib/ansible_test/_internal/cli/compat.py
new file mode 100644
index 0000000000..b1ca08e39c
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/compat.py
@@ -0,0 +1,478 @@
+"""Provides compatibility with first-generation host delegation options in ansible-test."""
+from __future__ import annotations
+
+import argparse
+import dataclasses
+import enum
+import os
+import types
+import typing as t
+
+from ..constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ..util import (
+ ApplicationError,
+ display,
+ filter_args,
+ sorted_versions,
+ str_to_version,
+)
+
+from ..docker_util import (
+ docker_available,
+)
+
+from ..completion import (
+ DOCKER_COMPLETION,
+ REMOTE_COMPLETION,
+ filter_completion,
+)
+
+from ..host_configs import (
+ ControllerConfig,
+ ControllerHostConfig,
+ DockerConfig,
+ FallbackDetail,
+ FallbackReason,
+ HostConfig,
+ HostContext,
+ HostSettings,
+ NativePythonConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixRemoteConfig,
+ VirtualPythonConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+
+def filter_python(version, versions): # type: (t.Optional[str], t.Optional[t.List[str]]) -> t.Optional[str]
+ """If a Python version is given and is in the given version list, return that Python version, otherwise return None."""
+ return version if version in versions else None
+
+
+def controller_python(version): # type: (t.Optional[str]) -> t.Optional[str]
+ """If a Python version is given and is supported by the controller, return that Python version, otherwise return None."""
+ return filter_python(version, CONTROLLER_PYTHON_VERSIONS)
+
+
+def get_fallback_remote_controller(): # type: () -> str
+ """Return the remote fallback platform for the controller."""
+ platform = 'freebsd' # lower cost than RHEL and macOS
+ candidates = [item for item in filter_completion(REMOTE_COMPLETION).values() if item.controller_supported and item.platform == platform]
+ fallback = sorted(candidates, key=lambda value: str_to_version(value.version), reverse=True)[0]
+ return fallback.name
+
+
+def get_option_name(name): # type: (str) -> str
+ """Return a command-line option name from the given option name."""
+ if name == 'targets':
+ name = 'target'
+
+ return f'--{name.replace("_", "-")}'
+
+
+class PythonVersionUnsupportedError(ApplicationError):
+ """A Python version was requested for a context which does not support that version."""
+ def __init__(self, context, version, versions):
+ super().__init__(f'Python {version} is not supported by environment `{context}`. Supported Python version(s) are: {", ".join(versions)}')
+
+
+class PythonVersionUnspecifiedError(ApplicationError):
+ """A Python version was not specified for a context which is unknown, thus the Python version is unknown."""
+ def __init__(self, context):
+ super().__init__(f'A Python version was not specified for environment `{context}`. Use the `--python` option to specify a Python version.')
+
+
+class ControllerNotSupportedError(ApplicationError):
+ """Option(s) were specified which do not provide support for the controller and would be ignored because they are irrelevant for the target."""
+ def __init__(self, context):
+ super().__init__(f'Environment `{context}` does not provide a Python version supported by the controller.')
+
+
+class OptionsConflictError(ApplicationError):
+ """Option(s) were specified which conflict with other options."""
+ def __init__(self, first, second):
+ super().__init__(f'Options `{" ".join(first)}` cannot be combined with options `{" ".join(second)}`.')
+
+
+@dataclasses.dataclass(frozen=True)
+class LegacyHostOptions:
+ """Legacy host options used prior to the availability of separate controller and target host configuration."""
+ python: t.Optional[str] = None
+ python_interpreter: t.Optional[str] = None
+ local: t.Optional[bool] = None
+ venv: t.Optional[bool] = None
+ venv_system_site_packages: t.Optional[bool] = None
+ remote: t.Optional[str] = None
+ remote_provider: t.Optional[str] = None
+ docker: t.Optional[str] = None
+ docker_privileged: t.Optional[bool] = None
+ docker_seccomp: t.Optional[str] = None
+ docker_memory: t.Optional[int] = None
+ windows: t.Optional[t.List[str]] = None
+ platform: t.Optional[t.List[str]] = None
+ platform_collection: t.Optional[t.List[t.Tuple[str, str]]] = None
+ platform_connection: t.Optional[t.List[t.Tuple[str, str]]] = None
+ inventory: t.Optional[str] = None
+
+ @staticmethod
+ def create(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> LegacyHostOptions
+ """Create legacy host options from the given namespace."""
+ kwargs = {field.name: getattr(namespace, field.name, None) for field in dataclasses.fields(LegacyHostOptions)}
+
+ if kwargs['python'] == 'default':
+ kwargs['python'] = None
+
+ return LegacyHostOptions(**kwargs)
+
+ @staticmethod
+ def purge_namespace(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> None
+ """Purge legacy host options fields from the given namespace."""
+ for field in dataclasses.fields(LegacyHostOptions): # type: dataclasses.Field
+ if hasattr(namespace, field.name):
+ delattr(namespace, field.name)
+
+ @staticmethod
+ def purge_args(args): # type: (t.List[str]) -> t.List[str]
+ """Purge legacy host options from the given command line arguments."""
+ fields = dataclasses.fields(LegacyHostOptions) # type: t.Tuple[dataclasses.Field, ...]
+ filters = {get_option_name(field.name): 0 if field.type is t.Optional[bool] else 1 for field in fields} # type: t.Dict[str, int]
+
+ return filter_args(args, filters)
+
+ def get_options_used(self): # type: () -> t.Tuple[str, ...]
+ """Return a tuple of the command line options used."""
+ fields = dataclasses.fields(self) # type: t.Tuple[dataclasses.Field, ...]
+ options = tuple(sorted(get_option_name(field.name) for field in fields if getattr(self, field.name)))
+ return options
+
+
+class TargetMode(enum.Enum):
+ """Type of provisioning to use for the targets."""
+ WINDOWS_INTEGRATION = enum.auto() # windows-integration
+ NETWORK_INTEGRATION = enum.auto() # network-integration
+ POSIX_INTEGRATION = enum.auto() # integration
+ SANITY = enum.auto() # sanity
+ UNITS = enum.auto() # units
+ SHELL = enum.auto() # shell
+ NO_TARGETS = enum.auto() # coverage
+
+ @property
+ def one_host(self):
+ """Return True if only one host (the controller) should be used, otherwise return False."""
+ return self in (TargetMode.SANITY, TargetMode.UNITS, TargetMode.NO_TARGETS)
+
+ @property
+ def no_fallback(self):
+ """Return True if no fallback is acceptable for the controller (due to options not applying to the target), otherwise return False."""
+ return self in (TargetMode.WINDOWS_INTEGRATION, TargetMode.NETWORK_INTEGRATION, TargetMode.NO_TARGETS)
+
+ @property
+ def multiple_pythons(self):
+ """Return True if multiple Python versions are allowed, otherwise False."""
+ return self in (TargetMode.SANITY, TargetMode.UNITS)
+
+ @property
+ def has_python(self):
+ """Return True if this mode uses Python, otherwise False."""
+ return self in (TargetMode.POSIX_INTEGRATION, TargetMode.SANITY, TargetMode.UNITS, TargetMode.SHELL)
+
+
+def convert_legacy_args(
+ argv, # type: t.List[str]
+ args, # type: t.Union[argparse.Namespace, types.SimpleNamespace]
+ mode, # type: TargetMode
+): # type: (...) -> HostSettings
+ """Convert pre-split host arguments in the given namespace to their split counterparts."""
+ old_options = LegacyHostOptions.create(args)
+ old_options.purge_namespace(args)
+
+ new_options = [
+ '--controller',
+ '--target',
+ '--target-python',
+ ]
+
+ used_old_options = old_options.get_options_used()
+ used_new_options = [name for name in new_options if name in argv]
+
+ if used_old_options:
+ if used_new_options:
+ raise OptionsConflictError(used_old_options, used_new_options)
+
+ controller, targets, controller_fallback = get_legacy_host_config(mode, old_options)
+
+ if controller_fallback:
+ if mode.one_host:
+ display.info(controller_fallback.message, verbosity=1)
+ else:
+ display.warning(controller_fallback.message)
+
+ used_default_pythons = mode in (TargetMode.SANITY, TargetMode.UNITS) and not native_python(old_options)
+ else:
+ controller = args.controller or OriginConfig()
+ controller_fallback = None
+
+ if mode == TargetMode.NO_TARGETS:
+ targets = []
+ used_default_pythons = False
+ elif args.targets:
+ targets = args.targets
+ used_default_pythons = False
+ else:
+ targets = default_targets(mode, controller)
+ used_default_pythons = mode in (TargetMode.SANITY, TargetMode.UNITS)
+
+ args.controller = controller
+ args.targets = targets
+
+ if used_default_pythons:
+ targets = t.cast(t.List[ControllerConfig], targets)
+ skipped_python_versions = sorted_versions(list(set(SUPPORTED_PYTHON_VERSIONS) - {target.python.version for target in targets}))
+ else:
+ skipped_python_versions = []
+
+ filtered_args = old_options.purge_args(argv)
+ filtered_args = filter_args(filtered_args, {name: 1 for name in new_options})
+
+ host_settings = HostSettings(
+ controller=controller,
+ targets=targets,
+ skipped_python_versions=skipped_python_versions,
+ filtered_args=filtered_args,
+ controller_fallback=controller_fallback,
+ )
+
+ return host_settings
+
+
+def controller_targets(
+ mode, # type: TargetMode
+ options, # type: LegacyHostOptions
+ controller, # type: ControllerHostConfig
+): # type: (...) -> t.List[ControllerConfig]
+ """Return the configuration for controller targets."""
+ python = native_python(options)
+
+ if python:
+ targets = [ControllerConfig(python=python)]
+ else:
+ targets = default_targets(mode, controller)
+
+ return targets
+
+
+def native_python(options): # type: (LegacyHostOptions) -> t.Optional[NativePythonConfig]
+ """Return a NativePythonConfig for the given version if it is not None, otherwise return None."""
+ if not options.python and not options.python_interpreter:
+ return None
+
+ return NativePythonConfig(version=options.python, path=options.python_interpreter)
+
+
+def get_legacy_host_config(
+ mode, # type: TargetMode
+ options, # type: LegacyHostOptions
+): # type: (...) -> t.Tuple[HostConfig, t.List[HostConfig], t.Optional[FallbackDetail]]
+ """
+ Returns controller and target host configs derived from the provided legacy host options.
+ The goal is to match the original behavior, by using non-split testing whenever possible.
+ When the options support the controller, use the options for the controller and use ControllerConfig for the targets.
+ When the options do not support the controller, use the options for the targets and use a default controller config influenced by the options.
+ """
+ venv_fallback = 'venv/default'
+ docker_fallback = 'default'
+ remote_fallback = get_fallback_remote_controller()
+
+ controller_fallback = None # type: t.Optional[t.Tuple[str, str, FallbackReason]]
+
+ if options.venv:
+ if controller_python(options.python) or not options.python:
+ controller = OriginConfig(python=VirtualPythonConfig(version=options.python or 'default', system_site_packages=options.venv_system_site_packages))
+ else:
+ controller_fallback = f'origin:python={venv_fallback}', f'--venv --python {options.python}', FallbackReason.PYTHON
+ controller = OriginConfig(python=VirtualPythonConfig(version='default', system_site_packages=options.venv_system_site_packages))
+
+ if mode in (TargetMode.SANITY, TargetMode.UNITS):
+ targets = controller_targets(mode, options, controller)
+
+ # Target sanity tests either have no Python requirements or manage their own virtual environments.
+ # Thus there is no point in setting up virtual environments ahead of time for them.
+
+ if mode == TargetMode.UNITS:
+ targets = [ControllerConfig(python=VirtualPythonConfig(version=target.python.version, path=target.python.path,
+ system_site_packages=options.venv_system_site_packages)) for target in targets]
+ else:
+ targets = [ControllerConfig(python=VirtualPythonConfig(version=options.python or 'default',
+ system_site_packages=options.venv_system_site_packages))]
+ elif options.docker:
+ docker_config = filter_completion(DOCKER_COMPLETION).get(options.docker)
+
+ if docker_config:
+ if options.python and options.python not in docker_config.supported_pythons:
+ raise PythonVersionUnsupportedError(f'--docker {options.docker}', options.python, docker_config.supported_pythons)
+
+ if docker_config.controller_supported:
+ if controller_python(options.python) or not options.python:
+ controller = DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'docker:{options.docker}', f'--docker {options.docker} --python {options.python}', FallbackReason.PYTHON
+ controller = DockerConfig(name=options.docker)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'docker:{docker_fallback}', f'--docker {options.docker}', FallbackReason.ENVIRONMENT
+ controller = DockerConfig(name=docker_fallback)
+ targets = [DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)]
+ else:
+ if not options.python:
+ raise PythonVersionUnspecifiedError(f'--docker {options.docker}')
+
+ if controller_python(options.python):
+ controller = DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'docker:{docker_fallback}', f'--docker {options.docker} --python {options.python}', FallbackReason.PYTHON
+ controller = DockerConfig(name=docker_fallback)
+ targets = [DockerConfig(name=options.docker, python=native_python(options),
+ privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)]
+ elif options.remote:
+ remote_config = filter_completion(REMOTE_COMPLETION).get(options.remote)
+ context, reason = None, None
+
+ if remote_config:
+ if options.python and options.python not in remote_config.supported_pythons:
+ raise PythonVersionUnsupportedError(f'--remote {options.remote}', options.python, remote_config.supported_pythons)
+
+ if remote_config.controller_supported:
+ if controller_python(options.python) or not options.python:
+ controller = PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = f'remote:{options.remote}', f'--remote {options.remote} --python {options.python}', FallbackReason.PYTHON
+ controller = PosixRemoteConfig(name=options.remote, provider=options.remote_provider)
+ targets = controller_targets(mode, options, controller)
+ else:
+ context, reason = f'--remote {options.remote}', FallbackReason.ENVIRONMENT
+ controller = None
+ targets = [PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)]
+ elif mode == TargetMode.SHELL and options.remote.startswith('windows/'):
+ if options.python and options.python not in CONTROLLER_PYTHON_VERSIONS:
+ raise ControllerNotSupportedError(f'--python {options.python}')
+
+ controller = OriginConfig(python=native_python(options))
+ targets = [WindowsRemoteConfig(name=options.remote, provider=options.remote_provider)]
+ else:
+ if not options.python:
+ raise PythonVersionUnspecifiedError(f'--remote {options.remote}')
+
+ if controller_python(options.python):
+ controller = PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)
+ targets = controller_targets(mode, options, controller)
+ else:
+ context, reason = f'--remote {options.remote} --python {options.python}', FallbackReason.PYTHON
+ controller = None
+ targets = [PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)]
+
+ if not controller:
+ if docker_available():
+ controller_fallback = f'docker:{docker_fallback}', context, reason
+ controller = DockerConfig(name=docker_fallback)
+ else:
+ controller_fallback = f'remote:{remote_fallback}', context, reason
+ controller = PosixRemoteConfig(name=remote_fallback)
+ else: # local/unspecified
+ # There are several changes in behavior from the legacy implementation when using no delegation (or the `--local` option).
+ # These changes are due to ansible-test now maintaining consistency between its own Python and that of controller Python subprocesses.
+ #
+ # 1) The `--python-interpreter` option (if different from sys.executable) now affects controller subprocesses and triggers re-execution of ansible-test.
+ # Previously this option was completely ignored except when used with the `--docker` or `--remote` options.
+ # 2) The `--python` option now triggers re-execution of ansible-test if it differs from sys.version_info.
+ # Previously it affected Python subprocesses, but not ansible-test itself.
+
+ if controller_python(options.python) or not options.python:
+ controller = OriginConfig(python=native_python(options))
+ targets = controller_targets(mode, options, controller)
+ else:
+ controller_fallback = 'origin:python=default', f'--python {options.python}', FallbackReason.PYTHON
+ controller = OriginConfig()
+ targets = controller_targets(mode, options, controller)
+
+ if controller_fallback:
+ controller_option, context, reason = controller_fallback
+
+ if mode.no_fallback:
+ raise ControllerNotSupportedError(context)
+
+ fallback_detail = FallbackDetail(
+ reason=reason,
+ message=f'Using `--controller {controller_option}` since `{context}` does not support the controller.',
+ )
+ else:
+ fallback_detail = None
+
+ if mode.one_host and any(not isinstance(target, ControllerConfig) for target in targets):
+ raise ControllerNotSupportedError(controller_fallback[1])
+
+ if mode == TargetMode.NO_TARGETS:
+ targets = []
+ else:
+ targets = handle_non_posix_targets(mode, options, targets)
+
+ return controller, targets, fallback_detail
+
+
+def handle_non_posix_targets(
+ mode, # type: TargetMode
+ options, # type: LegacyHostOptions
+ targets, # type: t.List[HostConfig]
+): # type: (...) -> t.List[HostConfig]
+ """Return a list of non-POSIX targets if the target mode is non-POSIX."""
+ if mode == TargetMode.WINDOWS_INTEGRATION:
+ if options.windows:
+ targets = [WindowsRemoteConfig(name=f'windows/{version}', provider=options.remote_provider) for version in options.windows]
+ else:
+ targets = [WindowsInventoryConfig(path=options.inventory)]
+ elif mode == TargetMode.NETWORK_INTEGRATION:
+ if options.platform:
+ targets = [NetworkRemoteConfig(name=platform, provider=options.remote_provider) for platform in options.platform]
+
+ for platform, collection in options.platform_collection or []:
+ for entry in targets:
+ if entry.platform == platform:
+ entry.collection = collection
+
+ for platform, connection in options.platform_connection or []:
+ for entry in targets:
+ if entry.platform == platform:
+ entry.connection = connection
+ else:
+ targets = [NetworkInventoryConfig(path=options.inventory)]
+
+ return targets
+
+
+def default_targets(
+ mode, # type: TargetMode
+ controller, # type: ControllerHostConfig
+): # type: (...) -> t.List[HostConfig]
+ """Return a list of default targets for the given target mode."""
+ if mode == TargetMode.WINDOWS_INTEGRATION:
+ targets = [WindowsInventoryConfig(path=os.path.abspath('test/integration/inventory.winrm'))]
+ elif mode == TargetMode.NETWORK_INTEGRATION:
+ targets = [NetworkInventoryConfig(path=os.path.abspath('test/integration/inventory.networking'))]
+ elif mode.multiple_pythons:
+ targets = controller.get_default_targets(HostContext(controller_config=controller))
+ else:
+ targets = [ControllerConfig()]
+
+ return targets
diff --git a/test/lib/ansible_test/_internal/cli/completers.py b/test/lib/ansible_test/_internal/cli/completers.py
new file mode 100644
index 0000000000..a4b9c04f4e
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/completers.py
@@ -0,0 +1,26 @@
+"""Completers for use with argcomplete."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+from ..target import (
+ find_target_completion,
+)
+
+from .argparsing.argcompletion import (
+ OptionCompletionFinder,
+)
+
+
+def complete_target(completer, prefix, parsed_args, **_): # type: (OptionCompletionFinder, str, argparse.Namespace, ...) -> t.List[str]
+ """Perform completion for the targets configured for the command being parsed."""
+ matches = find_target_completion(parsed_args.targets_func, prefix, completer.list_mode)
+ completer.disable_completion_mangling = completer.list_mode and len(matches) > 1
+ return matches
+
+
+def complete_choices(choices, prefix, **_): # type: (t.List[str], str, ...) -> t.List[str]
+ """Perform completion using the provided choices."""
+ matches = [choice for choice in choices if choice.startswith(prefix)]
+ return matches
diff --git a/test/lib/ansible_test/_internal/cli/converters.py b/test/lib/ansible_test/_internal/cli/converters.py
new file mode 100644
index 0000000000..46562738ef
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/converters.py
@@ -0,0 +1,20 @@
+"""Converters for use as the type argument for arparse's add_argument method."""
+from __future__ import annotations
+
+import argparse
+import typing as t
+
+
+def key_value_type(value): # type: (str) -> t.Tuple[str, str]
+ """Wrapper around key_value."""
+ return key_value(value)
+
+
+def key_value(value): # type: (str) -> t.Tuple[str, str]
+ """Type parsing and validation for argparse key/value pairs separated by an '=' character."""
+ parts = value.split('=')
+
+ if len(parts) != 2:
+ raise argparse.ArgumentTypeError('"%s" must be in the format "key=value"' % value)
+
+ return parts[0], parts[1]
diff --git a/test/lib/ansible_test/_internal/cli/environments.py b/test/lib/ansible_test/_internal/cli/environments.py
new file mode 100644
index 0000000000..b041ece7dc
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/environments.py
@@ -0,0 +1,567 @@
+"""Command line parsing for test environments."""
+from __future__ import annotations
+
+import argparse
+import enum
+import functools
+import typing as t
+
+from ..constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_PROVIDERS,
+ SECCOMP_CHOICES,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ..completion import (
+ DOCKER_COMPLETION,
+ NETWORK_COMPLETION,
+ REMOTE_COMPLETION,
+ WINDOWS_COMPLETION,
+ filter_completion,
+)
+
+from ..cli.argparsing import (
+ CompositeAction,
+ CompositeActionCompletionFinder,
+)
+
+from ..cli.argparsing.actions import (
+ EnumAction,
+)
+
+from ..cli.actions import (
+ DelegatedControllerAction,
+ NetworkSshTargetAction,
+ NetworkTargetAction,
+ OriginControllerAction,
+ PosixSshTargetAction,
+ PosixTargetAction,
+ SanityPythonTargetAction,
+ UnitsPythonTargetAction,
+ WindowsSshTargetAction,
+ WindowsTargetAction,
+)
+
+from ..cli.compat import (
+ TargetMode,
+)
+
+from ..config import (
+ TerminateMode,
+)
+
+from .completers import (
+ complete_choices,
+)
+
+from .converters import (
+ key_value_type,
+)
+
+from ..ci import (
+ get_ci_provider,
+)
+
+
+class ControllerMode(enum.Enum):
+ """Type of provisioning to use for the controller."""
+ NO_DELEGATION = enum.auto()
+ ORIGIN = enum.auto()
+ DELEGATED = enum.auto()
+
+
+def add_environments(
+ parser, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+ controller_mode, # type: ControllerMode
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add arguments for the environments used to run ansible-test and commands it invokes."""
+ no_environment = controller_mode == ControllerMode.NO_DELEGATION and target_mode == TargetMode.NO_TARGETS
+
+ parser.set_defaults(no_environment=no_environment)
+
+ if no_environment:
+ return
+
+ parser.set_defaults(target_mode=target_mode)
+
+ add_global_options(parser, controller_mode)
+ add_legacy_environment_options(parser, controller_mode, target_mode)
+ action_types = add_composite_environment_options(parser, completer, controller_mode, target_mode)
+
+ sections = [f'{heading}\n{content}'
+ for action_type, documentation_state in CompositeAction.documentation_state.items() if action_type in action_types
+ for heading, content in documentation_state.sections.items()]
+
+ if not get_ci_provider().supports_core_ci_auth():
+ sections.append('Remote provisioning options have been hidden since no Ansible Core CI API key was found.')
+
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+ parser.epilog = '\n\n'.join(sections)
+
+
+def add_global_options(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+):
+ """Add global options for controlling the test environment that work with both the legacy and composite options."""
+ global_parser = t.cast(argparse.ArgumentParser, parser.add_argument_group(title='global environment arguments'))
+
+ global_parser.add_argument(
+ '--containers',
+ metavar='JSON',
+ help=argparse.SUPPRESS,
+ )
+
+ global_parser.add_argument(
+ '--pypi-proxy',
+ action='store_true',
+ help=argparse.SUPPRESS,
+ )
+
+ global_parser.add_argument(
+ '--pypi-endpoint',
+ metavar='URI',
+ help=argparse.SUPPRESS,
+ )
+
+ global_parser.add_argument(
+ '--requirements',
+ action='store_true',
+ default=False,
+ help='install command requirements',
+ )
+
+ global_parser.add_argument(
+ '--no-pip-check',
+ action='store_true',
+ help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility
+ )
+
+ add_global_remote(global_parser, controller_mode)
+ add_global_docker(global_parser, controller_mode)
+
+
+def add_composite_environment_options(
+ parser, # type: argparse.ArgumentParser
+ completer, # type: CompositeActionCompletionFinder
+ controller_mode, # type: ControllerMode
+ target_mode, # type: TargetMode
+): # type: (...) -> t.List[t.Type[CompositeAction]]
+ """Add composite options for controlling the test environment."""
+ composite_parser = t.cast(argparse.ArgumentParser, parser.add_argument_group(
+ title='composite environment arguments (mutually exclusive with "environment arguments" above)'))
+
+ composite_parser.add_argument(
+ '--host-path',
+ help=argparse.SUPPRESS,
+ )
+
+ action_types = [] # type: t.List[t.Type[CompositeAction]]
+
+ def register_action_type(action_type): # type: (t.Type[CompositeAction]) -> t.Type[CompositeAction]
+ """Register the provided composite action type and return it."""
+ action_types.append(action_type)
+ return action_type
+
+ if controller_mode == ControllerMode.NO_DELEGATION:
+ composite_parser.set_defaults(controller=None)
+ else:
+ composite_parser.add_argument(
+ '--controller',
+ metavar='OPT',
+ action=register_action_type(DelegatedControllerAction if controller_mode == ControllerMode.DELEGATED else OriginControllerAction),
+ help='configuration for the controller',
+ ).completer = completer.completer
+
+ if target_mode == TargetMode.NO_TARGETS:
+ composite_parser.set_defaults(targets=[])
+ elif target_mode == TargetMode.SHELL:
+ group = composite_parser.add_mutually_exclusive_group()
+
+ group.add_argument(
+ '--target-posix',
+ metavar='OPT',
+ action=register_action_type(PosixSshTargetAction),
+ help='configuration for the target',
+ ).completer = completer.completer
+
+ suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
+
+ group.add_argument(
+ '--target-windows',
+ metavar='OPT',
+ action=WindowsSshTargetAction if suppress else register_action_type(WindowsSshTargetAction),
+ help=suppress or 'configuration for the target',
+ ).completer = completer.completer
+
+ group.add_argument(
+ '--target-network',
+ metavar='OPT',
+ action=NetworkSshTargetAction if suppress else register_action_type(NetworkSshTargetAction),
+ help=suppress or 'configuration for the target',
+ ).completer = completer.completer
+ else:
+ if target_mode.multiple_pythons:
+ target_option = '--target-python'
+ target_help = 'configuration for the target python interpreter(s)'
+ elif target_mode == TargetMode.POSIX_INTEGRATION:
+ target_option = '--target'
+ target_help = 'configuration for the target'
+ else:
+ target_option = '--target'
+ target_help = 'configuration for the target(s)'
+
+ target_actions = {
+ TargetMode.POSIX_INTEGRATION: PosixTargetAction,
+ TargetMode.WINDOWS_INTEGRATION: WindowsTargetAction,
+ TargetMode.NETWORK_INTEGRATION: NetworkTargetAction,
+ TargetMode.SANITY: SanityPythonTargetAction,
+ TargetMode.UNITS: UnitsPythonTargetAction,
+ }
+
+ target_action = target_actions[target_mode]
+
+ composite_parser.add_argument(
+ target_option,
+ metavar='OPT',
+ action=register_action_type(target_action),
+ help=target_help,
+ ).completer = completer.completer
+
+ return action_types
+
+
+def add_legacy_environment_options(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+ target_mode, # type: TargetMode
+):
+ """Add legacy options for controlling the test environment."""
+ # noinspection PyTypeChecker
+ environment = parser.add_argument_group(
+ title='environment arguments (mutually exclusive with "composite environment arguments" below)') # type: argparse.ArgumentParser
+
+ add_environments_python(environment, target_mode)
+ add_environments_host(environment, controller_mode, target_mode)
+
+
+def add_environments_python(
+ environments_parser, # type: argparse.ArgumentParser
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments to control the Python version(s) used."""
+ if target_mode.has_python:
+ python_versions = SUPPORTED_PYTHON_VERSIONS
+ else:
+ python_versions = CONTROLLER_PYTHON_VERSIONS
+
+ environments_parser.add_argument(
+ '--python',
+ metavar='X.Y',
+ choices=python_versions + ('default',),
+ help='python version: %s' % ', '.join(python_versions),
+ )
+
+ environments_parser.add_argument(
+ '--python-interpreter',
+ metavar='PATH',
+ help='path to the python interpreter',
+ )
+
+
+def add_environments_host(
+ environments_parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+ target_mode # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments for the given host and argument modes."""
+ # noinspection PyTypeChecker
+ environments_exclusive_group = environments_parser.add_mutually_exclusive_group() # type: argparse.ArgumentParser
+
+ add_environment_local(environments_exclusive_group)
+ add_environment_venv(environments_exclusive_group, environments_parser)
+
+ if controller_mode == ControllerMode.DELEGATED:
+ add_environment_remote(environments_exclusive_group, environments_parser, target_mode)
+ add_environment_docker(environments_exclusive_group, environments_parser, target_mode)
+
+ if target_mode == TargetMode.WINDOWS_INTEGRATION:
+ add_environment_windows(environments_parser)
+
+ if target_mode == TargetMode.NETWORK_INTEGRATION:
+ add_environment_network(environments_parser)
+
+
+def add_environment_network(
+ environments_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running on a windows host."""
+ environments_parser.add_argument(
+ '--platform',
+ metavar='PLATFORM',
+ action='append',
+ help='network platform/version',
+ ).completer = complete_network_platform
+
+ environments_parser.add_argument(
+ '--platform-collection',
+ type=key_value_type,
+ metavar='PLATFORM=COLLECTION',
+ action='append',
+ help='collection used to test platform',
+ ).completer = complete_network_platform_collection
+
+ environments_parser.add_argument(
+ '--platform-connection',
+ type=key_value_type,
+ metavar='PLATFORM=CONNECTION',
+ action='append',
+ help='connection used to test platform',
+ ).completer = complete_network_platform_connection
+
+ environments_parser.add_argument(
+ '--inventory',
+ metavar='PATH',
+ help='path to inventory used for tests',
+ )
+
+
+def add_environment_windows(
+ environments_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running on a windows host."""
+ environments_parser.add_argument(
+ '--windows',
+ metavar='VERSION',
+ action='append',
+ help='windows version',
+ ).completer = complete_windows
+
+ environments_parser.add_argument(
+ '--inventory',
+ metavar='PATH',
+ help='path to inventory used for tests',
+ )
+
+
+def add_environment_local(
+ exclusive_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running on the local (origin) host."""
+ exclusive_parser.add_argument(
+ '--local',
+ action='store_true',
+ help='run from the local environment',
+ )
+
+
+def add_environment_venv(
+ exclusive_parser, # type: argparse.ArgumentParser
+ environments_parser, # type: argparse.ArgumentParser
+): # type: (...) -> None
+ """Add environment arguments for running in ansible-test managed virtual environments."""
+ exclusive_parser.add_argument(
+ '--venv',
+ action='store_true',
+ help='run from a virtual environment',
+ )
+
+ environments_parser.add_argument(
+ '--venv-system-site-packages',
+ action='store_true',
+ help='enable system site packages')
+
+
+def add_global_docker(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+): # type: (...) -> None
+ """Add global options for Docker."""
+ if controller_mode != ControllerMode.DELEGATED:
+ parser.set_defaults(
+ docker_no_pull=False,
+ docker_network=None,
+ docker_terminate=None,
+ )
+
+ return
+
+ parser.add_argument(
+ '--docker-no-pull',
+ action='store_true',
+ help=argparse.SUPPRESS, # deprecated, kept for now (with a warning) for backwards compatibility
+ )
+
+ parser.add_argument(
+ '--docker-network',
+ metavar='NET',
+ help='run using the specified network',
+ )
+
+ parser.add_argument(
+ '--docker-terminate',
+ metavar='T',
+ default=TerminateMode.ALWAYS,
+ type=TerminateMode,
+ action=EnumAction,
+ help='terminate the container: %(choices)s (default: %(default)s)',
+ )
+
+
+def add_environment_docker(
+ exclusive_parser, # type: argparse.ArgumentParser
+ environments_parser, # type: argparse.ArgumentParser
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments for running in docker containers."""
+ if target_mode in (TargetMode.POSIX_INTEGRATION, TargetMode.SHELL):
+ docker_images = sorted(filter_completion(DOCKER_COMPLETION))
+ else:
+ docker_images = sorted(filter_completion(DOCKER_COMPLETION, controller_only=True))
+
+ exclusive_parser.add_argument(
+ '--docker',
+ metavar='IMAGE',
+ nargs='?',
+ const='default',
+ help='run from a docker container',
+ ).completer = functools.partial(complete_choices, docker_images)
+
+ environments_parser.add_argument(
+ '--docker-privileged',
+ action='store_true',
+ help='run docker container in privileged mode',
+ )
+
+ environments_parser.add_argument(
+ '--docker-seccomp',
+ metavar='SC',
+ choices=SECCOMP_CHOICES,
+ help='set seccomp confinement for the test container: %(choices)s',
+ )
+
+ environments_parser.add_argument(
+ '--docker-memory',
+ metavar='INT',
+ type=int,
+ help='memory limit for docker in bytes',
+ )
+
+
+def add_global_remote(
+ parser, # type: argparse.ArgumentParser
+ controller_mode, # type: ControllerMode
+): # type: (...) -> None
+ """Add global options for remote instances."""
+ if controller_mode != ControllerMode.DELEGATED:
+ parser.set_defaults(
+ remote_stage=None,
+ remote_endpoint=None,
+ remote_terminate=None,
+ )
+
+ return
+
+ suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
+
+ parser.add_argument(
+ '--remote-stage',
+ metavar='STAGE',
+ default='prod',
+ help=suppress or 'remote stage to use: prod, dev',
+ ).completer = complete_remote_stage
+
+ parser.add_argument(
+ '--remote-endpoint',
+ metavar='EP',
+ help=suppress or 'remote provisioning endpoint to use',
+ )
+
+ parser.add_argument(
+ '--remote-terminate',
+ metavar='T',
+ default=TerminateMode.NEVER,
+ type=TerminateMode,
+ action=EnumAction,
+ help=suppress or 'terminate the remote instance: %(choices)s (default: %(default)s)',
+ )
+
+
+def add_environment_remote(
+ exclusive_parser, # type: argparse.ArgumentParser
+ environments_parser, # type: argparse.ArgumentParser
+ target_mode, # type: TargetMode
+): # type: (...) -> None
+ """Add environment arguments for running in ansible-core-ci provisioned remote virtual machines."""
+ if target_mode == TargetMode.POSIX_INTEGRATION:
+ remote_platforms = get_remote_platform_choices()
+ elif target_mode == TargetMode.SHELL:
+ remote_platforms = sorted(set(get_remote_platform_choices()) | set(get_windows_platform_choices()))
+ else:
+ remote_platforms = get_remote_platform_choices(True)
+
+ suppress = None if get_ci_provider().supports_core_ci_auth() else argparse.SUPPRESS
+
+ exclusive_parser.add_argument(
+ '--remote',
+ metavar='NAME',
+ help=suppress or 'run from a remote instance',
+ ).completer = functools.partial(complete_choices, remote_platforms)
+
+ environments_parser.add_argument(
+ '--remote-provider',
+ metavar='PR',
+ choices=REMOTE_PROVIDERS,
+ help=suppress or 'remote provider to use: %(choices)s',
+ )
+
+
+def complete_remote_stage(prefix, **_): # type: (str, ...) -> t.List[str]
+ """Return a list of supported stages matching the given prefix."""
+ return [stage for stage in ('prod', 'dev') if stage.startswith(prefix)]
+
+
+def complete_windows(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported Windows versions matching the given prefix, excluding versions already parsed from the command line."""
+ return [i for i in get_windows_version_choices() if i.startswith(prefix) and (not parsed_args.windows or i not in parsed_args.windows)]
+
+
+def complete_network_platform(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported network platforms matching the given prefix, excluding platforms already parsed from the command line."""
+ images = sorted(filter_completion(NETWORK_COMPLETION))
+
+ return [i for i in images if i.startswith(prefix) and (not parsed_args.platform or i not in parsed_args.platform)]
+
+
+def complete_network_platform_collection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported network platforms matching the given prefix, excluding collection platforms already parsed from the command line."""
+ left = prefix.split('=')[0]
+ images = sorted(set(image.platform for image in filter_completion(NETWORK_COMPLETION).values()))
+
+ return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_collection or i not in [x[0] for x in parsed_args.platform_collection])]
+
+
+def complete_network_platform_connection(prefix, parsed_args, **_): # type: (str, argparse.Namespace, ...) -> t.List[str]
+ """Return a list of supported network platforms matching the given prefix, excluding connection platforms already parsed from the command line."""
+ left = prefix.split('=')[0]
+ images = sorted(set(image.platform for image in filter_completion(NETWORK_COMPLETION).values()))
+
+ return [i + '=' for i in images if i.startswith(left) and (not parsed_args.platform_connection or i not in [x[0] for x in parsed_args.platform_connection])]
+
+
+def get_remote_platform_choices(controller=False): # type: (bool) -> t.List[str]
+ """Return a list of supported remote platforms matching the given prefix."""
+ return sorted(filter_completion(REMOTE_COMPLETION, controller_only=controller))
+
+
+def get_windows_platform_choices(): # type: () -> t.List[str]
+ """Return a list of supported Windows versions matching the given prefix."""
+ return sorted(f'windows/{windows.version}' for windows in filter_completion(WINDOWS_COMPLETION).values())
+
+
+def get_windows_version_choices(): # type: () -> t.List[str]
+ """Return a list of supported Windows versions."""
+ return sorted(windows.version for windows in filter_completion(WINDOWS_COMPLETION).values())
diff --git a/test/lib/ansible_test/_internal/cli/parsers/__init__.py b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
new file mode 100644
index 0000000000..25bac9167b
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/__init__.py
@@ -0,0 +1,303 @@
+"""Composite argument parsers for ansible-test specific command-line arguments."""
+from __future__ import annotations
+
+import typing as t
+
+from ...constants import (
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...ci import (
+ get_ci_provider,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ NetworkConfig,
+ NetworkInventoryConfig,
+ PosixConfig,
+ WindowsConfig,
+ WindowsInventoryConfig,
+)
+
+from ..argparsing.parsers import (
+ DocumentationState,
+ Parser,
+ ParserState,
+ TypeParser,
+)
+
+from .value_parsers import (
+ PythonParser,
+)
+
+from .host_config_parsers import (
+ ControllerParser,
+ DockerParser,
+ NetworkInventoryParser,
+ NetworkRemoteParser,
+ OriginParser,
+ PosixRemoteParser,
+ PosixSshParser,
+ WindowsInventoryParser,
+ WindowsRemoteParser,
+)
+
+
+from .base_argument_parsers import (
+ ControllerNamespaceParser,
+ TargetNamespaceParser,
+ TargetsNamespaceParser,
+)
+
+
+class OriginControllerParser(ControllerNamespaceParser, TypeParser):
+ """Composite argument parser for the controller when delegation is not supported."""
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return dict(
+ origin=OriginParser(),
+ )
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = '--controller options:'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class DelegatedControllerParser(ControllerNamespaceParser, TypeParser):
+ """Composite argument parser for the controller when delegation is supported."""
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = dict(
+ origin=OriginParser(),
+ docker=DockerParser(controller=True),
+ )
+
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=PosixRemoteParser(controller=True),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = '--controller options:'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class PosixTargetParser(TargetNamespaceParser, TypeParser):
+ """Composite argument parser for a POSIX target."""
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = dict(
+ controller=ControllerParser(),
+ docker=DockerParser(controller=False),
+ )
+
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=PosixRemoteParser(controller=False),
+ )
+
+ parsers.update(
+ ssh=PosixSshParser(),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class WindowsTargetParser(TargetsNamespaceParser, TypeParser):
+ """Composite argument parser for a Windows target."""
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return True
+
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers(state.root_namespace.targets)
+
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers([])
+
+ def get_internal_parsers(self, targets): # type: (t.List[WindowsConfig]) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = {}
+
+ if self.allow_inventory and not targets:
+ parsers.update(
+ inventory=WindowsInventoryParser(),
+ )
+
+ if not targets or not any(isinstance(target, WindowsInventoryConfig) for target in targets):
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=WindowsRemoteParser(),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class NetworkTargetParser(TargetsNamespaceParser, TypeParser):
+ """Composite argument parser for a network target."""
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return True
+
+ def get_parsers(self, state): # type: (ParserState) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers(state.root_namespace.targets)
+
+ def get_stateless_parsers(self): # type: () -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ return self.get_internal_parsers([])
+
+ def get_internal_parsers(self, targets): # type: (t.List[NetworkConfig]) -> t.Dict[str, Parser]
+ """Return a dictionary of type names and type parsers."""
+ parsers = {}
+
+ if self.allow_inventory and not targets:
+ parsers.update(
+ inventory=NetworkInventoryParser(),
+ )
+
+ if not targets or not any(isinstance(target, NetworkInventoryConfig) for target in targets):
+ if get_ci_provider().supports_core_ci_auth():
+ parsers.update(
+ remote=NetworkRemoteParser(),
+ )
+
+ return parsers
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '' # place this section before the sections created by the parsers below
+ state.sections[section] = '\n'.join([f' {name}:{parser.document(state)}' for name, parser in self.get_stateless_parsers().items()])
+
+ return None
+
+
+class PythonTargetParser(TargetsNamespaceParser, Parser):
+ """Composite argument parser for a Python target."""
+ def __init__(self, allow_venv): # type: (bool) -> None
+ super().__init__()
+
+ self.allow_venv = allow_venv
+
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-python'
+
+ def get_value(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result, without storing the result in the namespace."""
+ versions = list(SUPPORTED_PYTHON_VERSIONS)
+
+ for target in state.root_namespace.targets or []: # type: PosixConfig
+ versions.remove(target.python.version)
+
+ parser = PythonParser(versions, allow_venv=self.allow_venv, allow_default=True)
+ python = parser.parse(state)
+
+ value = ControllerConfig(python=python)
+
+ return value
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ section = f'{self.option_name} options (choose one):'
+
+ state.sections[section] = '\n'.join([
+ f' {PythonParser(SUPPORTED_PYTHON_VERSIONS, allow_venv=False, allow_default=True).document(state)} # non-origin controller',
+ f' {PythonParser(SUPPORTED_PYTHON_VERSIONS, allow_venv=True, allow_default=True).document(state)} # origin controller',
+ ])
+
+ return None
+
+
+class SanityPythonTargetParser(PythonTargetParser):
+ """Composite argument parser for a sanity Python target."""
+ def __init__(self): # type: () -> None
+ super().__init__(allow_venv=False)
+
+
+class UnitsPythonTargetParser(PythonTargetParser):
+ """Composite argument parser for a units Python target."""
+ def __init__(self): # type: () -> None
+ super().__init__(allow_venv=True)
+
+
+class PosixSshTargetParser(PosixTargetParser):
+ """Composite argument parser for a POSIX SSH target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-posix'
+
+
+class WindowsSshTargetParser(WindowsTargetParser):
+ """Composite argument parser for a Windows SSH target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-windows'
+
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return False
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return True
+
+
+class NetworkSshTargetParser(NetworkTargetParser):
+ """Composite argument parser for a network SSH target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target-network'
+
+ @property
+ def allow_inventory(self): # type: () -> bool
+ """True if inventory is allowed, otherwise False."""
+ return False
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return True
diff --git a/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py
new file mode 100644
index 0000000000..2f17affa02
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/base_argument_parsers.py
@@ -0,0 +1,73 @@
+"""Base classes for the primary parsers for composite command line arguments."""
+from __future__ import annotations
+
+import abc
+import typing as t
+
+from ..argparsing.parsers import (
+ CompletionError,
+ NamespaceParser,
+ ParserState,
+)
+
+
+class ControllerNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
+ """Base class for controller namespace parsers."""
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return 'controller'
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ if state.root_namespace.targets:
+ raise ControllerRequiredFirstError()
+
+ return super().parse(state)
+
+
+class TargetNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
+ """Base class for target namespace parsers involving a single target."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target'
+
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return 'targets'
+
+ @property
+ def use_list(self): # type: () -> bool
+ """True if the destination is a list, otherwise False."""
+ return True
+
+ @property
+ def limit_one(self): # type: () -> bool
+ """True if only one target is allowed, otherwise False."""
+ return True
+
+
+class TargetsNamespaceParser(NamespaceParser, metaclass=abc.ABCMeta):
+ """Base class for controller namespace parsers involving multiple targets."""
+ @property
+ def option_name(self): # type: () -> str
+ """The option name used for this parser."""
+ return '--target'
+
+ @property
+ def dest(self): # type: () -> str
+ """The name of the attribute where the value should be stored."""
+ return 'targets'
+
+ @property
+ def use_list(self): # type: () -> bool
+ """True if the destination is a list, otherwise False."""
+ return True
+
+
+class ControllerRequiredFirstError(CompletionError):
+ """Exception raised when controller and target options are specified out-of-order."""
+ def __init__(self):
+ super().__init__('The `--controller` option must be specified before `--target` option(s).')
diff --git a/test/lib/ansible_test/_internal/cli/parsers/helpers.py b/test/lib/ansible_test/_internal/cli/parsers/helpers.py
new file mode 100644
index 0000000000..0cf13f8dd2
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/helpers.py
@@ -0,0 +1,59 @@
+"""Helper functions for composite parsers."""
+from __future__ import annotations
+
+import typing as t
+
+from ...constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...completion import (
+ DOCKER_COMPLETION,
+ REMOTE_COMPLETION,
+ filter_completion,
+)
+
+from ...host_configs import (
+ DockerConfig,
+ HostConfig,
+ PosixRemoteConfig,
+)
+
+
+def get_docker_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str]
+ """Return a list of docker instance Python versions supported by the specified host config."""
+ image_config = filter_completion(DOCKER_COMPLETION).get(name)
+ available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
+
+ if not image_config:
+ return [] if strict else available_pythons
+
+ supported_pythons = [python for python in image_config.supported_pythons if python in available_pythons]
+
+ return supported_pythons
+
+
+def get_remote_pythons(name, controller, strict): # type: (str, bool, bool) -> t.List[str]
+ """Return a list of remote instance Python versions supported by the specified host config."""
+ platform_config = filter_completion(REMOTE_COMPLETION).get(name)
+ available_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
+
+ if not platform_config:
+ return [] if strict else available_pythons
+
+ supported_pythons = [python for python in platform_config.supported_pythons if python in available_pythons]
+
+ return supported_pythons
+
+
+def get_controller_pythons(controller_config, strict): # type: (HostConfig, bool) -> t.List[str]
+ """Return a list of controller Python versions supported by the specified host config."""
+ if isinstance(controller_config, DockerConfig):
+ pythons = get_docker_pythons(controller_config.name, False, strict)
+ elif isinstance(controller_config, PosixRemoteConfig):
+ pythons = get_remote_pythons(controller_config.name, False, strict)
+ else:
+ pythons = SUPPORTED_PYTHON_VERSIONS
+
+ return pythons
diff --git a/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py
new file mode 100644
index 0000000000..3732263060
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/host_config_parsers.py
@@ -0,0 +1,310 @@
+"""Composite parsers for the various types of hosts."""
+from __future__ import annotations
+
+import typing as t
+
+from ...completion import (
+ DOCKER_COMPLETION,
+ NETWORK_COMPLETION,
+ REMOTE_COMPLETION,
+ WINDOWS_COMPLETION,
+ filter_completion,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ DockerConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from ..compat import (
+ get_fallback_remote_controller,
+)
+
+from ..argparsing.parsers import (
+ ChoicesParser,
+ DocumentationState,
+ FileParser,
+ MatchConditions,
+ NamespaceWrappedParser,
+ PairParser,
+ Parser,
+ ParserError,
+ ParserState,
+)
+
+from .value_parsers import (
+ PlatformParser,
+ SshConnectionParser,
+)
+
+from .key_value_parsers import (
+ ControllerKeyValueParser,
+ DockerKeyValueParser,
+ EmptyKeyValueParser,
+ NetworkRemoteKeyValueParser,
+ OriginKeyValueParser,
+ PosixRemoteKeyValueParser,
+ PosixSshKeyValueParser,
+ WindowsRemoteKeyValueParser,
+)
+
+from .helpers import (
+ get_docker_pythons,
+ get_remote_pythons,
+)
+
+
+class OriginParser(Parser):
+ """Composite argument parser for the origin."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = OriginConfig()
+
+ state.set_namespace(namespace)
+
+ parser = OriginKeyValueParser()
+ parser.parse(state)
+
+ return namespace
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return OriginKeyValueParser().document(state)
+
+
+class ControllerParser(Parser):
+ """Composite argument parser for the controller."""
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ namespace = ControllerConfig()
+
+ state.set_namespace(namespace)
+
+ parser = ControllerKeyValueParser()
+ parser.parse(state)
+
+ return namespace
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ return ControllerKeyValueParser().document(state)
+
+
+class DockerParser(PairParser):
+ """Composite argument parser for a docker host."""
+ def __init__(self, controller): # type: (bool) -> None
+ self.controller = controller
+
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return DockerConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ return NamespaceWrappedParser('name', ChoicesParser(list(filter_completion(DOCKER_COMPLETION, controller_only=self.controller)),
+ conditions=MatchConditions.CHOICE | MatchConditions.ANY))
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return DockerKeyValueParser(choice, self.controller)
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state) # type: DockerConfig
+
+ if not value.python and not get_docker_pythons(value.name, self.controller, True):
+ raise ParserError(f'Python version required for docker image: {value.name}')
+
+ return value
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ default = 'default'
+ content = '\n'.join([f' {image} ({", ".join(get_docker_pythons(image, self.controller, False))})'
+ for image, item in filter_completion(DOCKER_COMPLETION, controller_only=self.controller).items()])
+
+ content += '\n'.join([
+ '',
+ ' {image} # python must be specified for custom images',
+ ])
+
+ state.sections[f'{"controller" if self.controller else "target"} docker images and supported python version (choose one):'] = content
+
+ return f'{{image}}[,{DockerKeyValueParser(default, self.controller).document(state)}]'
+
+
+class PosixRemoteParser(PairParser):
+ """Composite argument parser for a POSIX remote host."""
+ def __init__(self, controller): # type: (bool) -> None
+ self.controller = controller
+
+ def create_namespace(self): # type: () -> t.Any
+ """Create and return a namespace."""
+ return PosixRemoteConfig()
+
+ def get_left_parser(self, state): # type: (ParserState) -> Parser
+ """Return the parser for the left side."""
+ return NamespaceWrappedParser('name', PlatformParser(list(filter_completion(REMOTE_COMPLETION, controller_only=self.controller))))
+
+ def get_right_parser(self, choice): # type: (t.Any) -> Parser
+ """Return the parser for the right side."""
+ return PosixRemoteKeyValueParser(choice, self.controller)
+
+ def parse(self, state): # type: (ParserState) -> t.Any
+ """Parse the input from the given state and return the result."""
+ value = super().parse(state) # type: PosixRemoteConfig
+
+ if not value.python and not get_remote_pythons(value.name, self.controller, True):
+ raise ParserError(f'Python version required for remote: {value.name}')
+
+ return value
+
+ def document(self, state): # type: (DocumentationState) -> t.Optional[str]
+ """Generate and return documentation for this parser."""
+ default = get_fallback_remote_controller()
+ content = '\n'.join([f' {name} ({", ".join(get_remote_pythons(name, self.controller, False))})'
+ for name, item in filter_completion(REMOTE_COMPLETION, controller_only=self.controller).items()])
+
+ content += '\n'.join([
+ '',
+ ' {platform}/{version} # python must be specified for unknown systems',
+ ])
+
+ state.sections[f'{"controller" if self.controller else "target"} remote systems and supported python versions (choose one):'] = content
+
+ return f'{{system}}[,{PosixRemoteKeyValueParser(default, self.controller).document(state)}]'
+
+
class WindowsRemoteParser(PairParser):
    """Composite argument parser for a Windows remote host."""
    def create_namespace(self):  # type: () -> t.Any
        """Create and return a namespace."""
        return WindowsRemoteConfig()

    def get_left_parser(self, state):  # type: (ParserState) -> Parser
        """Return the parser for the left side."""
        available = list(filter_completion(WINDOWS_COMPLETION))

        # Each Windows version may only be selected once, so drop names already in use.
        for existing in state.root_namespace.targets or []:  # type: WindowsRemoteConfig
            available.remove(existing.name)

        return NamespaceWrappedParser('name', PlatformParser(available))

    def get_right_parser(self, choice):  # type: (t.Any) -> Parser
        """Return the parser for the right side."""
        return WindowsRemoteKeyValueParser()

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        known_systems = [f'  {name}' for name in filter_completion(WINDOWS_COMPLETION)]
        content = '\n'.join(known_systems)
        content += '\n'.join([
            '',
            '  windows/{version}  # use an unknown windows version',
        ])

        state.sections['target remote systems (choose one):'] = content

        return f'{{system}}[,{WindowsRemoteKeyValueParser().document(state)}]'
+
+
class NetworkRemoteParser(PairParser):
    """Composite argument parser for a network remote host."""
    def create_namespace(self):  # type: () -> t.Any
        """Create and return a namespace."""
        return NetworkRemoteConfig()

    def get_left_parser(self, state):  # type: (ParserState) -> Parser
        """Return the parser for the left side."""
        remaining = list(filter_completion(NETWORK_COMPLETION))

        # Each network platform may only be selected once, so drop names already in use.
        for used in state.root_namespace.targets or []:  # type: NetworkRemoteConfig
            remaining.remove(used.name)

        return NamespaceWrappedParser('name', PlatformParser(remaining))

    def get_right_parser(self, choice):  # type: (t.Any) -> Parser
        """Return the parser for the right side."""
        return NetworkRemoteKeyValueParser()

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        known_systems = [f'  {name}' for name in filter_completion(NETWORK_COMPLETION)]
        content = '\n'.join(known_systems)
        content += '\n'.join([
            '',
            '  {platform}/{version}  # use an unknown platform and version',
        ])

        state.sections['target remote systems (choose one):'] = content

        return f'{{system}}[,{NetworkRemoteKeyValueParser().document(state)}]'
+
+
class WindowsInventoryParser(PairParser):
    """Composite argument parser for a Windows inventory."""
    def create_namespace(self):  # type: () -> t.Any
        """Create and return a namespace."""
        return WindowsInventoryConfig()

    def get_left_parser(self, state):  # type: (ParserState) -> Parser
        """Return the parser for the left side."""
        # The left side is simply a filesystem path stored under the 'path' attribute.
        path_parser = NamespaceWrappedParser('path', FileParser())
        return path_parser

    def get_right_parser(self, choice):  # type: (t.Any) -> Parser
        """Return the parser for the right side."""
        # An inventory has no key/value options, but a right-side parser is still required.
        return EmptyKeyValueParser()

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        return '{path}  # INI format inventory file'
+
+
class NetworkInventoryParser(PairParser):
    """Composite argument parser for a network inventory."""
    def create_namespace(self):  # type: () -> t.Any
        """Create and return a namespace."""
        return NetworkInventoryConfig()

    def get_left_parser(self, state):  # type: (ParserState) -> Parser
        """Return the parser for the left side."""
        # The left side is simply a filesystem path stored under the 'path' attribute.
        path_parser = NamespaceWrappedParser('path', FileParser())
        return path_parser

    def get_right_parser(self, choice):  # type: (t.Any) -> Parser
        """Return the parser for the right side."""
        # An inventory has no key/value options, but a right-side parser is still required.
        return EmptyKeyValueParser()

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        return '{path}  # INI format inventory file'
+
+
class PosixSshParser(PairParser):
    """Composite argument parser for a POSIX SSH host."""
    def create_namespace(self):  # type: () -> t.Any
        """Create and return a namespace."""
        return PosixSshConfig()

    def get_left_parser(self, state):  # type: (ParserState) -> Parser
        """Return the parser for the left side."""
        return SshConnectionParser()

    def get_right_parser(self, choice):  # type: (t.Any) -> Parser
        """Return the parser for the right side."""
        return PosixSshKeyValueParser()

    @property
    def required(self):  # type: () -> bool
        """True if the delimiter (and thus right parser) is required, otherwise False."""
        # The python= option is mandatory for SSH hosts, so the right side is always required.
        return True

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        connection_docs = SshConnectionParser().document(state)
        options_docs = PosixSshKeyValueParser().document(state)
        return f'{connection_docs}[,{options_docs}]'
diff --git a/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py
new file mode 100644
index 0000000000..b22705f731
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/key_value_parsers.py
@@ -0,0 +1,213 @@
+"""Composite argument key-value parsers used by other parsers."""
+from __future__ import annotations
+
+import typing as t
+
+from ...constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_PROVIDERS,
+ SECCOMP_CHOICES,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...host_configs import (
+ OriginConfig,
+)
+
+from ..argparsing.parsers import (
+ AnyParser,
+ BooleanParser,
+ ChoicesParser,
+ DocumentationState,
+ IntegerParser,
+ KeyValueParser,
+ Parser,
+ ParserState,
+)
+
+from .value_parsers import (
+ PythonParser,
+)
+
+from .helpers import (
+ get_controller_pythons,
+ get_remote_pythons,
+ get_docker_pythons,
+)
+
+
class OriginKeyValueParser(KeyValueParser):
    """Composite argument parser for origin key/value pairs."""
    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        # The origin host supports every controller Python, venvs, and a default.
        return dict(
            python=PythonParser(versions=CONTROLLER_PYTHON_VERSIONS, allow_venv=True, allow_default=True),
        )

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        python_parser = PythonParser(versions=CONTROLLER_PYTHON_VERSIONS, allow_venv=True, allow_default=True)

        section_name = 'origin options'

        option_lines = [
            f'  python={python_parser.document(state)}',
        ]

        state.sections[f'controller {section_name} (comma separated):'] = '\n'.join(option_lines)

        return f'{{{section_name}}}  # default'
+
+
class ControllerKeyValueParser(KeyValueParser):
    """Composite argument parser for controller key/value pairs."""
    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        controller = state.root_namespace.controller
        # venvs are only available when the controller is the origin (or not yet specified).
        use_venv = not controller or isinstance(controller, OriginConfig)

        return dict(
            python=PythonParser(
                versions=get_controller_pythons(controller, False),
                allow_venv=use_venv,
                allow_default=bool(get_controller_pythons(controller, True)),
            ),
        )

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        section_name = 'controller options'

        option_lines = []

        # Document both variants: venvs are only valid for an origin controller.
        for venv_allowed, label in ((False, 'non-origin controller'), (True, 'origin controller')):
            python_parser = PythonParser(SUPPORTED_PYTHON_VERSIONS, allow_venv=venv_allowed, allow_default=True)
            option_lines.append(f'  python={python_parser.document(state)}  # {label}')

        state.sections[f'target {section_name} (comma separated):'] = '\n'.join(option_lines)

        return f'{{{section_name}}}  # default'
+
+
class DockerKeyValueParser(KeyValueParser):
    """Composite argument parser for docker key/value pairs."""
    def __init__(self, image, controller):  # type: (str, bool) -> None
        # True when the container is used as the controller, False for a target.
        self.controller = controller
        # Python versions explicitly available for this image (may be empty for unknown images).
        self.versions = get_docker_pythons(image, controller, False)
        # True if the image declares a default Python that can be selected as 'default'.
        self.allow_default = bool(get_docker_pythons(image, controller, True))

    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        return dict(
            python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default),
            seccomp=ChoicesParser(SECCOMP_CHOICES),
            privileged=BooleanParser(),
            memory=IntegerParser(),
        )

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        # Use an empty version list so the docs show the generic {X.Y} placeholder.
        python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default)

        section_name = 'docker options'

        state.sections[f'{"controller" if self.controller else "target"} {section_name} (comma separated):'] = '\n'.join([
            f'  python={python_parser.document(state)}',
            f'  seccomp={ChoicesParser(SECCOMP_CHOICES).document(state)}',
            f'  privileged={BooleanParser().document(state)}',
            f'  memory={IntegerParser().document(state)}  # bytes',
        ])

        return f'{{{section_name}}}'
+
+
class PosixRemoteKeyValueParser(KeyValueParser):
    """Composite argument parser for POSIX remote key/value pairs."""
    def __init__(self, name, controller):  # type: (str, bool) -> None
        # True when the remote host is used as the controller, False for a target.
        self.controller = controller
        # Python versions explicitly available for this platform (may be empty for unknown systems).
        self.versions = get_remote_pythons(name, controller, False)
        # True if the platform declares a default Python that can be selected as 'default'.
        self.allow_default = bool(get_remote_pythons(name, controller, True))

    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        return dict(
            provider=ChoicesParser(REMOTE_PROVIDERS),
            python=PythonParser(versions=self.versions, allow_venv=False, allow_default=self.allow_default),
        )

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        # Use an empty version list so the docs show the generic {X.Y} placeholder.
        python_parser = PythonParser(versions=[], allow_venv=False, allow_default=self.allow_default)

        section_name = 'remote options'

        state.sections[f'{"controller" if self.controller else "target"} {section_name} (comma separated):'] = '\n'.join([
            f'  provider={ChoicesParser(REMOTE_PROVIDERS).document(state)}',
            f'  python={python_parser.document(state)}',
        ])

        return f'{{{section_name}}}'
+
+
class WindowsRemoteKeyValueParser(KeyValueParser):
    """Composite argument parser for Windows remote key/value pairs."""
    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        return dict(provider=ChoicesParser(REMOTE_PROVIDERS))

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        section_name = 'remote options'
        provider_docs = ChoicesParser(REMOTE_PROVIDERS).document(state)

        state.sections[f'target {section_name} (comma separated):'] = f'  provider={provider_docs}'

        return f'{{{section_name}}}'
+
+
class NetworkRemoteKeyValueParser(KeyValueParser):
    """Composite argument parser for network remote key/value pairs."""
    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        return dict(
            provider=ChoicesParser(REMOTE_PROVIDERS),
            collection=AnyParser(),
            connection=AnyParser(),
        )

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        section_name = 'remote options'

        state.sections[f'target {section_name} (comma separated):'] = '\n'.join([
            f'  provider={ChoicesParser(REMOTE_PROVIDERS).document(state)}',
            '  collection={collection}',  # placeholder name fixed: was the typo '{collecton}', which did not match the 'collection' key above
            '  connection={connection}',
        ])

        return f'{{{section_name}}}'
+
+
class PosixSshKeyValueParser(KeyValueParser):
    """Composite argument parser for POSIX SSH host key/value pairs."""
    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        # SSH hosts must name an explicit Python version; no venv, no default.
        return dict(
            python=PythonParser(versions=list(SUPPORTED_PYTHON_VERSIONS), allow_venv=False, allow_default=False),
        )

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        section_name = 'ssh options'
        python_docs = PythonParser(versions=SUPPORTED_PYTHON_VERSIONS, allow_venv=False, allow_default=False).document(state)

        state.sections[f'target {section_name} (comma separated):'] = f'  python={python_docs}'

        return f'{{{section_name}}}'
+
+
class EmptyKeyValueParser(KeyValueParser):
    """Composite argument parser when a key/value parser is required but there are no keys available."""
    def get_parsers(self, state):  # type: (ParserState) -> t.Dict[str, Parser]
        """Return a dictionary of key names and value parsers."""
        # No keys are valid here; the base class handles rejecting any input.
        return dict()
diff --git a/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
new file mode 100644
index 0000000000..1aae88216f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/cli/parsers/value_parsers.py
@@ -0,0 +1,172 @@
+"""Composite argument value parsers used by other parsers."""
+from __future__ import annotations
+
+import typing as t
+
+from ...host_configs import (
+ NativePythonConfig,
+ VirtualPythonConfig,
+)
+
+from ..argparsing.parsers import (
+ AbsolutePathParser,
+ AnyParser,
+ ChoicesParser,
+ DocumentationState,
+ IntegerParser,
+ MatchConditions,
+ Parser,
+ ParserError,
+ ParserState,
+)
+
+
class PythonParser(Parser):
    """
    Composite argument parser for Python versions, with support for specifying paths and using virtual environments.

    Allowed formats:

    {version}
    venv/{version}
    venv/system-site-packages/{version}

    The `{version}` has two possible formats:

    X.Y
    X.Y@{path}

    Where `X.Y` is the Python major and minor version number and `{path}` is an absolute path with one of the following formats:

    /path/to/python
    /path/to/python/directory/

    When a trailing slash is present, it is considered a directory, and `python{version}` will be appended to it automatically.

    The default path depends on the context:

    - Known docker/remote environments can declare their own path.
    - The origin host uses `sys.executable` if `{version}` matches the current version in `sys.version_info`.
    - The origin host (as a controller or target) use the `$PATH` environment variable to find `python{version}`.
    - As a fallback/default, the path `/usr/bin/python{version}` is used.

    NOTE: The Python path determines where to find the Python interpreter.
    In the case of an ansible-test managed virtual environment, that Python interpreter will be used to create the virtual environment.
    So the path given will not be the one actually used for the controller or target.

    Known docker/remote environments limit the available Python versions to configured values known to be valid.
    The origin host and unknown environments assume all relevant Python versions are available.
    """
    def __init__(self,
                 versions,  # type: t.List[str]
                 *,
                 allow_default,  # type: bool
                 allow_venv,  # type: bool
                 ):
        version_choices = list(versions)

        if allow_default:
            version_choices.append('default')

        first_choices = list(version_choices)

        if allow_venv:
            first_choices.append('venv/')

        venv_choices = list(version_choices) + ['system-site-packages/']

        self.versions = versions
        self.allow_default = allow_default
        self.allow_venv = allow_venv
        self.version_choices = version_choices
        self.first_choices = first_choices
        self.venv_choices = venv_choices  # fix: the original assigned venv_choices twice in a row (duplicated statement removed)

    def parse(self, state):  # type: (ParserState) -> t.Any
        """Parse the input from the given state and return the result."""
        # First token: a version, 'default', or 'venv' (when venvs are allowed).
        with state.delimit('@/', required=False) as boundary:
            version = ChoicesParser(self.first_choices).parse(state)

        if version == 'venv':
            # Inside a venv spec: either 'system-site-packages' or a version.
            with state.delimit('@/', required=False) as boundary:
                version = ChoicesParser(self.venv_choices).parse(state)

            if version == 'system-site-packages':
                system_site_packages = True

                with state.delimit('@', required=False) as boundary:
                    version = ChoicesParser(self.version_choices).parse(state)
            else:
                system_site_packages = False

            python = VirtualPythonConfig(version=version, system_site_packages=system_site_packages)
        else:
            python = NativePythonConfig(version=version)

        if boundary.match == '@':
            # FUTURE: For OriginConfig or ControllerConfig->OriginConfig the path could be validated with an absolute path parser (file or directory).
            python.path = AbsolutePathParser().parse(state)

        return python

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""

        docs = '[venv/[system-site-packages/]]' if self.allow_venv else ''

        if self.versions:
            docs += '|'.join(self.version_choices)
        else:
            # No known versions for this context; show the generic placeholder.
            docs += '{X.Y}'

        docs += '[@{path|dir/}]'

        return docs
+
+
class PlatformParser(ChoicesParser):
    """Composite argument parser for "{platform}/{version}" formatted choices."""
    def __init__(self, choices):  # type: (t.List[str]) -> None
        # Accept any input (not just known choices) so unknown platforms can be specified.
        super().__init__(choices, conditions=MatchConditions.CHOICE | MatchConditions.ANY)

    def parse(self, state):  # type: (ParserState) -> t.Any
        """Parse the input from the given state and return the result."""
        value = super().parse(state)

        # Require exactly one '/' separating the platform from the version.
        if value.count('/') != 1:
            raise ParserError(f'invalid platform format: {value}')

        return value
+
+
class SshConnectionParser(Parser):
    """
    Composite argument parser for connecting to a host using SSH.
    Format: user@host[:port]
    """
    EXPECTED_FORMAT = '{user}@{host}[:{port}]'

    def parse(self, state):  # type: (ParserState) -> t.Any
        """Parse the input from the given state and return the result."""
        namespace = state.current_namespace

        # The user portion is everything up to the required '@' delimiter.
        with state.delimit('@'):
            setattr(namespace, 'user', AnyParser(no_match_message=f'Expected {{user}} from: {self.EXPECTED_FORMAT}').parse(state))

        # The host portion runs up to an optional ':' which introduces the port.
        with state.delimit(':', required=False) as colon:
            setattr(namespace, 'host', AnyParser(no_match_message=f'Expected {{host}} from: {self.EXPECTED_FORMAT}').parse(state))

        if colon.match:
            setattr(namespace, 'port', IntegerParser(65535).parse(state))

        return namespace

    def document(self, state):  # type: (DocumentationState) -> t.Optional[str]
        """Generate and return documentation for this parser."""
        return self.EXPECTED_FORMAT
diff --git a/test/lib/ansible_test/_internal/commands/__init__.py b/test/lib/ansible_test/_internal/commands/__init__.py
index e69de29bb2..e9cb68168d 100644
--- a/test/lib/ansible_test/_internal/commands/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/__init__.py
@@ -0,0 +1,2 @@
+"""Nearly empty __init__.py to keep pylint happy."""
+from __future__ import annotations
diff --git a/test/lib/ansible_test/_internal/commands/coverage/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
index 940dd2e325..50bc82632f 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/__init__.py
@@ -1,12 +1,14 @@
"""Common logic for the coverage subcommand."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import errno
import os
import re
+import typing as t
-from ... import types as t
+from ...constants import (
+ COVERAGE_REQUIRED_VERSION,
+)
from ...encoding import (
to_bytes,
@@ -25,7 +27,7 @@ from ...util import (
)
from ...util_common import (
- intercept_command,
+ intercept_python,
ResultType,
)
@@ -33,9 +35,8 @@ from ...config import (
EnvironmentConfig,
)
-from ...executor import (
- Delegate,
- install_command_requirements,
+from ...python_requirements import (
+ install_requirements,
)
from ... target import (
@@ -46,6 +47,14 @@ from ...data import (
data_context,
)
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
+
+from ...provisioning import (
+ HostState,
+)
+
if t.TYPE_CHECKING:
import coverage as coverage_module
@@ -57,16 +66,13 @@ COVERAGE_OUTPUT_FILE_NAME = 'coverage'
class CoverageConfig(EnvironmentConfig):
"""Configuration for the coverage command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageConfig, self).__init__(args, 'coverage')
+ super().__init__(args, 'coverage')
-def initialize_coverage(args): # type: (CoverageConfig) -> coverage_module
+def initialize_coverage(args, host_state): # type: (CoverageConfig, HostState) -> coverage_module
"""Delegate execution if requested, install requirements, then import and return the coverage module. Raises an exception if coverage is not available."""
- if args.delegate:
- raise Delegate()
-
- if args.requirements:
- install_command_requirements(args)
+ configure_pypi_proxy(args, host_state.controller_profile) # coverage
+ install_requirements(args, host_state.controller_profile.python, coverage=True) # coverage
try:
import coverage
@@ -74,35 +80,22 @@ def initialize_coverage(args): # type: (CoverageConfig) -> coverage_module
coverage = None
if not coverage:
- raise ApplicationError('You must install the "coverage" python module to use this command.')
-
- coverage_version_string = coverage.__version__
- coverage_version = tuple(int(v) for v in coverage_version_string.split('.'))
-
- min_version = (4, 2)
- max_version = (5, 0)
-
- supported_version = True
- recommended_version = '4.5.4'
-
- if coverage_version < min_version or coverage_version >= max_version:
- supported_version = False
+ raise ApplicationError(f'Version {COVERAGE_REQUIRED_VERSION} of the Python "coverage" module must be installed to use this command.')
- if not supported_version:
- raise ApplicationError('Version %s of "coverage" is not supported. Version %s is known to work and is recommended.' % (
- coverage_version_string, recommended_version))
+ if coverage.__version__ != COVERAGE_REQUIRED_VERSION:
+ raise ApplicationError(f'Version {COVERAGE_REQUIRED_VERSION} of the Python "coverage" module is required. Version {coverage.__version__} was found.')
return coverage
-def run_coverage(args, output_file, command, cmd): # type: (CoverageConfig, str, str, t.List[str]) -> None
+def run_coverage(args, host_state, output_file, command, cmd): # type: (CoverageConfig, HostState, str, str, t.List[str]) -> None
"""Run the coverage cli tool with the specified options."""
env = common_environment()
env.update(dict(COVERAGE_FILE=output_file))
cmd = ['python', '-m', 'coverage.__main__', command, '--rcfile', COVERAGE_CONFIG_PATH] + cmd
- intercept_command(args, target_name='coverage', env=env, cmd=cmd, disable_coverage=True)
+ intercept_python(args, host_state.controller_profile.python, cmd, env)
def get_all_coverage_files(): # type: () -> t.List[str]
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
index 103164494f..db169fd7a0 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/__init__.py
@@ -1,8 +1,6 @@
"""Common logic for the `coverage analyze` subcommand."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from .... import types as t
+from __future__ import annotations
+import typing as t
from .. import (
CoverageConfig,
@@ -12,7 +10,7 @@ from .. import (
class CoverageAnalyzeConfig(CoverageConfig):
"""Configuration for the `coverage analyze` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeConfig, self).__init__(args)
+ super().__init__(args)
# avoid mixing log messages with file output when using `/dev/stdout` for the output file on commands
# this may be worth considering as the default behavior in the future, instead of being dependent on the command or options used
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
index ec0bef78cd..a39d12c825 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/__init__.py
@@ -1,10 +1,8 @@
"""Analyze integration test target code coverage."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ..... import types as t
+import typing as t
from .....io import (
read_json_file,
@@ -33,7 +31,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsConfig(CoverageAnalyzeConfig):
"""Configuration for the `coverage analyze targets` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsConfig, self).__init__(args)
+ super().__init__(args)
self.info_stderr = True
@@ -121,7 +119,7 @@ def get_target_index(name, target_indexes): # type: (str, TargetIndexes) -> int
def expand_indexes(
source_data, # type: IndexedPoints
source_index, # type: t.List[str]
- format_func, # type: t.Callable[t.Tuple[t.Any], str]
+ format_func, # type: t.Callable[[TargetKey], str]
): # type: (...) -> NamedPoints
"""Expand indexes from the source into target names for easier processing of the data (arcs or lines)."""
combined_data = {} # type: t.Dict[str, t.Dict[t.Any, t.Set[str]]]
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
index 7d9f18d433..d68edc02b2 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/combine.py
@@ -1,8 +1,14 @@
"""Combine integration test target code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import typing as t
-from ..... import types as t
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
from . import (
CoverageAnalyzeTargetsConfig,
@@ -24,7 +30,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets combine` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsCombineConfig, self).__init__(args)
+ super().__init__(args)
self.input_files = args.input_file # type: t.List[str]
self.output_file = args.output_file # type: str
@@ -32,6 +38,11 @@ class CoverageAnalyzeTargetsCombineConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_combine(args): # type: (CoverageAnalyzeTargetsCombineConfig) -> None
"""Combine integration test target code coverage reports."""
+ host_state = prepare_profiles(args) # coverage analyze targets combine
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
combined_target_indexes = {} # type: TargetIndexes
combined_path_arcs = {} # type: Arcs
combined_path_lines = {} # type: Lines
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
index 491650d8b3..6ca6e6d33a 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/expand.py
@@ -1,14 +1,20 @@
"""Expand target names in an aggregated coverage file."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from ..... import types as t
+from __future__ import annotations
+import typing as t
from .....io import (
SortedSetEncoder,
write_json_file,
)
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
+
from . import (
CoverageAnalyzeTargetsConfig,
expand_indexes,
@@ -20,7 +26,7 @@ from . import (
class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets expand` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsExpandConfig, self).__init__(args)
+ super().__init__(args)
self.input_file = args.input_file # type: str
self.output_file = args.output_file # type: str
@@ -28,6 +34,11 @@ class CoverageAnalyzeTargetsExpandConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_expand(args): # type: (CoverageAnalyzeTargetsExpandConfig) -> None
"""Expand target names in an aggregated coverage file."""
+ host_state = prepare_profiles(args) # coverage analyze targets expand
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
covered_targets, covered_path_arcs, covered_path_lines = read_report(args.input_file)
report = dict(
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
index c7ea5810fe..e5d2f50003 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/filter.py
@@ -1,10 +1,16 @@
"""Filter an aggregated coverage file, keeping only the specified targets."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
+import typing as t
-from ..... import types as t
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
from . import (
CoverageAnalyzeTargetsConfig,
@@ -25,7 +31,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets filter` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsFilterConfig, self).__init__(args)
+ super().__init__(args)
self.input_file = args.input_file # type: str
self.output_file = args.output_file # type: str
@@ -37,6 +43,11 @@ class CoverageAnalyzeTargetsFilterConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTargetsFilterConfig) -> None
"""Filter target names in an aggregated coverage file."""
+ host_state = prepare_profiles(args) # coverage analyze targets filter
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
covered_targets, covered_path_arcs, covered_path_lines = read_report(args.input_file)
filtered_path_arcs = expand_indexes(covered_path_arcs, covered_targets, lambda v: v)
@@ -49,6 +60,7 @@ def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTarg
exclude_path = re.compile(args.exclude_path) if args.exclude_path else None
def path_filter_func(path):
+ """Return True if the given path should be included, otherwise return False."""
if include_path and not re.search(include_path, path):
return False
@@ -58,6 +70,7 @@ def command_coverage_analyze_targets_filter(args): # type: (CoverageAnalyzeTarg
return True
def target_filter_func(targets):
+ """Filter the given targets and return the result based on the defined includes and excludes."""
if include_targets:
targets &= include_targets
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
index d9f292785a..3f9bca74db 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/generate.py
@@ -1,10 +1,8 @@
"""Analyze code coverage data to determine which integration test targets provide coverage for each arc or line."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ..... import types as t
+import typing as t
from .....encoding import (
to_text,
@@ -18,6 +16,15 @@ from .....util_common import (
ResultType,
)
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+ HostState,
+)
+
from ... import (
enumerate_powershell_lines,
enumerate_python_arcs,
@@ -47,7 +54,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets generate` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsGenerateConfig, self).__init__(args)
+ super().__init__(args)
self.input_dir = args.input_dir or ResultType.COVERAGE.path # type: str
self.output_file = args.output_file # type: str
@@ -55,9 +62,14 @@ class CoverageAnalyzeTargetsGenerateConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_generate(args): # type: (CoverageAnalyzeTargetsGenerateConfig) -> None
"""Analyze code coverage data to determine which integration test targets provide coverage for each arc or line."""
+ host_state = prepare_profiles(args) # coverage analyze targets generate
+
+ if args.delegate:
+ raise Delegate(host_state)
+
root = data_context().content.root
target_indexes = {}
- arcs = dict((os.path.relpath(path, root), data) for path, data in analyze_python_coverage(args, args.input_dir, target_indexes).items())
+ arcs = dict((os.path.relpath(path, root), data) for path, data in analyze_python_coverage(args, host_state, args.input_dir, target_indexes).items())
lines = dict((os.path.relpath(path, root), data) for path, data in analyze_powershell_coverage(args, args.input_dir, target_indexes).items())
report = make_report(target_indexes, arcs, lines)
write_report(args, report, args.output_file)
@@ -65,6 +77,7 @@ def command_coverage_analyze_targets_generate(args): # type: (CoverageAnalyzeTa
def analyze_python_coverage(
args, # type: CoverageAnalyzeTargetsGenerateConfig
+ host_state, # type: HostState
path, # type: str
target_indexes, # type: TargetIndexes
): # type: (...) -> Arcs
@@ -73,7 +86,7 @@ def analyze_python_coverage(
collection_search_re, collection_sub_re = get_collection_path_regexes()
modules = get_python_modules()
python_files = get_python_coverage_files(path)
- coverage = initialize_coverage(args)
+ coverage = initialize_coverage(args, host_state)
for python_file in python_files:
if not is_integration_coverage_file(python_file):
diff --git a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
index e0310ff227..9b6d696dbe 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/analyze/targets/missing.py
@@ -1,15 +1,21 @@
"""Identify aggregated coverage in one file missing from another."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ..... import types as t
+import typing as t
from .....encoding import (
to_bytes,
)
+from .....executor import (
+ Delegate,
+)
+
+from .....provisioning import (
+ prepare_profiles,
+)
+
from . import (
CoverageAnalyzeTargetsConfig,
get_target_index,
@@ -28,7 +34,7 @@ if t.TYPE_CHECKING:
class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
"""Configuration for the `coverage analyze targets missing` command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageAnalyzeTargetsMissingConfig, self).__init__(args)
+ super().__init__(args)
self.from_file = args.from_file # type: str
self.to_file = args.to_file # type: str
@@ -40,6 +46,11 @@ class CoverageAnalyzeTargetsMissingConfig(CoverageAnalyzeTargetsConfig):
def command_coverage_analyze_targets_missing(args): # type: (CoverageAnalyzeTargetsMissingConfig) -> None
"""Identify aggregated coverage in one file missing from another."""
+ host_state = prepare_profiles(args) # coverage analyze targets missing
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
from_targets, from_path_arcs, from_path_lines = read_report(args.from_file)
to_targets, to_path_arcs, to_path_lines = read_report(args.to_file)
target_indexes = {}
diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py
index caa4fa92e7..b96e2ba05a 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/combine.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py
@@ -1,11 +1,9 @@
"""Combine code coverage files."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import json
-
-from ... import types as t
+import typing as t
from ...target import (
walk_compile_targets,
@@ -37,6 +35,16 @@ from ...data import (
data_context,
)
+from ...host_configs import (
+ DockerConfig,
+ RemoteConfig,
+)
+
+from ...provisioning import (
+ HostState,
+ prepare_profiles,
+)
+
from . import (
enumerate_python_arcs,
enumerate_powershell_lines,
@@ -58,8 +66,13 @@ def command_coverage_combine(args):
:type args: CoverageCombineConfig
:rtype: list[str]
"""
+ host_state = prepare_profiles(args) # coverage combine
+
if args.delegate:
- if args.docker or args.remote:
+ raise Delegate(host_state)
+
+ if args.delegate:
+ if isinstance(args.controller, (DockerConfig, RemoteConfig)):
paths = get_all_coverage_files()
exported_paths = [path for path in paths if os.path.basename(path).split('=')[-1].split('.')[:2] == ['coverage', 'combined']]
@@ -75,9 +88,9 @@ def command_coverage_combine(args):
data_context().register_payload_callback(coverage_callback)
- raise Delegate()
+ raise Delegate(host_state=host_state)
- paths = _command_coverage_combine_powershell(args) + _command_coverage_combine_python(args)
+ paths = _command_coverage_combine_powershell(args) + _command_coverage_combine_python(args, host_state)
for path in paths:
display.info('Generated combined output: %s' % path, verbosity=1)
@@ -88,18 +101,15 @@ def command_coverage_combine(args):
class ExportedCoverageDataNotFound(ApplicationError):
"""Exception when no combined coverage data is present yet is required."""
def __init__(self):
- super(ExportedCoverageDataNotFound, self).__init__(
+ super().__init__(
'Coverage data must be exported before processing with the `--docker` or `--remote` option.\n'
'Export coverage with `ansible-test coverage combine` using the `--export` option.\n'
'The exported files must be in the directory: %s/' % ResultType.COVERAGE.relative_path)
-def _command_coverage_combine_python(args):
- """
- :type args: CoverageCombineConfig
- :rtype: list[str]
- """
- coverage = initialize_coverage(args)
+def _command_coverage_combine_python(args, host_state): # type: (CoverageCombineConfig, HostState) -> t.List[str]
+ """Combine Python coverage files and return a list of the output files."""
+ coverage = initialize_coverage(args, host_state)
modules = get_python_modules()
@@ -350,7 +360,7 @@ def get_coverage_group(args, coverage_file):
class CoverageCombineConfig(CoverageConfig):
"""Configuration for the coverage combine command."""
def __init__(self, args): # type: (t.Any) -> None
- super(CoverageCombineConfig, self).__init__(args)
+ super().__init__(args)
self.group_by = frozenset(args.group_by) if args.group_by else frozenset() # type: t.FrozenSet[str]
self.all = args.all # type: bool
diff --git a/test/lib/ansible_test/_internal/commands/coverage/erase.py b/test/lib/ansible_test/_internal/commands/coverage/erase.py
index 7a41f56f47..9a459a38a5 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/erase.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/erase.py
@@ -1,6 +1,5 @@
"""Erase code coverage files."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -8,6 +7,14 @@ from ...util_common import (
ResultType,
)
+from ...executor import (
+ Delegate,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
from . import (
CoverageConfig,
)
@@ -15,6 +22,11 @@ from . import (
def command_coverage_erase(args): # type: (CoverageEraseConfig) -> None
"""Erase code coverage data files collected during test runs."""
+ host_state = prepare_profiles(args) # coverage erase
+
+ if args.delegate:
+ raise Delegate(host_state=host_state)
+
coverage_dir = ResultType.COVERAGE.path
for name in os.listdir(coverage_dir):
diff --git a/test/lib/ansible_test/_internal/commands/coverage/html.py b/test/lib/ansible_test/_internal/commands/coverage/html.py
index b34e1ef4ec..c4053a631a 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/html.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/html.py
@@ -1,6 +1,5 @@
"""Generate HTML code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -16,6 +15,10 @@ from ...util_common import (
ResultType,
)
+from ...provisioning import (
+ prepare_profiles,
+)
+
from .combine import (
command_coverage_combine,
CoverageCombineConfig,
@@ -30,6 +33,7 @@ def command_coverage_html(args):
"""
:type args: CoverageHtmlConfig
"""
+ host_state = prepare_profiles(args) # coverage html
output_files = command_coverage_combine(args)
for output_file in output_files:
@@ -40,7 +44,7 @@ def command_coverage_html(args):
dir_name = os.path.join(ResultType.REPORTS.path, os.path.basename(output_file))
make_dirs(dir_name)
- run_coverage(args, output_file, 'html', ['-i', '-d', dir_name])
+ run_coverage(args, host_state, output_file, 'html', ['-i', '-d', dir_name])
display.info('HTML report generated: file:///%s' % os.path.join(dir_name, 'index.html'))
diff --git a/test/lib/ansible_test/_internal/commands/coverage/report.py b/test/lib/ansible_test/_internal/commands/coverage/report.py
index 498d543403..d5a6ecd8f9 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/report.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/report.py
@@ -1,6 +1,5 @@
"""Generate console code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -16,6 +15,10 @@ from ...data import (
data_context,
)
+from ...provisioning import (
+ prepare_profiles,
+)
+
from .combine import (
command_coverage_combine,
CoverageCombineConfig,
@@ -30,6 +33,7 @@ def command_coverage_report(args):
"""
:type args: CoverageReportConfig
"""
+ host_state = prepare_profiles(args) # coverage report
output_files = command_coverage_combine(args)
for output_file in output_files:
@@ -50,7 +54,7 @@ def command_coverage_report(args):
if args.omit:
options.extend(['--omit', args.omit])
- run_coverage(args, output_file, 'report', options)
+ run_coverage(args, host_state, output_file, 'report', options)
def _generate_powershell_output_report(args, coverage_file):
@@ -149,7 +153,7 @@ class CoverageReportConfig(CoverageCombineConfig):
"""
:type args: any
"""
- super(CoverageReportConfig, self).__init__(args)
+ super().__init__(args)
self.show_missing = args.show_missing # type: bool
self.include = args.include # type: str
diff --git a/test/lib/ansible_test/_internal/commands/coverage/xml.py b/test/lib/ansible_test/_internal/commands/coverage/xml.py
index 2296ef61c2..6938924029 100644
--- a/test/lib/ansible_test/_internal/commands/coverage/xml.py
+++ b/test/lib/ansible_test/_internal/commands/coverage/xml.py
@@ -1,6 +1,5 @@
"""Generate XML code coverage reports."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import time
@@ -34,6 +33,10 @@ from ...data import (
data_context,
)
+from ...provisioning import (
+ prepare_profiles,
+)
+
from .combine import (
command_coverage_combine,
CoverageCombineConfig,
@@ -48,6 +51,7 @@ def command_coverage_xml(args):
"""
:type args: CoverageXmlConfig
"""
+ host_state = prepare_profiles(args) # coverage xml
output_files = command_coverage_combine(args)
for output_file in output_files:
@@ -63,7 +67,7 @@ def command_coverage_xml(args):
else:
xml_path = os.path.join(ResultType.REPORTS.path, xml_name)
make_dirs(ResultType.REPORTS.path)
- run_coverage(args, output_file, 'xml', ['-i', '-o', xml_path])
+ run_coverage(args, host_state, output_file, 'xml', ['-i', '-o', xml_path])
def _generate_powershell_xml(coverage_file):
diff --git a/test/lib/ansible_test/_internal/commands/env/__init__.py b/test/lib/ansible_test/_internal/commands/env/__init__.py
index ad13ab4f10..c625209c84 100644
--- a/test/lib/ansible_test/_internal/commands/env/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/env/__init__.py
@@ -1,29 +1,23 @@
"""Show information about the test environment."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import datetime
-import functools
import os
import platform
-import signal
import sys
-import time
+import typing as t
from ...config import (
CommonConfig,
- TestConfig,
)
from ...io import (
write_json_file,
- read_json_file,
)
from ...util import (
display,
SubprocessError,
- ApplicationError,
get_ansible_version,
get_available_python_versions,
)
@@ -40,30 +34,19 @@ from ...docker_util import (
docker_version
)
-from ...thread import (
- WrappedThread,
-)
-
from ...constants import (
TIMEOUT_PATH,
)
-from ...test import (
- TestTimeout,
-)
-
from ...ci import (
get_ci_provider,
)
class EnvConfig(CommonConfig):
- """Configuration for the tools command."""
- def __init__(self, args):
- """
- :type args: any
- """
- super(EnvConfig, self).__init__(args, 'env')
+ """Configuration for the `env` command."""
+ def __init__(self, args): # type: (t.Any) -> None
+ super().__init__(args, 'env')
self.show = args.show
self.dump = args.dump
@@ -75,19 +58,15 @@ class EnvConfig(CommonConfig):
self.show = True
-def command_env(args):
- """
- :type args: EnvConfig
- """
+def command_env(args): # type: (EnvConfig) -> None
+ """Entry point for the `env` command."""
show_dump_env(args)
list_files_env(args)
set_timeout(args)
-def show_dump_env(args):
- """
- :type args: EnvConfig
- """
+def show_dump_env(args): # type: (EnvConfig) -> None
+ """Show information about the current environment and/or write the information to disk."""
if not args.show and not args.dump:
return
@@ -137,10 +116,8 @@ def list_files_env(args): # type: (EnvConfig) -> None
display.info(path)
-def set_timeout(args):
- """
- :type args: EnvConfig
- """
+def set_timeout(args): # type: (EnvConfig) -> None
+ """Set an execution timeout for subsequent ansible-test invocations."""
if args.timeout is None:
return
@@ -167,79 +144,8 @@ def set_timeout(args):
os.remove(TIMEOUT_PATH)
-def get_timeout():
- """
- :rtype: dict[str, any] | None
- """
- if not os.path.exists(TIMEOUT_PATH):
- return None
-
- data = read_json_file(TIMEOUT_PATH)
- data['deadline'] = datetime.datetime.strptime(data['deadline'], '%Y-%m-%dT%H:%M:%SZ')
-
- return data
-
-
-def configure_timeout(args):
- """
- :type args: CommonConfig
- """
- if isinstance(args, TestConfig):
- configure_test_timeout(args) # only tests are subject to the timeout
-
-
-def configure_test_timeout(args):
- """
- :type args: TestConfig
- """
- timeout = get_timeout()
-
- if not timeout:
- return
-
- timeout_start = datetime.datetime.utcnow()
- timeout_duration = timeout['duration']
- timeout_deadline = timeout['deadline']
- timeout_remaining = timeout_deadline - timeout_start
-
- test_timeout = TestTimeout(timeout_duration)
-
- if timeout_remaining <= datetime.timedelta():
- test_timeout.write(args)
-
- raise ApplicationError('The %d minute test timeout expired %s ago at %s.' % (
- timeout_duration, timeout_remaining * -1, timeout_deadline))
-
- display.info('The %d minute test timeout expires in %s at %s.' % (
- timeout_duration, timeout_remaining, timeout_deadline), verbosity=1)
-
- def timeout_handler(_dummy1, _dummy2):
- """Runs when SIGUSR1 is received."""
- test_timeout.write(args)
-
- raise ApplicationError('Tests aborted after exceeding the %d minute time limit.' % timeout_duration)
-
- def timeout_waiter(timeout_seconds):
- """
- :type timeout_seconds: int
- """
- time.sleep(timeout_seconds)
- os.kill(os.getpid(), signal.SIGUSR1)
-
- signal.signal(signal.SIGUSR1, timeout_handler)
-
- instance = WrappedThread(functools.partial(timeout_waiter, timeout_remaining.seconds))
- instance.daemon = True
- instance.start()
-
-
-def show_dict(data, verbose, root_verbosity=0, path=None):
- """
- :type data: dict[str, any]
- :type verbose: dict[str, int]
- :type root_verbosity: int
- :type path: list[str] | None
- """
+def show_dict(data, verbose, root_verbosity=0, path=None): # type: (t.Dict[str, t.Any], t.Dict[str, int], int, t.Optional[t.List[str]]) -> None
+ """Show a dict with varying levels of verbosity."""
path = path if path else []
for key, value in sorted(data.items()):
@@ -260,11 +166,8 @@ def show_dict(data, verbose, root_verbosity=0, path=None):
display.info(indent + '%s: %s' % (key, value), verbosity=verbosity)
-def get_docker_details(args):
- """
- :type args: CommonConfig
- :rtype: dict[str, any]
- """
+def get_docker_details(args): # type: (EnvConfig) -> t.Dict[str, str]
+ """Return details about docker."""
docker = get_docker_command()
executable = None
diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py
index f6e2721b60..e462872efb 100644
--- a/test/lib/ansible_test/_internal/commands/integration/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py
@@ -1,21 +1,15 @@
"""Ansible integration test infrastructure."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import contextlib
import datetime
-import difflib
-import filecmp
import json
import os
-import random
import re
import shutil
-import string
import tempfile
import time
-
-from ... import types as t
+import typing as t
from ...encoding import (
to_bytes,
@@ -23,15 +17,17 @@ from ...encoding import (
from ...ansible_util import (
ansible_environment,
- check_pyyaml,
)
from ...executor import (
- get_python_version,
get_changes_filter,
AllTargetsSkipped,
Delegate,
- install_command_requirements,
+ ListTargets,
+)
+
+from ...python_requirements import (
+ install_requirements,
)
from ...ci import (
@@ -44,6 +40,7 @@ from ...target import (
IntegrationTarget,
walk_internal_targets,
TIntegrationTarget,
+ IntegrationTargetType,
)
from ...config import (
@@ -56,39 +53,26 @@ from ...config import (
from ...io import (
make_dirs,
- write_text_file,
read_text_file,
- open_text_file,
)
from ...util import (
ApplicationError,
display,
- COVERAGE_CONFIG_NAME,
- MODE_DIRECTORY,
- MODE_DIRECTORY_WRITE,
- MODE_FILE,
SubprocessError,
remove_tree,
- find_executable,
- raw_command,
- ANSIBLE_TEST_TOOLS_ROOT,
- SUPPORTED_PYTHON_VERSIONS,
- get_hash,
)
from ...util_common import (
named_temporary_file,
ResultType,
- get_docker_completion,
- get_remote_completion,
- intercept_command,
run_command,
write_json_test_results,
+ check_pyyaml,
)
from ...coverage_util import (
- generate_coverage_config,
+ cover_python,
)
from ...cache import (
@@ -107,31 +91,42 @@ from ...data import (
data_context,
)
+from ...host_configs import (
+ OriginConfig,
+)
-def setup_common_temp_dir(args, path):
- """
- :type args: IntegrationConfig
- :type path: str
- """
- if args.explain:
- return
-
- os.mkdir(path)
- os.chmod(path, MODE_DIRECTORY)
+from ...host_profiles import (
+ ControllerProfile,
+ HostProfile,
+ PosixProfile,
+ SshTargetHostProfile,
+)
- if args.coverage:
- coverage_config_path = os.path.join(path, COVERAGE_CONFIG_NAME)
+from ...provisioning import (
+ HostState,
+ prepare_profiles,
+)
- coverage_config = generate_coverage_config(args)
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
- write_text_file(coverage_config_path, coverage_config)
+from ...inventory import (
+ create_controller_inventory,
+ create_windows_inventory,
+ create_network_inventory,
+ create_posix_inventory,
+)
- os.chmod(coverage_config_path, MODE_FILE)
+from .filters import (
+ get_target_filter,
+)
- coverage_output_path = os.path.join(path, ResultType.COVERAGE.name)
+from .coverage import (
+ CoverageManager,
+)
- os.mkdir(coverage_output_path)
- os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
+THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
def generate_dependency_map(integration_targets):
@@ -186,7 +181,7 @@ def get_files_needed(target_dependencies):
def check_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> None
"""Check the given inventory for issues."""
- if args.docker or args.remote:
+ if not isinstance(args.controller, OriginConfig):
if os.path.exists(inventory_path):
inventory = read_text_file(inventory_path)
@@ -334,7 +329,7 @@ def integration_test_environment(args, target, inventory_path_src):
yield IntegrationEnvironment(integration_dir, targets_dir, inventory_path, ansible_config, vars_file)
finally:
if not args.explain:
- shutil.rmtree(temp_dir)
+ remove_tree(temp_dir)
@contextlib.contextmanager
@@ -367,302 +362,40 @@ def integration_test_config_file(args, env_config, integration_dir):
yield path
-def get_integration_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- if args.docker:
- return get_integration_docker_filter(args, targets)
-
- if args.remote:
- return get_integration_remote_filter(args, targets)
-
- return get_integration_local_filter(args, targets)
-
-
-def common_integration_filter(args, targets, exclude):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :type exclude: list[str]
- """
- override_disabled = set(target for target in args.include if target.startswith('disabled/'))
-
- if not args.allow_disabled:
- skip = 'disabled/'
- override = [target.name for target in targets if override_disabled & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-disabled or prefixing with "disabled/": %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- override_unsupported = set(target for target in args.include if target.startswith('unsupported/'))
-
- if not args.allow_unsupported:
- skip = 'unsupported/'
- override = [target.name for target in targets if override_unsupported & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-unsupported or prefixing with "unsupported/": %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- override_unstable = set(target for target in args.include if target.startswith('unstable/'))
-
- if args.allow_unstable_changed:
- override_unstable |= set(args.metadata.change_description.focused_targets or [])
-
- if not args.allow_unstable:
- skip = 'unstable/'
- override = [target.name for target in targets if override_unstable & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-unstable or prefixing with "unstable/": %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- # only skip a Windows test if using --windows and all the --windows versions are defined in the aliases as skip/windows/%s
- if isinstance(args, WindowsIntegrationConfig) and args.windows:
- all_skipped = []
- not_skipped = []
-
- for target in targets:
- if "skip/windows/" not in target.aliases:
- continue
-
- skip_valid = []
- skip_missing = []
- for version in args.windows:
- if "skip/windows/%s/" % version in target.aliases:
- skip_valid.append(version)
- else:
- skip_missing.append(version)
-
- if skip_missing and skip_valid:
- not_skipped.append((target.name, skip_valid, skip_missing))
- elif skip_valid:
- all_skipped.append(target.name)
-
- if all_skipped:
- exclude.extend(all_skipped)
- skip_aliases = ["skip/windows/%s/" % w for w in args.windows]
- display.warning('Excluding tests marked "%s" which are set to skip with --windows %s: %s'
- % ('", "'.join(skip_aliases), ', '.join(args.windows), ', '.join(all_skipped)))
-
- if not_skipped:
- for target, skip_valid, skip_missing in not_skipped:
- # warn when failing to skip due to lack of support for skipping only some versions
- display.warning('Including test "%s" which was marked to skip for --windows %s but not %s.'
- % (target, ', '.join(skip_valid), ', '.join(skip_missing)))
-
-
-def get_integration_local_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- exclude = []
-
- common_integration_filter(args, targets, exclude)
-
- if not args.allow_root and os.getuid() != 0:
- skip = 'needs/root/'
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require --allow-root or running as root: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- override_destructive = set(target for target in args.include if target.startswith('destructive/'))
-
- if not args.allow_destructive:
- skip = 'destructive/'
- override = [target.name for target in targets if override_destructive & set(target.aliases)]
- skipped = [target.name for target in targets if skip in target.aliases and target.name not in override]
- if skipped:
- exclude.extend(skipped)
- display.warning('Excluding tests marked "%s" which require --allow-destructive or prefixing with "destructive/" to run locally: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- exclude_targets_by_python_version(targets, args.python_version, exclude)
-
- return exclude
-
-
-def get_integration_docker_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- exclude = []
-
- common_integration_filter(args, targets, exclude)
-
- skip = 'skip/docker/'
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which cannot run under docker: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- if not args.docker_privileged:
- skip = 'needs/privileged/'
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
- % (skip.rstrip('/'), ', '.join(skipped)))
-
- python_version = get_python_version(args, get_docker_completion(), args.docker_raw)
-
- exclude_targets_by_python_version(targets, python_version, exclude)
-
- return exclude
-
-
-def get_integration_remote_filter(args, targets):
- """
- :type args: IntegrationConfig
- :type targets: tuple[IntegrationTarget]
- :rtype: list[str]
- """
- remote = args.parsed_remote
-
- exclude = []
-
- common_integration_filter(args, targets, exclude)
-
- skips = {
- 'skip/%s' % remote.platform: remote.platform,
- 'skip/%s/%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version),
- 'skip/%s%s' % (remote.platform, remote.version): '%s %s' % (remote.platform, remote.version), # legacy syntax, use above format
- }
-
- if remote.arch:
- skips.update({
- 'skip/%s/%s' % (remote.arch, remote.platform): '%s on %s' % (remote.platform, remote.arch),
- 'skip/%s/%s/%s' % (remote.arch, remote.platform, remote.version): '%s %s on %s' % (remote.platform, remote.version, remote.arch),
- })
-
- for skip, description in skips.items():
- skipped = [target.name for target in targets if skip in target.skips]
- if skipped:
- exclude.append(skip + '/')
- display.warning('Excluding tests marked "%s" which are not supported on %s: %s' % (skip, description, ', '.join(skipped)))
-
- python_version = get_python_version(args, get_remote_completion(), args.remote)
-
- exclude_targets_by_python_version(targets, python_version, exclude)
-
- return exclude
-
-
-def exclude_targets_by_python_version(targets, python_version, exclude):
- """
- :type targets: tuple[IntegrationTarget]
- :type python_version: str
- :type exclude: list[str]
- """
- if not python_version:
- display.warning('Python version unknown. Unable to skip tests based on Python version.')
- return
-
- python_major_version = python_version.split('.')[0]
-
- skip = 'skip/python%s/' % python_version
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
- % (skip.rstrip('/'), python_version, ', '.join(skipped)))
-
- skip = 'skip/python%s/' % python_major_version
- skipped = [target.name for target in targets if skip in target.aliases]
- if skipped:
- exclude.append(skip)
- display.warning('Excluding tests marked "%s" which are not supported on python %s: %s'
- % (skip.rstrip('/'), python_version, ', '.join(skipped)))
-
-
-def command_integration_filter(args, # type: TIntegrationConfig
- targets, # type: t.Iterable[TIntegrationTarget]
- init_callback=None, # type: t.Callable[[TIntegrationConfig, t.Tuple[TIntegrationTarget, ...]], None]
- ): # type: (...) -> t.Tuple[TIntegrationTarget, ...]
- """Filter the given integration test targets."""
- targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
- changes = get_changes_filter(args)
-
- # special behavior when the --changed-all-target target is selected based on changes
- if args.changed_all_target in changes:
- # act as though the --changed-all-target target was in the include list
- if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
- args.include.append(args.changed_all_target)
- args.delegate_args += ['--include', args.changed_all_target]
- # act as though the --changed-all-target target was in the exclude list
- elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
- args.exclude.append(args.changed_all_target)
-
- require = args.require + changes
- exclude = args.exclude
-
- internal_targets = walk_internal_targets(targets, args.include, exclude, require)
- environment_exclude = get_integration_filter(args, internal_targets)
-
- environment_exclude += cloud_filter(args, internal_targets)
-
- if environment_exclude:
- exclude += environment_exclude
- internal_targets = walk_internal_targets(targets, args.include, exclude, require)
-
- if not internal_targets:
- raise AllTargetsSkipped()
-
- if args.start_at and not any(target.name == args.start_at for target in internal_targets):
- raise ApplicationError('Start at target matches nothing: %s' % args.start_at)
-
- if init_callback:
- init_callback(args, internal_targets)
-
- cloud_init(args, internal_targets)
-
- vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
-
- if os.path.exists(vars_file_src):
- def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
- """
- Add the integration config vars file to the payload file list.
- This will preserve the file during delegation even if the file is ignored by source control.
- """
- files.append((vars_file_src, data_context().content.integration_vars_path))
-
- data_context().register_payload_callback(integration_config_callback)
-
- if args.delegate:
- raise Delegate(require=require, exclude=exclude)
-
- extra_requirements = []
-
- for cloud_platform in get_cloud_platforms(args):
- extra_requirements.append('%s.cloud.%s' % (args.command, cloud_platform))
-
- install_command_requirements(args, extra_requirements=extra_requirements)
-
- return internal_targets
+def create_inventory(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ inventory_path, # type: str
+ target, # type: IntegrationTarget
+): # type: (...) -> None
+ """Create inventory."""
+ if isinstance(args, PosixIntegrationConfig):
+ if target.target_type == IntegrationTargetType.CONTROLLER:
+ display.info('Configuring controller inventory.', verbosity=1)
+ create_controller_inventory(args, inventory_path, host_state.controller_profile)
+ elif target.target_type == IntegrationTargetType.TARGET:
+ display.info('Configuring target inventory.', verbosity=1)
+ create_posix_inventory(args, inventory_path, host_state.target_profiles, 'needs/ssh/' in target.aliases)
+ else:
+ raise Exception(f'Unhandled test type for target "{target.name}": {target.target_type.name.lower()}')
+ elif isinstance(args, WindowsIntegrationConfig):
+ display.info('Configuring target inventory.', verbosity=1)
+ target_profiles = filter_profiles_for_target(args, host_state.target_profiles, target)
+ create_windows_inventory(args, inventory_path, target_profiles)
+ elif isinstance(args, NetworkIntegrationConfig):
+ display.info('Configuring target inventory.', verbosity=1)
+ target_profiles = filter_profiles_for_target(args, host_state.target_profiles, target)
+ create_network_inventory(args, inventory_path, target_profiles)
def command_integration_filtered(
args, # type: IntegrationConfig
+ host_state, # type: HostState
targets, # type: t.Tuple[IntegrationTarget]
all_targets, # type: t.Tuple[IntegrationTarget]
inventory_path, # type: str
- pre_target=None, # type: t.Optional[t.Callable[IntegrationTarget]]
- post_target=None, # type: t.Optional[t.Callable[IntegrationTarget]]
- remote_temp_path=None, # type: t.Optional[str]
+ pre_target=None, # type: t.Optional[t.Callable[[IntegrationTarget], None]]
+ post_target=None, # type: t.Optional[t.Callable[[IntegrationTarget], None]]
):
"""Run integration tests for the specified targets."""
found = False
@@ -683,13 +416,13 @@ def command_integration_filtered(
if setup_errors:
raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))
- check_pyyaml(args, args.python_version)
+ check_pyyaml(host_state.controller_profile.python)
test_dir = os.path.join(ResultType.TMP.path, 'output_dir')
if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
max_tries = 20
- display.info('SSH service required for tests. Checking to make sure we can connect.')
+ display.info('SSH connection to controller required by tests. Checking the connection.')
for i in range(1, max_tries + 1):
try:
run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
@@ -706,13 +439,19 @@ def command_integration_filtered(
results = {}
- current_environment = None # type: t.Optional[EnvironmentDescription]
+ target_profile = host_state.target_profiles[0]
- # common temporary directory path that will be valid on both the controller and the remote
- # it must be common because it will be referenced in environment variables that are shared across multiple hosts
- common_temp_path = '/tmp/ansible-test-%s' % ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(8))
+ if isinstance(target_profile, PosixProfile):
+ target_python = target_profile.python
- setup_common_temp_dir(args, common_temp_path)
+ if isinstance(target_profile, ControllerProfile):
+ if host_state.controller_profile.python.path != target_profile.python.path:
+ install_requirements(args, target_python, command=True) # integration
+ elif isinstance(target_profile, SshTargetHostProfile):
+ install_requirements(args, target_python, command=True, connection=target_profile.get_controller_target_connections()[0]) # integration
+
+ coverage_manager = CoverageManager(args, host_state, inventory_path)
+ coverage_manager.setup()
try:
for target in targets_iter:
@@ -722,20 +461,13 @@ def command_integration_filtered(
if not found:
continue
- if args.list_targets:
- print(target.name)
- continue
+ create_inventory(args, host_state, inventory_path, target)
tries = 2 if args.retry_on_error else 1
verbosity = args.verbosity
cloud_environment = get_cloud_environment(args, target)
- original_environment = current_environment if current_environment else EnvironmentDescription(args)
- current_environment = None
-
- display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)
-
try:
while tries:
tries -= 1
@@ -744,14 +476,16 @@ def command_integration_filtered(
if cloud_environment:
cloud_environment.setup_once()
- run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, False)
+ run_setup_targets(args, host_state, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, inventory_path,
+ coverage_manager, False)
start_time = time.time()
if pre_target:
pre_target(target)
- run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, inventory_path, common_temp_path, True)
+ run_setup_targets(args, host_state, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, inventory_path,
+ coverage_manager, True)
if not args.explain:
# create a fresh test directory for each test target
@@ -760,11 +494,9 @@ def command_integration_filtered(
try:
if target.script_path:
- command_integration_script(args, target, test_dir, inventory_path, common_temp_path,
- remote_temp_path=remote_temp_path)
+ command_integration_script(args, host_state, target, test_dir, inventory_path, coverage_manager)
else:
- command_integration_role(args, target, start_at_task, test_dir, inventory_path,
- common_temp_path, remote_temp_path=remote_temp_path)
+ command_integration_role(args, host_state, target, start_at_task, test_dir, inventory_path, coverage_manager)
start_at_task = None
finally:
if post_target:
@@ -780,9 +512,6 @@ def command_integration_filtered(
run_time_seconds=int(end_time - start_time),
setup_once=target.setup_once,
setup_always=target.setup_always,
- coverage=args.coverage,
- coverage_label=args.coverage_label,
- python_version=args.python_version,
)
break
@@ -790,23 +519,12 @@ def command_integration_filtered(
if cloud_environment:
cloud_environment.on_failure(target, tries)
- if not original_environment.validate(target.name, throw=False):
- raise
-
if not tries:
raise
display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
display.verbosity = args.verbosity = 6
- start_time = time.time()
- current_environment = EnvironmentDescription(args)
- end_time = time.time()
-
- EnvironmentDescription.check(original_environment, current_environment, target.name, throw=True)
-
- results[target.name]['validation_seconds'] = int(end_time - start_time)
-
passed.append(target)
except Exception as ex:
failed.append(target)
@@ -828,14 +546,7 @@ def command_integration_filtered(
finally:
if not args.explain:
- if args.coverage:
- coverage_temp_path = os.path.join(common_temp_path, ResultType.COVERAGE.name)
- coverage_save_path = ResultType.COVERAGE.path
-
- for filename in os.listdir(coverage_temp_path):
- shutil.copy(os.path.join(coverage_temp_path, filename), os.path.join(coverage_save_path, filename))
-
- remove_tree(common_temp_path)
+ coverage_manager.teardown()
result_name = '%s-%s.json' % (
args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))
@@ -851,15 +562,15 @@ def command_integration_filtered(
len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
-def command_integration_script(args, target, test_dir, inventory_path, temp_path, remote_temp_path=None):
- """
- :type args: IntegrationConfig
- :type target: IntegrationTarget
- :type test_dir: str
- :type inventory_path: str
- :type temp_path: str
- :type remote_temp_path: str | None
- """
+def command_integration_script(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ target, # type: IntegrationTarget
+ test_dir, # type: str
+ inventory_path, # type: str
+ coverage_manager, # type: CoverageManager
+):
+ """Run an integration test script."""
display.info('Running %s integration test script' % target.name)
env_config = None
@@ -899,22 +610,20 @@ def command_integration_script(args, target, test_dir, inventory_path, temp_path
if config_path:
cmd += ['-e', '@%s' % config_path]
- module_coverage = 'non_local/' not in target.aliases
+ env.update(coverage_manager.get_environment(target.name, target.aliases))
+ cover_python(args, host_state.controller_profile.python, cmd, target.name, env, cwd=cwd)
- intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
- remote_temp_path=remote_temp_path, module_coverage=module_coverage)
-
-def command_integration_role(args, target, start_at_task, test_dir, inventory_path, temp_path, remote_temp_path=None):
- """
- :type args: IntegrationConfig
- :type target: IntegrationTarget
- :type start_at_task: str | None
- :type test_dir: str
- :type inventory_path: str
- :type temp_path: str
- :type remote_temp_path: str | None
- """
+def command_integration_role(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ target, # type: IntegrationTarget
+ start_at_task, # type: t.Optional[str]
+ test_dir, # type: str
+ inventory_path, # type: str
+ coverage_manager, # type: CoverageManager
+):
+ """Run an integration test role."""
display.info('Running %s integration test role' % target.name)
env_config = None
@@ -937,6 +646,11 @@ def command_integration_role(args, target, start_at_task, test_dir, inventory_pa
hosts = 'testhost'
gather_facts = True
+ if 'gather_facts/yes/' in target.aliases:
+ gather_facts = True
+ elif 'gather_facts/no/' in target.aliases:
+ gather_facts = False
+
if not isinstance(args, NetworkIntegrationConfig):
cloud_environment = get_cloud_environment(args, target)
@@ -1015,22 +729,22 @@ def command_integration_role(args, target, start_at_task, test_dir, inventory_pa
env['ANSIBLE_ROLES_PATH'] = test_env.targets_dir
- module_coverage = 'non_local/' not in target.aliases
- intercept_command(args, cmd, target_name=target.name, env=env, cwd=cwd, temp_path=temp_path,
- remote_temp_path=remote_temp_path, module_coverage=module_coverage)
+ env.update(coverage_manager.get_environment(target.name, target.aliases))
+ cover_python(args, host_state.controller_profile.python, cmd, target.name, env, cwd=cwd)
-def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, inventory_path, temp_path, always):
- """
- :type args: IntegrationConfig
- :type test_dir: str
- :type target_names: list[str]
- :type targets_dict: dict[str, IntegrationTarget]
- :type targets_executed: set[str]
- :type inventory_path: str
- :type temp_path: str
- :type always: bool
- """
+def run_setup_targets(
+ args, # type: IntegrationConfig
+ host_state, # type: HostState
+ test_dir, # type: str
+ target_names, # type: t.List[str]
+ targets_dict, # type: t.Dict[str, IntegrationTarget]
+ targets_executed, # type: t.Set[str]
+ inventory_path, # type: str
+ coverage_manager, # type: CoverageManager
+ always, # type: bool
+):
+ """Run setup targets."""
for target_name in target_names:
if not always and target_name in targets_executed:
continue
@@ -1043,9 +757,9 @@ def run_setup_targets(args, test_dir, target_names, targets_dict, targets_execut
make_dirs(test_dir)
if target.script_path:
- command_integration_script(args, target, test_dir, inventory_path, temp_path)
+ command_integration_script(args, host_state, target, test_dir, inventory_path, coverage_manager)
else:
- command_integration_role(args, target, None, test_dir, inventory_path, temp_path)
+ command_integration_role(args, host_state, target, None, test_dir, inventory_path, coverage_manager)
targets_executed.add(target_name)
@@ -1114,165 +828,130 @@ class IntegrationCache(CommonCache):
return self.get('dependency_map', lambda: generate_dependency_map(self.integration_targets))
-class EnvironmentDescription:
- """Description of current running environment."""
- def __init__(self, args):
- """Initialize snapshot of environment configuration.
- :type args: IntegrationConfig
- """
- self.args = args
-
- if self.args.explain:
- self.data = {}
- return
-
- warnings = []
-
- versions = ['']
- versions += SUPPORTED_PYTHON_VERSIONS
- versions += list(set(v.split('.', 1)[0] for v in SUPPORTED_PYTHON_VERSIONS))
-
- version_check = os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'versions.py')
- python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions))
- pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions))
- program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v])
- pip_interpreters = dict((v, self.get_shebang(pip_paths[v])) for v in sorted(pip_paths) if pip_paths[v])
- known_hosts_hash = get_hash(os.path.expanduser('~/.ssh/known_hosts'))
-
- for version in sorted(versions):
- self.check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings)
-
- for warning in warnings:
- display.warning(warning, unique=True)
-
- self.data = dict(
- python_paths=python_paths,
- pip_paths=pip_paths,
- program_versions=program_versions,
- pip_interpreters=pip_interpreters,
- known_hosts_hash=known_hosts_hash,
- warnings=warnings,
- )
+def filter_profiles_for_target(args, profiles, target): # type: (IntegrationConfig, t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
+ """Return a list of profiles after applying target filters."""
+ if target.target_type == IntegrationTargetType.CONTROLLER:
+ profile_filter = get_target_filter(args, [args.controller], True)
+ elif target.target_type == IntegrationTargetType.TARGET:
+ profile_filter = get_target_filter(args, args.targets, False)
+ else:
+ raise Exception(f'Unhandled test type for target "{target.name}": {target.target_type.name.lower()}')
- @staticmethod
- def check_python_pip_association(version, python_paths, pip_paths, pip_interpreters, warnings):
- """
- :type version: str
- :param python_paths: dict[str, str]
- :param pip_paths: dict[str, str]
- :param pip_interpreters: dict[str, str]
- :param warnings: list[str]
- """
- python_label = 'Python%s' % (' %s' % version if version else '')
+ profiles = profile_filter.filter_profiles(profiles, target)
- pip_path = pip_paths.get(version)
- python_path = python_paths.get(version)
+ return profiles
- if not python_path or not pip_path:
- # skip checks when either python or pip are missing for this version
- return
- pip_shebang = pip_interpreters.get(version)
+def get_integration_filter(args, targets): # type: (IntegrationConfig, t.List[IntegrationTarget]) -> t.Set[str]
+ """Return a list of test targets to skip based on the host(s) that will be used to run the specified test targets."""
+ invalid_targets = sorted(target.name for target in targets if target.target_type not in (IntegrationTargetType.CONTROLLER, IntegrationTargetType.TARGET))
- match = re.search(r'#!\s*(?P<command>[^\s]+)', pip_shebang)
+ if invalid_targets and not args.list_targets:
+ message = f'''Unable to determine context for the following test targets: {", ".join(invalid_targets)}
- if not match:
- warnings.append('A %s pip was found at "%s", but it does not have a valid shebang: %s' % (python_label, pip_path, pip_shebang))
- return
+Make sure the test targets are correctly named:
- pip_interpreter = os.path.realpath(match.group('command'))
- python_interpreter = os.path.realpath(python_path)
+ - Modules - The target name should match the module name.
+ - Plugins - The target name should be "{{plugin_type}}_{{plugin_name}}".
- if pip_interpreter == python_interpreter:
- return
+If necessary, context can be controlled by adding entries to the "aliases" file for a test target:
- try:
- identical = filecmp.cmp(pip_interpreter, python_interpreter)
- except OSError:
- identical = False
+ - Add the name(s) of modules which are tested.
+ - Add "context/target" for module and module_utils tests (these will run on the target host).
+ - Add "context/controller" for other test types (these will run on the controller).'''
- if identical:
- return
+ raise ApplicationError(message)
- warnings.append('A %s pip was found at "%s", but it uses interpreter "%s" instead of "%s".' % (
- python_label, pip_path, pip_interpreter, python_interpreter))
+ invalid_targets = sorted(target.name for target in targets if target.actual_type not in (IntegrationTargetType.CONTROLLER, IntegrationTargetType.TARGET))
- def __str__(self):
- """
- :rtype: str
- """
- return json.dumps(self.data, sort_keys=True, indent=4)
+ if invalid_targets:
+ if data_context().content.is_ansible:
+ display.warning(f'Unable to determine context for the following test targets: {", ".join(invalid_targets)}')
+ else:
+ display.warning(f'Unable to determine context for the following test targets, they will be run on the target host: {", ".join(invalid_targets)}')
- def validate(self, target_name, throw):
- """
- :type target_name: str
- :type throw: bool
- :rtype: bool
- """
- current = EnvironmentDescription(self.args)
+ exclude = set() # type: t.Set[str]
- return self.check(self, current, target_name, throw)
+ controller_targets = [target for target in targets if target.target_type == IntegrationTargetType.CONTROLLER]
+ target_targets = [target for target in targets if target.target_type == IntegrationTargetType.TARGET]
- @staticmethod
- def check(original, current, target_name, throw):
- """
- :type original: EnvironmentDescription
- :type current: EnvironmentDescription
- :type target_name: str
- :type throw: bool
- :rtype: bool
- """
- original_json = str(original)
- current_json = str(current)
-
- if original_json == current_json:
- return True
-
- unified_diff = '\n'.join(difflib.unified_diff(
- a=original_json.splitlines(),
- b=current_json.splitlines(),
- fromfile='original.json',
- tofile='current.json',
- lineterm='',
- ))
+ controller_filter = get_target_filter(args, [args.controller], True)
+ target_filter = get_target_filter(args, args.targets, False)
- message = ('Test target "%s" has changed the test environment!\n'
- 'If these changes are necessary, they must be reverted before the test finishes.\n'
- '>>> Original Environment\n'
- '%s\n'
- '>>> Current Environment\n'
- '%s\n'
- '>>> Environment Diff\n'
- '%s'
- % (target_name, original_json, current_json, unified_diff))
+ controller_filter.filter_targets(controller_targets, exclude)
+ target_filter.filter_targets(target_targets, exclude)
- if throw:
- raise ApplicationError(message)
+ return exclude
- display.error(message)
- return False
+def command_integration_filter(args, # type: TIntegrationConfig
+ targets, # type: t.Iterable[TIntegrationTarget]
+ ): # type: (...) -> t.Tuple[HostState, t.Tuple[TIntegrationTarget, ...]]
+ """Filter the given integration test targets."""
+ targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
+ changes = get_changes_filter(args)
- @staticmethod
- def get_version(command, warnings):
- """
- :type command: list[str]
- :type warnings: list[text]
- :rtype: list[str]
- """
- try:
- stdout, stderr = raw_command(command, capture=True, cmd_verbosity=2)
- except SubprocessError as ex:
- warnings.append(u'%s' % ex)
- return None # all failures are equal, we don't care why it failed, only that it did
+ # special behavior when the --changed-all-target target is selected based on changes
+ if args.changed_all_target in changes:
+ # act as though the --changed-all-target target was in the include list
+ if args.changed_all_mode == 'include' and args.changed_all_target not in args.include:
+ args.include.append(args.changed_all_target)
+ args.delegate_args += ['--include', args.changed_all_target]
+ # act as though the --changed-all-target target was in the exclude list
+ elif args.changed_all_mode == 'exclude' and args.changed_all_target not in args.exclude:
+ args.exclude.append(args.changed_all_target)
- return [line.strip() for line in ((stdout or '').strip() + (stderr or '').strip()).splitlines()]
+ require = args.require + changes
+ exclude = args.exclude
- @staticmethod
- def get_shebang(path):
- """
- :type path: str
- :rtype: str
- """
- with open_text_file(path) as script_fd:
- return script_fd.readline().strip()
+ internal_targets = walk_internal_targets(targets, args.include, exclude, require)
+ environment_exclude = get_integration_filter(args, list(internal_targets))
+
+ environment_exclude |= set(cloud_filter(args, internal_targets))
+
+ if environment_exclude:
+ exclude = sorted(set(exclude) | environment_exclude)
+ internal_targets = walk_internal_targets(targets, args.include, exclude, require)
+
+ if not internal_targets:
+ raise AllTargetsSkipped()
+
+ if args.start_at and not any(target.name == args.start_at for target in internal_targets):
+ raise ApplicationError('Start at target matches nothing: %s' % args.start_at)
+
+ cloud_init(args, internal_targets)
+
+ vars_file_src = os.path.join(data_context().content.root, data_context().content.integration_vars_path)
+
+ if os.path.exists(vars_file_src):
+ def integration_config_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """
+ Add the integration config vars file to the payload file list.
+ This will preserve the file during delegation even if the file is ignored by source control.
+ """
+ files.append((vars_file_src, data_context().content.integration_vars_path))
+
+ data_context().register_payload_callback(integration_config_callback)
+
+ if args.list_targets:
+ raise ListTargets([target.name for target in internal_targets])
+
+ # requirements are installed using a callback since the windows-integration and network-integration host status checks depend on them
+ host_state = prepare_profiles(args, targets_use_pypi=True, requirements=requirements) # integration, windows-integration, network-integration
+
+ if args.delegate:
+ raise Delegate(host_state=host_state, require=require, exclude=exclude)
+
+ return host_state, internal_targets
+
+
+def requirements(args, host_state): # type: (IntegrationConfig, HostState) -> None
+ """Install requirements."""
+ target_profile = host_state.target_profiles[0]
+
+ configure_pypi_proxy(args, host_state.controller_profile) # integration, windows-integration, network-integration
+
+ if isinstance(target_profile, PosixProfile) and not isinstance(target_profile, ControllerProfile):
+ configure_pypi_proxy(args, target_profile) # integration
+
+ install_requirements(args, host_state.controller_profile.python, ansible=True, command=True) # integration, windows-integration, network-integration
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
index 8d4bbec1ea..040fbc5004 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/__init__.py
@@ -1,6 +1,5 @@
"""Plugin system for cloud providers and environments for use in integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import atexit
@@ -9,8 +8,7 @@ import os
import re
import tempfile
import time
-
-from .... import types as t
+import typing as t
from ....encoding import (
to_bytes,
@@ -21,12 +19,12 @@ from ....io import (
)
from ....util import (
- ABC,
ANSIBLE_TEST_CONFIG_ROOT,
ApplicationError,
display,
import_plugins,
load_plugins,
+ cache,
)
from ....util_common import (
@@ -52,19 +50,34 @@ from ....data import (
)
from ....docker_util import (
- get_docker_command,
+ docker_available,
)
-PROVIDERS = {}
-ENVIRONMENTS = {}
-
-def initialize_cloud_plugins(): # type: () -> None
+@cache
+def get_cloud_plugins(): # type: () -> t.Tuple[t.Dict[str, t.Type[CloudProvider]], t.Dict[str, t.Type[CloudEnvironment]]]
"""Import cloud plugins and load them into the plugin dictionaries."""
import_plugins('commands/integration/cloud')
- load_plugins(CloudProvider, PROVIDERS)
- load_plugins(CloudEnvironment, ENVIRONMENTS)
+ providers = {}
+ environments = {}
+
+ load_plugins(CloudProvider, providers)
+ load_plugins(CloudEnvironment, environments)
+
+ return providers, environments
+
+
+@cache
+def get_provider_plugins(): # type: () -> t.Dict[str, t.Type[CloudProvider]]
+ """Return a dictionary of the available cloud provider plugins."""
+ return get_cloud_plugins()[0]
+
+
+@cache
+def get_environment_plugins(): # type: () -> t.Dict[str, t.Type[CloudEnvironment]]
+ """Return a dictionary of the available cloud environment plugins."""
+ return get_cloud_plugins()[1]
def get_cloud_platforms(args, targets=None): # type: (TestConfig, t.Optional[t.Tuple[IntegrationTarget, ...]]) -> t.List[str]
@@ -93,7 +106,7 @@ def get_cloud_platform(target): # type: (IntegrationTarget) -> t.Optional[str]
if len(cloud_platforms) == 1:
cloud_platform = cloud_platforms.pop()
- if cloud_platform not in PROVIDERS:
+ if cloud_platform not in get_provider_plugins():
raise ApplicationError('Target %s aliases contains unknown cloud platform: %s' % (target.name, cloud_platform))
return cloud_platform
@@ -103,7 +116,7 @@ def get_cloud_platform(target): # type: (IntegrationTarget) -> t.Optional[str]
def get_cloud_providers(args, targets=None): # type: (IntegrationConfig, t.Optional[t.Tuple[IntegrationTarget, ...]]) -> t.List[CloudProvider]
"""Return a list of cloud providers for the given targets."""
- return [PROVIDERS[p](args) for p in get_cloud_platforms(args, targets)]
+ return [get_provider_plugins()[p](args) for p in get_cloud_platforms(args, targets)]
def get_cloud_environment(args, target): # type: (IntegrationConfig, IntegrationTarget) -> t.Optional[CloudEnvironment]
@@ -113,7 +126,7 @@ def get_cloud_environment(args, target): # type: (IntegrationConfig, Integratio
if not cloud_platform:
return None
- return ENVIRONMENTS[cloud_platform](args)
+ return get_environment_plugins()[cloud_platform](args)
def cloud_filter(args, targets): # type: (IntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> t.List[str]
@@ -162,10 +175,8 @@ def cloud_init(args, targets): # type: (IntegrationConfig, t.Tuple[IntegrationT
write_json_test_results(ResultType.DATA, result_name, data)
-class CloudBase(ABC):
+class CloudBase(metaclass=abc.ABCMeta):
"""Base class for cloud plugins."""
- __metaclass__ = abc.ABCMeta
-
_CONFIG_PATH = 'config_path'
_RESOURCE_PREFIX = 'resource_prefix'
_MANAGED = 'managed'
@@ -244,7 +255,7 @@ class CloudBase(ABC):
class CloudProvider(CloudBase):
"""Base class for cloud provider plugins. Sets up cloud resources before delegation."""
def __init__(self, args, config_extension='.ini'): # type: (IntegrationConfig, str) -> None
- super(CloudProvider, self).__init__(args)
+ super().__init__(args)
self.ci_provider = get_ci_provider()
self.remove_config = False
@@ -261,7 +272,7 @@ class CloudProvider(CloudBase):
if not self.uses_docker and not self.uses_config:
return
- if self.uses_docker and get_docker_command():
+ if self.uses_docker and docker_available():
return
if self.uses_config and os.path.exists(self.config_static_path):
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
index 748d135064..42d6f0bc21 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/acme.py
@@ -1,6 +1,5 @@
"""ACME plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -9,6 +8,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -24,7 +24,7 @@ class ACMEProvider(CloudProvider):
DOCKER_SIMULATOR_NAME = 'acme-simulator'
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(ACMEProvider, self).__init__(args)
+ super().__init__(args)
# The simulator must be pinned to a specific version to guarantee CI passes with the version used.
if os.environ.get('ANSIBLE_ACME_CONTAINER'):
@@ -36,7 +36,7 @@ class ACMEProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(ACMEProvider, self).setup()
+ super().setup()
if self._use_static_config():
self._setup_static()
@@ -50,18 +50,16 @@ class ACMEProvider(CloudProvider):
14000, # Pebble ACME CA
]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
self.DOCKER_SIMULATOR_NAME,
ports,
allow_existing=True,
- cleanup=True,
+ cleanup=CleanupMode.YES,
)
- descriptor.register(self.args)
-
self._set_cloud_config('acme_host', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self): # type: () -> None
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
index 294cdd9c58..f449393330 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/aws.py
@@ -1,15 +1,13 @@
"""AWS plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import uuid
-
-from .... import types as t
+import configparser
+import typing as t
from ....util import (
ApplicationError,
- ConfigParser,
display,
)
@@ -25,6 +23,10 @@ from ....core_ci import (
AnsibleCoreCI,
)
+from ....host_configs import (
+ OriginConfig,
+)
+
from . import (
CloudEnvironment,
CloudEnvironmentConfig,
@@ -35,7 +37,7 @@ from . import (
class AwsCloudProvider(CloudProvider):
"""AWS cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(AwsCloudProvider, self).__init__(args)
+ super().__init__(args)
self.uses_config = True
@@ -46,15 +48,15 @@ class AwsCloudProvider(CloudProvider):
if aci.available:
return
- super(AwsCloudProvider, self).filter(targets, exclude)
+ super().filter(targets, exclude)
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(AwsCloudProvider, self).setup()
+ super().setup()
aws_config_path = os.path.expanduser('~/.aws')
- if os.path.exists(aws_config_path) and not self.args.docker and not self.args.remote:
+ if os.path.exists(aws_config_path) and isinstance(self.args.controller, OriginConfig):
raise ApplicationError('Rename "%s" or use the --docker or --remote option to isolate tests.' % aws_config_path)
if not self._use_static_config():
@@ -89,14 +91,14 @@ class AwsCloudProvider(CloudProvider):
def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
"""Return an AWS instance of AnsibleCoreCI."""
- return AnsibleCoreCI(self.args, 'aws', 'aws', persist=False, stage=self.args.remote_stage, provider='aws', internal=True)
+ return AnsibleCoreCI(self.args, 'aws', 'aws', 'aws', persist=False)
class AwsCloudEnvironment(CloudEnvironment):
"""AWS cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
index 408410001f..002fa581db 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/azure.py
@@ -1,10 +1,10 @@
"""Azure plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import configparser
import os
-
-from .... import types as t
+import urllib.parse
+import typing as t
from ....io import (
read_text_file,
@@ -12,7 +12,6 @@ from ....io import (
from ....util import (
ApplicationError,
- ConfigParser,
display,
)
@@ -26,9 +25,6 @@ from ....target import (
from ....http import (
HttpClient,
- parse_qs,
- urlparse,
- urlunparse,
)
from ....core_ci import (
@@ -47,7 +43,7 @@ class AzureCloudProvider(CloudProvider):
SHERLOCK_CONFIG_PATH = os.path.expanduser('~/.ansible-sherlock-ci.cfg')
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(AzureCloudProvider, self).__init__(args)
+ super().__init__(args)
self.aci = None
@@ -63,11 +59,11 @@ class AzureCloudProvider(CloudProvider):
if os.path.isfile(self.SHERLOCK_CONFIG_PATH):
return
- super(AzureCloudProvider, self).filter(targets, exclude)
+ super().filter(targets, exclude)
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(AzureCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
self._setup_dynamic()
@@ -79,7 +75,7 @@ class AzureCloudProvider(CloudProvider):
if self.aci:
self.aci.stop()
- super(AzureCloudProvider, self).cleanup()
+ super().cleanup()
def _setup_dynamic(self): # type: () -> None
"""Request Azure credentials through Sherlock."""
@@ -91,9 +87,9 @@ class AzureCloudProvider(CloudProvider):
if os.path.isfile(self.SHERLOCK_CONFIG_PATH):
sherlock_uri = read_text_file(self.SHERLOCK_CONFIG_PATH).splitlines()[0].strip() + '&rgcount=2'
- parts = urlparse(sherlock_uri)
- query_string = parse_qs(parts.query)
- base_uri = urlunparse(parts[:4] + ('', ''))
+ parts = urllib.parse.urlparse(sherlock_uri)
+ query_string = urllib.parse.parse_qs(parts.query)
+ base_uri = urllib.parse.urlunparse(parts[:4] + ('', ''))
if 'code' not in query_string:
example_uri = 'https://example.azurewebsites.net/api/sandbox-provisioning'
@@ -137,7 +133,7 @@ class AzureCloudProvider(CloudProvider):
def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
"""Return an Azure instance of AnsibleCoreCI."""
- return AnsibleCoreCI(self.args, 'azure', 'azure', persist=False, stage=self.args.remote_stage, provider='azure', internal=True)
+ return AnsibleCoreCI(self.args, 'azure', 'azure', 'azure', persist=False)
class AzureCloudEnvironment(CloudEnvironment):
@@ -168,7 +164,7 @@ class AzureCloudEnvironment(CloudEnvironment):
def get_config(config_path): # type: (str) -> t.Dict[str, str]
"""Return a configuration dictionary parsed from the given configuration path."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(config_path)
config = dict((key.upper(), value) for key, value in parser.items('default'))
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
index 095b030862..0a17fb2546 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cloudscale.py
@@ -4,11 +4,11 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""Cloudscale plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import configparser
from ....util import (
- ConfigParser,
display,
)
@@ -26,13 +26,13 @@ from . import (
class CloudscaleCloudProvider(CloudProvider):
"""Cloudscale cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(CloudscaleCloudProvider, self).__init__(args)
+ super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(CloudscaleCloudProvider, self).setup()
+ super().setup()
self._use_static_config()
@@ -41,7 +41,7 @@ class CloudscaleCloudEnvironment(CloudEnvironment):
"""Cloudscale cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
index 9b58c46fc4..1dc07abb98 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/cs.py
@@ -1,15 +1,14 @@
"""CloudStack plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
+import configparser
import os
-
-from .... import types as t
+import urllib.parse
+import typing as t
from ....util import (
ApplicationError,
- ConfigParser,
display,
)
@@ -17,15 +16,12 @@ from ....config import (
IntegrationConfig,
)
-from ....http import (
- urlparse,
-)
-
from ....docker_util import (
docker_exec,
)
from ....containers import (
+ CleanupMode,
run_support_container,
wait_for_file,
)
@@ -42,7 +38,7 @@ class CsCloudProvider(CloudProvider):
DOCKER_SIMULATOR_NAME = 'cloudstack-sim'
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(CsCloudProvider, self).__init__(args)
+ super().__init__(args)
self.image = os.environ.get('ANSIBLE_CLOUDSTACK_CONTAINER', 'quay.io/ansible/cloudstack-test-container:1.4.0')
self.host = ''
@@ -53,7 +49,7 @@ class CsCloudProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(CsCloudProvider, self).setup()
+ super().setup()
if self._use_static_config():
self._setup_static()
@@ -62,12 +58,12 @@ class CsCloudProvider(CloudProvider):
def _setup_static(self): # type: () -> None
"""Configure CloudStack tests for use with static configuration."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_static_path)
endpoint = parser.get('cloudstack', 'endpoint')
- parts = urlparse(endpoint)
+ parts = urllib.parse.urlparse(endpoint)
self.host = parts.hostname
@@ -95,18 +91,16 @@ class CsCloudProvider(CloudProvider):
self.port,
]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
self.DOCKER_SIMULATOR_NAME,
ports,
allow_existing=True,
- cleanup=True,
+ cleanup=CleanupMode.YES,
)
- descriptor.register(self.args)
-
# apply work-around for OverlayFS issue
# https://github.com/docker/for-linux/issues/72#issuecomment-319904698
docker_exec(self.args, self.DOCKER_SIMULATOR_NAME, ['find', '/var/lib/mysql', '-type', 'f', '-exec', 'touch', '{}', ';'])
@@ -135,6 +129,7 @@ class CsCloudProvider(CloudProvider):
def _get_credentials(self, container_name): # type: (str) -> t.Dict[str, t.Any]
"""Wait for the CloudStack simulator to return credentials."""
def check(value):
+ """Return True if the given configuration is valid JSON, otherwise return False."""
# noinspection PyBroadException
try:
json.loads(value)
@@ -152,7 +147,7 @@ class CsCloudEnvironment(CloudEnvironment):
"""CloudStack cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
config = dict(parser.items('default'))
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
index 0ea771dc6c..00b05d75e2 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/digitalocean.py
@@ -1,9 +1,9 @@
"""DigitalOcean plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import configparser
from ....util import (
- ConfigParser,
display,
)
@@ -21,13 +21,13 @@ from . import (
class DigitalOceanCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(DigitalOceanCloudProvider, self).__init__(args)
+ super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(DigitalOceanCloudProvider, self).setup()
+ super().setup()
self._use_static_config()
@@ -36,7 +36,7 @@ class DigitalOceanCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
index 36db874161..b4ca48f75f 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/foreman.py
@@ -1,6 +1,5 @@
"""Foreman plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -9,6 +8,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -33,7 +33,7 @@ class ForemanProvider(CloudProvider):
DOCKER_IMAGE = 'quay.io/ansible/foreman-test-container:1.4.0'
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(ForemanProvider, self).__init__(args)
+ super().__init__(args)
self.__container_from_env = os.environ.get('ANSIBLE_FRMNSIM_CONTAINER')
"""
@@ -48,7 +48,7 @@ class ForemanProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup cloud resource before delegation and reg cleanup callback."""
- super(ForemanProvider, self).setup()
+ super().setup()
if self._use_static_config():
self._setup_static()
@@ -63,18 +63,16 @@ class ForemanProvider(CloudProvider):
foreman_port,
]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
self.DOCKER_SIMULATOR_NAME,
ports,
allow_existing=True,
- cleanup=True,
+ cleanup=CleanupMode.YES,
)
- descriptor.register(self.args)
-
self._set_cloud_config('FOREMAN_HOST', self.DOCKER_SIMULATOR_NAME)
self._set_cloud_config('FOREMAN_PORT', str(foreman_port))
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
index d5bf37ccd4..066d2b0a0b 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/galaxy.py
@@ -1,6 +1,5 @@
"""Galaxy (ansible-galaxy) plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import tempfile
@@ -79,7 +78,7 @@ class GalaxyProvider(CloudProvider):
The pulp source itself resides at: https://github.com/pulp/pulp-oci-images
"""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(GalaxyProvider, self).__init__(args)
+ super().__init__(args)
# Cannot use the latest container image as either galaxy_ng 4.2.0rc2 or pulp 0.5.0 has sporatic issues with
# dropping published collections in CI. Try running the tests multiple times when updating. Will also need to
@@ -94,7 +93,7 @@ class GalaxyProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup cloud resource before delegation and reg cleanup callback."""
- super(GalaxyProvider, self).setup()
+ super().setup()
galaxy_port = 80
pulp_host = 'ansible-ci-pulp'
@@ -114,7 +113,6 @@ class GalaxyProvider(CloudProvider):
ports,
start=False,
allow_existing=True,
- cleanup=None,
)
if not descriptor.running:
@@ -133,8 +131,6 @@ class GalaxyProvider(CloudProvider):
descriptor.start(self.args)
- descriptor.register(self.args)
-
self._set_cloud_config('PULP_HOST', pulp_host)
self._set_cloud_config('PULP_PORT', str(pulp_port))
self._set_cloud_config('GALAXY_PORT', str(galaxy_port))
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
index a7409a375e..b23097a7be 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/gcp.py
@@ -1,12 +1,12 @@
# Copyright: (c) 2018, Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""GCP plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import configparser
from ....util import (
display,
- ConfigParser,
)
from ....config import (
@@ -23,13 +23,13 @@ from . import (
class GcpCloudProvider(CloudProvider):
"""GCP cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(GcpCloudProvider, self).__init__(args)
+ super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(GcpCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
display.notice(
@@ -41,7 +41,7 @@ class GcpCloudEnvironment(CloudEnvironment):
"""GCP cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
index a897d34c65..28b07e7230 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/hcloud.py
@@ -1,11 +1,10 @@
"""Hetzner Cloud plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-from .... import types as t
+import configparser
+import typing as t
from ....util import (
- ConfigParser,
display,
)
@@ -31,7 +30,7 @@ from . import (
class HcloudCloudProvider(CloudProvider):
"""Hetzner Cloud provider plugin. Sets up cloud resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(HcloudCloudProvider, self).__init__(args)
+ super().__init__(args)
self.uses_config = True
@@ -42,11 +41,11 @@ class HcloudCloudProvider(CloudProvider):
if aci.available:
return
- super(HcloudCloudProvider, self).filter(targets, exclude)
+ super().filter(targets, exclude)
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(HcloudCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
self._setup_dynamic()
@@ -79,14 +78,14 @@ class HcloudCloudProvider(CloudProvider):
def _create_ansible_core_ci(self): # type: () -> AnsibleCoreCI
"""Return a Heztner instance of AnsibleCoreCI."""
- return AnsibleCoreCI(self.args, 'hetzner', 'hetzner', persist=False, stage=self.args.remote_stage, provider='hetzner', internal=True)
+ return AnsibleCoreCI(self.args, 'hetzner', 'hetzner', 'hetzner', persist=False)
class HcloudCloudEnvironment(CloudEnvironment):
"""Hetzner Cloud cloud environment plugin. Updates integration test environment after delegation."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
index d26244656b..2899aeff62 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/httptester.py
@@ -1,6 +1,5 @@
"""HTTP Tester plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -14,6 +13,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -29,7 +29,7 @@ KRB5_PASSWORD_ENV = 'KRB5_PASSWORD'
class HttptesterProvider(CloudProvider):
"""HTTP Tester provider plugin. Sets up resources before delegation."""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(HttptesterProvider, self).__init__(args)
+ super().__init__(args)
self.image = os.environ.get('ANSIBLE_HTTP_TEST_CONTAINER', 'quay.io/ansible/http-test-container:1.3.0')
@@ -37,7 +37,7 @@ class HttptesterProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup resources before delegation."""
- super(HttptesterProvider, self).setup()
+ super().setup()
ports = [
80,
@@ -61,16 +61,13 @@ class HttptesterProvider(CloudProvider):
'http-test-container',
ports,
aliases=aliases,
- start=True,
allow_existing=True,
- cleanup=True,
+ cleanup=CleanupMode.YES,
env={
KRB5_PASSWORD_ENV: generate_password(),
},
)
- descriptor.register(self.args)
-
# Read the password from the container environment.
# This allows the tests to work when re-using an existing container.
# The password is marked as sensitive, since it may differ from the one we generated.
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
index d75a55b167..4c695fc696 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/nios.py
@@ -1,6 +1,5 @@
"""NIOS plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -9,6 +8,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -33,7 +33,7 @@ class NiosProvider(CloudProvider):
DOCKER_IMAGE = 'quay.io/ansible/nios-test-container:1.3.0'
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(NiosProvider, self).__init__(args)
+ super().__init__(args)
self.__container_from_env = os.environ.get('ANSIBLE_NIOSSIM_CONTAINER')
"""
@@ -49,7 +49,7 @@ class NiosProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup cloud resource before delegation and reg cleanup callback."""
- super(NiosProvider, self).setup()
+ super().setup()
if self._use_static_config():
self._setup_static()
@@ -64,18 +64,16 @@ class NiosProvider(CloudProvider):
nios_port,
]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
self.DOCKER_SIMULATOR_NAME,
ports,
allow_existing=True,
- cleanup=True,
+ cleanup=CleanupMode.YES,
)
- descriptor.register(self.args)
-
self._set_cloud_config('NIOS_HOST', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self): # type: () -> None
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
index 029a569914..3019f3102b 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/opennebula.py
@@ -1,9 +1,9 @@
"""OpenNebula plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import configparser
from ....util import (
- ConfigParser,
display,
)
@@ -18,7 +18,7 @@ class OpenNebulaCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(OpenNebulaCloudProvider, self).setup()
+ super().setup()
if not self._use_static_config():
self._setup_dynamic()
@@ -44,7 +44,7 @@ class OpenNebulaCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
ansible_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
index 0924aa4c7c..2acf96282d 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/openshift.py
@@ -1,6 +1,5 @@
"""OpenShift plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
@@ -17,6 +16,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
wait_for_file,
)
@@ -33,7 +33,7 @@ class OpenShiftCloudProvider(CloudProvider):
DOCKER_CONTAINER_NAME = 'openshift-origin'
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(OpenShiftCloudProvider, self).__init__(args, config_extension='.kubeconfig')
+ super().__init__(args, config_extension='.kubeconfig')
# The image must be pinned to a specific version to guarantee CI passes with the version used.
self.image = 'openshift/origin:v3.9.0'
@@ -43,7 +43,7 @@ class OpenShiftCloudProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(OpenShiftCloudProvider, self).setup()
+ super().setup()
if self._use_static_config():
self._setup_static()
@@ -69,19 +69,17 @@ class OpenShiftCloudProvider(CloudProvider):
cmd = ['start', 'master', '--listen', 'https://0.0.0.0:%d' % port]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
self.DOCKER_CONTAINER_NAME,
ports,
allow_existing=True,
- cleanup=True,
+ cleanup=CleanupMode.YES,
cmd=cmd,
)
- descriptor.register(self.args)
-
if self.args.explain:
config = '# Unknown'
else:
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
index af8e3dcbaf..1ef158b2b2 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/scaleway.py
@@ -1,9 +1,9 @@
"""Scaleway plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import configparser
from ....util import (
- ConfigParser,
display,
)
@@ -21,13 +21,13 @@ from . import (
class ScalewayCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(ScalewayCloudProvider, self).__init__(args)
+ super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(ScalewayCloudProvider, self).setup()
+ super().setup()
self._use_static_config()
@@ -36,7 +36,7 @@ class ScalewayCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
index 1ee191fd66..fb69b9b212 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vcenter.py
@@ -1,12 +1,11 @@
"""VMware vCenter plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import configparser
import os
from ....util import (
ApplicationError,
- ConfigParser,
display,
)
@@ -15,6 +14,7 @@ from ....config import (
)
from ....containers import (
+ CleanupMode,
run_support_container,
)
@@ -30,7 +30,7 @@ class VcenterProvider(CloudProvider):
DOCKER_SIMULATOR_NAME = 'vcenter-simulator'
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(VcenterProvider, self).__init__(args)
+ super().__init__(args)
# The simulator must be pinned to a specific version to guarantee CI passes with the version used.
if os.environ.get('ANSIBLE_VCSIM_CONTAINER'):
@@ -51,7 +51,7 @@ class VcenterProvider(CloudProvider):
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(VcenterProvider, self).setup()
+ super().setup()
self._set_cloud_config('vmware_test_platform', self.vmware_test_platform)
@@ -73,18 +73,16 @@ class VcenterProvider(CloudProvider):
5000, # control port for flask app in simulator
]
- descriptor = run_support_container(
+ run_support_container(
self.args,
self.platform,
self.image,
self.DOCKER_SIMULATOR_NAME,
ports,
allow_existing=True,
- cleanup=True,
+ cleanup=CleanupMode.YES,
)
- descriptor.register(self.args)
-
self._set_cloud_config('vcenter_hostname', self.DOCKER_SIMULATOR_NAME)
def _setup_static(self): # type: () -> None
@@ -99,7 +97,7 @@ class VcenterEnvironment(CloudEnvironment):
try:
# We may be in a container, so we cannot just reach VMWARE_TEST_PLATFORM,
# We do a try/except instead
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path) # static
env_vars = dict()
diff --git a/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
index 9abc65cb6a..2e8b1b3fac 100644
--- a/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
+++ b/test/lib/ansible_test/_internal/commands/integration/cloud/vultr.py
@@ -1,9 +1,9 @@
"""Vultr plugin for integration tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+
+import configparser
from ....util import (
- ConfigParser,
display,
)
@@ -21,13 +21,13 @@ from . import (
class VultrCloudProvider(CloudProvider):
"""Checks if a configuration file has been passed or fixtures are going to be used for testing"""
def __init__(self, args): # type: (IntegrationConfig) -> None
- super(VultrCloudProvider, self).__init__(args)
+ super().__init__(args)
self.uses_config = True
def setup(self): # type: () -> None
"""Setup the cloud resource before delegation and register a cleanup callback."""
- super(VultrCloudProvider, self).setup()
+ super().setup()
self._use_static_config()
@@ -36,7 +36,7 @@ class VultrCloudEnvironment(CloudEnvironment):
"""Updates integration test environment after delegation. Will setup the config file as parameter."""
def get_environment_config(self): # type: () -> CloudEnvironmentConfig
"""Return environment configuration for use in the test environment after delegation."""
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(self.config_path)
env_vars = dict(
diff --git a/test/lib/ansible_test/_internal/commands/integration/coverage.py b/test/lib/ansible_test/_internal/commands/integration/coverage.py
new file mode 100644
index 0000000000..c36b440366
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/coverage.py
@@ -0,0 +1,416 @@
+"""Code coverage support for integration tests."""
+from __future__ import annotations
+
+import abc
+import os
+import shutil
+import tempfile
+import typing as t
+import zipfile
+
+from ...io import (
+ write_text_file,
+)
+
+from ...ansible_util import (
+ run_playbook,
+)
+
+from ...config import (
+ IntegrationConfig,
+)
+
+from ...util import (
+ COVERAGE_CONFIG_NAME,
+ MODE_DIRECTORY,
+ MODE_DIRECTORY_WRITE,
+ MODE_FILE,
+ SubprocessError,
+ cache,
+ display,
+ generate_name,
+ get_generic_type,
+ get_type_map,
+ remove_tree,
+ sanitize_host_name,
+)
+
+from ...util_common import (
+ ResultType,
+)
+
+from ...coverage_util import (
+ generate_coverage_config,
+ get_coverage_platform,
+)
+
+from ...host_configs import (
+ HostConfig,
+ PosixConfig,
+ WindowsConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from ...data import (
+ data_context,
+)
+
+from ...host_profiles import (
+ ControllerProfile,
+ HostProfile,
+ PosixProfile,
+ SshTargetHostProfile,
+)
+
+from ...provisioning import (
+ HostState,
+)
+
+from ...connections import (
+ LocalConnection,
+)
+
+from ...inventory import (
+ create_windows_inventory,
+ create_posix_inventory,
+)
+
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+
+
+class CoverageHandler(t.Generic[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for configuring hosts for integration test code coverage."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ self.args = args
+ self.host_state = host_state
+ self.inventory_path = inventory_path
+ self.profiles = self.get_profiles()
+
+ def get_profiles(self): # type: () -> t.List[HostProfile]
+ """Return a list of profiles relevant for this handler."""
+ profile_type = get_generic_type(type(self), HostConfig)
+ profiles = [profile for profile in self.host_state.target_profiles if isinstance(profile.config, profile_type)]
+
+ return profiles
+
+ @property
+ @abc.abstractmethod
+ def is_active(self): # type: () -> bool
+ """True if the handler should be used, otherwise False."""
+
+ @abc.abstractmethod
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+
+ @abc.abstractmethod
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+
+ @abc.abstractmethod
+ def create_inventory(self): # type: () -> None
+ """Create inventory, if needed."""
+
+ @abc.abstractmethod
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+
+ def run_playbook(self, playbook, variables): # type: (str, t.Dict[str, str]) -> None
+ """Run the specified playbook using the current inventory."""
+ self.create_inventory()
+ run_playbook(self.args, self.inventory_path, playbook, variables)
+
+
+class PosixCoverageHandler(CoverageHandler[PosixConfig]):
+ """Configure integration test code coverage for POSIX hosts."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ super().__init__(args, host_state, inventory_path)
+
+ # Common temporary directory used on all POSIX hosts that will be created world writeable.
+ self.common_temp_path = f'/tmp/ansible-test-{generate_name()}'
+
+ def get_profiles(self): # type: () -> t.List[HostProfile]
+ """Return a list of profiles relevant for this handler."""
+ profiles = super().get_profiles()
+ profiles = [profile for profile in profiles if not isinstance(profile, ControllerProfile) or
+ profile.python.path != self.host_state.controller_profile.python.path]
+
+ return profiles
+
+ @property
+ def is_active(self): # type: () -> bool
+ """True if the handler should be used, otherwise False."""
+ return True
+
+ @property
+ def target_profile(self): # type: () -> t.Optional[PosixProfile]
+ """The POSIX target profile, if it uses a different Python interpreter than the controller, otherwise None."""
+ return t.cast(PosixProfile, self.profiles[0]) if self.profiles else None
+
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+ self.setup_controller()
+ self.setup_target()
+
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+ self.teardown_controller()
+ self.teardown_target()
+
+ def setup_controller(self):
+ """Perform setup for code coverage on the controller."""
+ coverage_config_path = os.path.join(self.common_temp_path, COVERAGE_CONFIG_NAME)
+ coverage_output_path = os.path.join(self.common_temp_path, ResultType.COVERAGE.name)
+
+ coverage_config = generate_coverage_config(self.args)
+
+ write_text_file(coverage_config_path, coverage_config, create_directories=True)
+
+ os.chmod(coverage_config_path, MODE_FILE)
+ os.mkdir(coverage_output_path)
+ os.chmod(coverage_output_path, MODE_DIRECTORY_WRITE)
+
+ def setup_target(self):
+ """Perform setup for code coverage on the target."""
+ if not self.target_profile:
+ return
+
+ if isinstance(self.target_profile, ControllerProfile):
+ return
+
+ self.run_playbook('posix_coverage_setup.yml', self.get_playbook_variables())
+
+ def teardown_controller(self): # type: () -> None
+ """Perform teardown for code coverage on the controller."""
+ coverage_temp_path = os.path.join(self.common_temp_path, ResultType.COVERAGE.name)
+ platform = get_coverage_platform(self.args.controller)
+
+ for filename in os.listdir(coverage_temp_path):
+ shutil.copyfile(os.path.join(coverage_temp_path, filename), os.path.join(ResultType.COVERAGE.path, update_coverage_filename(filename, platform)))
+
+ remove_tree(self.common_temp_path)
+
+ def teardown_target(self): # type: () -> None
+ """Perform teardown for code coverage on the target."""
+ if not self.target_profile:
+ return
+
+ if isinstance(self.target_profile, ControllerProfile):
+ return
+
+ profile = t.cast(SshTargetHostProfile, self.target_profile)
+ platform = get_coverage_platform(profile.config)
+ con = profile.get_controller_target_connections()[0]
+
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-coverage-', suffix='.tgz') as coverage_tgz:
+ try:
+ con.create_archive(chdir=self.common_temp_path, name=ResultType.COVERAGE.name, dst=coverage_tgz)
+ except SubprocessError as ex:
+ display.warning(f'Failed to download coverage results: {ex}')
+ else:
+ coverage_tgz.seek(0)
+
+ with tempfile.TemporaryDirectory() as temp_dir:
+ local_con = LocalConnection(self.args)
+ local_con.extract_archive(chdir=temp_dir, src=coverage_tgz)
+
+ base_dir = os.path.join(temp_dir, ResultType.COVERAGE.name)
+
+ for filename in os.listdir(base_dir):
+ shutil.copyfile(os.path.join(base_dir, filename), os.path.join(ResultType.COVERAGE.path, update_coverage_filename(filename, platform)))
+
+ self.run_playbook('posix_coverage_teardown.yml', self.get_playbook_variables())
+
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+
+ # Enable code coverage collection on Ansible modules (both local and remote).
+ # Used by the AnsiballZ wrapper generator in lib/ansible/executor/module_common.py to support code coverage.
+ config_file = os.path.join(self.common_temp_path, COVERAGE_CONFIG_NAME)
+
+ # Include the command, target and platform marker so the remote host can create a filename with that info.
+ # The generated AnsiballZ wrapper is responsible for adding '=python-{X.Y}=coverage.{hostname}.{pid}.{id}'
+ coverage_file = os.path.join(self.common_temp_path, ResultType.COVERAGE.name, '='.join((self.args.command, target_name, 'platform')))
+
+ if self.args.coverage_check:
+ # cause the 'coverage' module to be found, but not imported or enabled
+ coverage_file = ''
+
+ variables = dict(
+ _ANSIBLE_COVERAGE_CONFIG=config_file,
+ _ANSIBLE_COVERAGE_OUTPUT=coverage_file,
+ )
+
+ return variables
+
+ def create_inventory(self): # type: () -> None
+ """Create inventory."""
+ create_posix_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
+
+ def get_playbook_variables(self): # type: () -> t.Dict[str, str]
+ """Return a dictionary of variables for setup and teardown of POSIX coverage."""
+ return dict(
+ common_temp_dir=self.common_temp_path,
+ coverage_config=generate_coverage_config(self.args),
+ coverage_config_path=os.path.join(self.common_temp_path, COVERAGE_CONFIG_NAME),
+ coverage_output_path=os.path.join(self.common_temp_path, ResultType.COVERAGE.name),
+ mode_directory=f'{MODE_DIRECTORY:04o}',
+ mode_directory_write=f'{MODE_DIRECTORY_WRITE:04o}',
+ mode_file=f'{MODE_FILE:04o}',
+ )
+
+
+class WindowsCoverageHandler(CoverageHandler[WindowsConfig]):
+ """Configure integration test code coverage for Windows hosts."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ super().__init__(args, host_state, inventory_path)
+
+ # Common temporary directory used on all Windows hosts that will be created writable by everyone.
+ self.remote_temp_path = f'C:\\ansible_test_coverage_{generate_name()}'
+
+ @property
+ def is_active(self): # type: () -> bool
+ """True if the handler should be used, otherwise False."""
+ return self.profiles and not self.args.coverage_check
+
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+ self.run_playbook('windows_coverage_setup.yml', self.get_playbook_variables())
+
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+ with tempfile.TemporaryDirectory() as local_temp_path:
+ variables = self.get_playbook_variables()
+ variables.update(
+ local_temp_path=local_temp_path,
+ )
+
+ self.run_playbook('windows_coverage_teardown.yml', variables)
+
+ for filename in os.listdir(local_temp_path):
+ if all(isinstance(profile.config, WindowsRemoteConfig) for profile in self.profiles):
+ prefix = 'remote'
+ elif all(isinstance(profile.config, WindowsInventoryConfig) for profile in self.profiles):
+ prefix = 'inventory'
+ else:
+ raise NotImplementedError()
+
+ platform = f'{prefix}-{sanitize_host_name(os.path.splitext(filename)[0])}'
+
+ with zipfile.ZipFile(os.path.join(local_temp_path, filename)) as coverage_zip:
+ for item in coverage_zip.infolist():
+ if item.is_dir():
+ raise Exception(f'Unexpected directory in zip file: {item.filename}')
+
+ item.filename = update_coverage_filename(item.filename, platform)
+
+ coverage_zip.extract(item, ResultType.COVERAGE.path)
+
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+
+ # Include the command, target and platform marker so the remote host can create a filename with that info.
+ # The remote is responsible for adding '={language-version}=coverage.{hostname}.{pid}.{id}'
+ coverage_name = '='.join((self.args.command, target_name, 'platform'))
+
+ variables = dict(
+ _ANSIBLE_COVERAGE_REMOTE_OUTPUT=os.path.join(self.remote_temp_path, coverage_name),
+ _ANSIBLE_COVERAGE_REMOTE_PATH_FILTER=os.path.join(data_context().content.root, '*'),
+ )
+
+ return variables
+
+ def create_inventory(self): # type: () -> None
+ """Create inventory."""
+ create_windows_inventory(self.args, self.inventory_path, self.host_state.target_profiles)
+
+ def get_playbook_variables(self): # type: () -> t.Dict[str, str]
+ """Return a dictionary of variables for setup and teardown of Windows coverage."""
+ return dict(
+ remote_temp_path=self.remote_temp_path,
+ )
+
+
+class CoverageManager:
+ """Manager for code coverage configuration and state."""
+ def __init__(self, args, host_state, inventory_path): # type: (IntegrationConfig, HostState, str) -> None
+ self.args = args
+ self.host_state = host_state
+ self.inventory_path = inventory_path
+
+ if self.args.coverage:
+ handler_types = set(get_handler_type(type(profile.config)) for profile in host_state.profiles)
+ handler_types.discard(None)
+ else:
+ handler_types = set()
+
+ handlers = [handler_type(args=args, host_state=host_state, inventory_path=inventory_path) for handler_type in handler_types]
+
+ self.handlers = [handler for handler in handlers if handler.is_active]
+
+ def setup(self): # type: () -> None
+ """Perform setup for code coverage."""
+ if not self.args.coverage:
+ return
+
+ for handler in self.handlers:
+ handler.setup()
+
+ def teardown(self): # type: () -> None
+ """Perform teardown for code coverage."""
+ if not self.args.coverage:
+ return
+
+ for handler in self.handlers:
+ handler.teardown()
+
+ def get_environment(self, target_name, aliases): # type: (str, t.Tuple[str, ...]) -> t.Dict[str, str]
+ """Return a dictionary of environment variables for running tests with code coverage."""
+ if not self.args.coverage or 'non_local/' in aliases:
+ return {}
+
+ env = {}
+
+ for handler in self.handlers:
+ env.update(handler.get_environment(target_name, aliases))
+
+ return env
+
+
+@cache
+def get_config_handler_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[CoverageHandler]]
+ """Create and return a mapping of HostConfig types to CoverageHandler types."""
+ return get_type_map(CoverageHandler, HostConfig)
+
+
+def get_handler_type(config_type): # type: (t.Type[HostConfig]) -> t.Optional[t.Type[CoverageHandler]]
+ """Return the coverage handler type associated with the given host config type if found, otherwise return None."""
+ queue = [config_type]
+ type_map = get_config_handler_type_map()
+
+ while queue:
+ config_type = queue.pop(0)
+ handler_type = type_map.get(config_type)
+
+ if handler_type:
+ return handler_type
+
+ queue.extend(config_type.__bases__)
+
+ return None
+
+
+def update_coverage_filename(original_filename, platform): # type: (str, str) -> str
+ """Validate the given filename and insert the specified platform, then return the result."""
+ parts = original_filename.split('=')
+
+ if original_filename != os.path.basename(original_filename) or len(parts) != 5 or parts[2] != 'platform':
+ raise Exception(f'Unexpected coverage filename: {original_filename}')
+
+ parts[2] = platform
+
+ updated_filename = '='.join(parts)
+
+ display.info(f'Coverage file for platform "{platform}": {original_filename} -> {updated_filename}', verbosity=3)
+
+ return updated_filename
diff --git a/test/lib/ansible_test/_internal/commands/integration/filters.py b/test/lib/ansible_test/_internal/commands/integration/filters.py
new file mode 100644
index 0000000000..9854de57c6
--- /dev/null
+++ b/test/lib/ansible_test/_internal/commands/integration/filters.py
@@ -0,0 +1,273 @@
+"""Logic for filtering out integration test targets which are unsupported for the currently provided arguments and available hosts."""
+from __future__ import annotations
+
+import abc
+import typing as t
+
+from ...config import (
+ IntegrationConfig,
+)
+
+from ...util import (
+ cache,
+ display,
+ get_type_map,
+)
+
+from ...target import (
+ IntegrationTarget,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ DockerConfig,
+ FallbackReason,
+ HostConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ RemoteConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from ...host_profiles import (
+ HostProfile,
+)
+
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+TPosixConfig = t.TypeVar('TPosixConfig', bound=PosixConfig)
+TRemoteConfig = t.TypeVar('TRemoteConfig', bound=RemoteConfig)
+THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
+
+
+class TargetFilter(t.Generic[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for target filters."""
+ def __init__(self, args, configs, controller): # type: (IntegrationConfig, t.List[THostConfig], bool) -> None
+ self.args = args
+ self.configs = configs
+ self.controller = controller
+ self.host_type = 'controller' if controller else 'target'
+
+ # values which are not host specific
+ self.include_targets = args.include
+ self.allow_root = args.allow_root
+ self.allow_destructive = args.allow_destructive
+
+ @property
+ def config(self): # type: () -> THostConfig
+ """The configuration to filter. Only valid when there is a single config."""
+ if len(self.configs) != 1:
+ raise Exception()
+
+ return self.configs[0]
+
+ def skip(
+ self,
+ skip, # type: str
+ reason, # type: str
+ targets, # type: t.List[IntegrationTarget]
+ exclude, # type: t.Set[str]
+ override=None, # type: t.Optional[t.List[str]]
+ ): # type: (...) -> None
+ """Apply the specified skip rule to the given targets by updating the provided exclude list."""
+ if skip.startswith('skip/'):
+ skipped = [target.name for target in targets if skip in target.skips and (not override or target.name not in override)]
+ else:
+ skipped = [target.name for target in targets if f'{skip}/' in target.aliases and (not override or target.name not in override)]
+
+ self.apply_skip(f'"{skip}"', reason, skipped, exclude)
+
+ def apply_skip(self, marked, reason, skipped, exclude): # type: (str, str, t.List[str], t.Set[str]) -> None
+ """Apply the provided skips to the given exclude list."""
+ if not skipped:
+ return
+
+ exclude.update(skipped)
+ display.warning(f'Excluding {self.host_type} tests marked {marked} {reason}: {", ".join(skipped)}')
+
+ def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
+ """Filter the list of profiles, returning only those which are not skipped for the given target."""
+ del target
+ return profiles
+
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ if self.controller and self.args.host_settings.controller_fallback and targets:
+ affected_targets = [target.name for target in targets]
+ reason = self.args.host_settings.controller_fallback.reason
+
+ if reason == FallbackReason.ENVIRONMENT:
+ exclude.update(affected_targets)
+ display.warning(f'Excluding {self.host_type} tests since a fallback controller is in use: {", ".join(affected_targets)}')
+ elif reason == FallbackReason.PYTHON:
+ display.warning(f'Some {self.host_type} tests may be redundant since a fallback python is in use: {", ".join(affected_targets)}')
+
+ if not self.allow_destructive and not self.config.is_managed:
+ override_destructive = set(target for target in self.include_targets if target.startswith('destructive/'))
+ override = [target.name for target in targets if override_destructive & set(target.skips)]
+
+ self.skip('destructive', 'which require --allow-destructive or prefixing with "destructive/" to run on unmanaged hosts', targets, exclude, override)
+
+ if not self.args.allow_disabled:
+ override_disabled = set(target for target in self.args.include if target.startswith('disabled/'))
+ override = [target.name for target in targets if override_disabled & set(target.skips)]
+
+ self.skip('disabled', 'which require --allow-disabled or prefixing with "disabled/"', targets, exclude, override)
+
+ if not self.args.allow_unsupported:
+ override_unsupported = set(target for target in self.args.include if target.startswith('unsupported/'))
+ override = [target.name for target in targets if override_unsupported & set(target.skips)]
+
+ self.skip('unsupported', 'which require --allow-unsupported or prefixing with "unsupported/"', targets, exclude, override)
+
+ if not self.args.allow_unstable:
+ override_unstable = set(target for target in self.args.include if target.startswith('unstable/'))
+
+ if self.args.allow_unstable_changed:
+ override_unstable |= set(self.args.metadata.change_description.focused_targets or [])
+
+ override = [target.name for target in targets if override_unstable & set(target.skips)]
+
+ self.skip('unstable', 'which require --allow-unstable or prefixing with "unstable/"', targets, exclude, override)
+
+
+class PosixTargetFilter(TargetFilter[TPosixConfig]):
+ """Target filter for POSIX hosts."""
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ super().filter_targets(targets, exclude)
+
+ if not self.allow_root and not self.config.have_root:
+ self.skip('needs/root', 'which require --allow-root or running as root', targets, exclude)
+
+ self.skip(f'skip/python{self.config.python.version}', f'which are not supported by Python {self.config.python.version}', targets, exclude)
+ self.skip(f'skip/python{self.config.python.major_version}', f'which are not supported by Python {self.config.python.major_version}', targets, exclude)
+
+
+class DockerTargetFilter(PosixTargetFilter[DockerConfig]):
+ """Target filter for docker hosts."""
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ super().filter_targets(targets, exclude)
+
+ self.skip('skip/docker', 'which cannot run under docker', targets, exclude)
+
+ if not self.config.privileged:
+ self.skip('needs/privileged', 'which require --docker-privileged to run under docker', targets, exclude)
+
+
+class PosixSshTargetFilter(PosixTargetFilter[PosixSshConfig]):
+ """Target filter for POSIX SSH hosts."""
+
+
+class RemoteTargetFilter(TargetFilter[TRemoteConfig]):
+ """Target filter for remote Ansible Core CI managed hosts."""
+ def filter_profiles(self, profiles, target): # type: (t.List[THostProfile], IntegrationTarget) -> t.List[THostProfile]
+ """Filter the list of profiles, returning only those which are not skipped for the given target."""
+ profiles = super().filter_profiles(profiles, target)
+
+ skipped_profiles = [profile for profile in profiles if any(skip in target.skips for skip in get_remote_skip_aliases(profile.config))]
+
+ if skipped_profiles:
+ configs = [profile.config for profile in skipped_profiles] # type: t.List[TRemoteConfig]
+ display.warning(f'Excluding skipped hosts from inventory: {", ".join(config.name for config in configs)}')
+
+ profiles = [profile for profile in profiles if profile not in skipped_profiles]
+
+ return profiles
+
+ def filter_targets(self, targets, exclude): # type: (t.List[IntegrationTarget], t.Set[str]) -> None
+ """Filter the list of targets, adding any which this host profile cannot support to the provided exclude list."""
+ super().filter_targets(targets, exclude)
+
+ if len(self.configs) > 1:
+ host_skips = {host.name: get_remote_skip_aliases(host) for host in self.configs}
+
+ # Skip only targets which skip all hosts.
+ # Targets that skip only some hosts will be handled during inventory generation.
+ skipped = [target.name for target in targets if all(any(skip in target.skips for skip in skips) for skips in host_skips.values())]
+
+ if skipped:
+ exclude.update(skipped)
+ display.warning(f'Excluding tests which do not support {", ".join(host_skips.keys())}: {", ".join(skipped)}')
+ else:
+ skips = get_remote_skip_aliases(self.config)
+
+ for skip, reason in skips.items():
+ self.skip(skip, reason, targets, exclude)
+
+
+class PosixRemoteTargetFilter(PosixTargetFilter[PosixRemoteConfig], RemoteTargetFilter[PosixRemoteConfig]):
+ """Target filter for POSIX remote hosts."""
+
+
+class WindowsRemoteTargetFilter(RemoteTargetFilter[WindowsRemoteConfig]):
+ """Target filter for remote Windows hosts."""
+
+
+class WindowsInventoryTargetFilter(TargetFilter[WindowsInventoryConfig]):
+ """Target filter for Windows inventory."""
+
+
+class NetworkRemoteTargetFilter(RemoteTargetFilter[NetworkRemoteConfig]):
+ """Target filter for remote network hosts."""
+
+
+class NetworkInventoryTargetFilter(TargetFilter[NetworkInventoryConfig]):
+ """Target filter for network inventory."""
+
+
+class OriginTargetFilter(TargetFilter[OriginConfig]):
+ """Target filter for localhost."""
+
+
+@cache
+def get_host_target_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[TargetFilter]]
+ """Create and return a mapping of HostConfig types to TargetFilter types."""
+ return get_type_map(TargetFilter, HostConfig)
+
+
+def get_target_filter(args, configs, controller): # type: (IntegrationConfig, t.List[HostConfig], bool) -> TargetFilter
+ """Return an integration test target filter instance for the provided host configurations."""
+ target_type = type(configs[0])
+
+ if issubclass(target_type, ControllerConfig):
+ target_type = type(args.controller)
+ configs = [args.controller]
+
+ filter_type = get_host_target_type_map()[target_type]
+ filter_instance = filter_type(args, configs, controller)
+
+ return filter_instance
+
+
+def get_remote_skip_aliases(config): # type: (RemoteConfig) -> t.Dict[str, str]
+ """Return a dictionary of skip aliases and the reason why they apply."""
+ if isinstance(config, PosixRemoteConfig):
+ return get_platform_skip_aliases(config.platform, config.version, config.arch)
+
+ return get_platform_skip_aliases(config.platform, config.version, None)
+
+
+def get_platform_skip_aliases(platform, version, arch): # type: (str, str, t.Optional[str]) -> t.Dict[str, str]
+ """Return a dictionary of skip aliases and the reason why they apply."""
+ skips = {
+ f'skip/{platform}': platform,
+ f'skip/{platform}/{version}': f'{platform} {version}',
+ f'skip/{platform}{version}': f'{platform} {version}', # legacy syntax, use above format
+ }
+
+ if arch:
+ skips.update({
+ f'skip/{arch}': arch,
+ f'skip/{arch}/{platform}': f'{platform} on {arch}',
+ f'skip/{arch}/{platform}/{version}': f'{platform} {version} on {arch}',
+ })
+
+ skips = {alias: f'which are not supported by {description}' for alias, description in skips.items()}
+
+ return skips
diff --git a/test/lib/ansible_test/_internal/commands/integration/network.py b/test/lib/ansible_test/_internal/commands/integration/network.py
index a723e6cbdc..f9953144a3 100644
--- a/test/lib/ansible_test/_internal/commands/integration/network.py
+++ b/test/lib/ansible_test/_internal/commands/integration/network.py
@@ -1,45 +1,18 @@
"""Network integration testing."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-import time
-import textwrap
-import functools
-
-from ... import types as t
-
-from ...thread import (
- WrappedThread,
-)
-
-from ...core_ci import (
- AnsibleCoreCI,
- SshKey,
-)
-
-from ...manage_ci import (
- ManageNetworkCI,
- get_network_settings,
-)
-
-from ...io import (
- write_text_file,
-)
from ...util import (
ApplicationError,
- display,
ANSIBLE_TEST_CONFIG_ROOT,
)
from ...util_common import (
- get_python_path,
handle_layout_messages,
)
from ...target import (
- IntegrationTarget,
walk_network_integration_targets,
)
@@ -59,18 +32,22 @@ from ...data import (
data_context,
)
+from ...host_configs import (
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+)
+
-def command_network_integration(args):
- """
- :type args: NetworkIntegrationConfig
- """
+def command_network_integration(args): # type: (NetworkIntegrationConfig) -> None
+ """Entry point for the `network-integration` command."""
handle_layout_messages(data_context().content.integration_messages)
inventory_relative_path = get_inventory_relative_path(args)
template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
- if args.inventory:
- inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
+ if issubclass(args.target_type, NetworkInventoryConfig):
+ inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path,
+ args.only_target(NetworkInventoryConfig).path or os.path.basename(inventory_relative_path))
else:
inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
@@ -80,7 +57,7 @@ def command_network_integration(args):
else:
inventory_exists = os.path.isfile(inventory_path)
- if not args.explain and not args.platform and not inventory_exists:
+ if not args.explain and not issubclass(args.target_type, NetworkRemoteConfig) and not inventory_exists:
raise ApplicationError(
'Inventory not found: %s\n'
'Use --inventory to specify the inventory path.\n'
@@ -92,155 +69,5 @@ def command_network_integration(args):
delegate_inventory(args, inventory_path)
all_targets = tuple(walk_network_integration_targets(include_hidden=True))
- internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
- instances = [] # type: t.List[WrappedThread]
-
- if args.platform:
- get_python_path(args, args.python_executable) # initialize before starting threads
-
- configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
-
- for platform_version in args.platform:
- platform, version = platform_version.split('/', 1)
- config = configs.get(platform_version)
-
- if not config:
- continue
-
- instance = WrappedThread(functools.partial(network_run, args, platform, version, config))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- remotes = [instance.wait_for_result() for instance in instances]
- inventory = network_inventory(args, remotes)
-
- display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
-
- if not args.explain:
- write_text_file(inventory_path, inventory)
-
- success = False
-
- try:
- command_integration_filtered(args, internal_targets, all_targets, inventory_path)
- success = True
- finally:
- if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
- for instance in instances:
- instance.result.stop()
-
-
-def network_init(args, internal_targets): # type: (NetworkIntegrationConfig, t.Tuple[IntegrationTarget, ...]) -> None
- """Initialize platforms for network integration tests."""
- if not args.platform:
- return
-
- if args.metadata.instance_config is not None:
- return
-
- platform_targets = set(a for target in internal_targets for a in target.aliases if a.startswith('network/'))
-
- instances = [] # type: t.List[WrappedThread]
-
- # generate an ssh key (if needed) up front once, instead of for each instance
- SshKey(args)
-
- for platform_version in args.platform:
- platform, version = platform_version.split('/', 1)
- platform_target = 'network/%s/' % platform
-
- if platform_target not in platform_targets:
- display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
- platform_version, platform))
- continue
-
- instance = WrappedThread(functools.partial(network_start, args, platform, version))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
-
-
-def network_start(args, platform, version):
- """
- :type args: NetworkIntegrationConfig
- :type platform: str
- :type version: str
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider)
- core_ci.start()
-
- return core_ci.save()
-
-
-def network_run(args, platform, version, config):
- """
- :type args: NetworkIntegrationConfig
- :type platform: str
- :type version: str
- :type config: dict[str, str]
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, provider=args.remote_provider, load=False)
- core_ci.load(config)
- core_ci.wait()
-
- manage = ManageNetworkCI(args, core_ci)
- manage.wait()
-
- return core_ci
-
-
-def network_inventory(args, remotes):
- """
- :type args: NetworkIntegrationConfig
- :type remotes: list[AnsibleCoreCI]
- :rtype: str
- """
- groups = dict([(remote.platform, []) for remote in remotes])
- net = []
-
- for remote in remotes:
- options = dict(
- ansible_host=remote.connection.hostname,
- ansible_user=remote.connection.username,
- ansible_ssh_private_key_file=os.path.abspath(remote.ssh_key.key),
- )
-
- settings = get_network_settings(args, remote.platform, remote.version)
-
- options.update(settings.inventory_vars)
-
- groups[remote.platform].append(
- '%s %s' % (
- remote.name.replace('.', '-'),
- ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
- )
- )
-
- net.append(remote.platform)
-
- groups['net:children'] = net
-
- template = ''
-
- for group in groups:
- hosts = '\n'.join(groups[group])
-
- template += textwrap.dedent("""
- [%s]
- %s
- """) % (group, hosts)
-
- inventory = template
-
- return inventory
+ host_state, internal_targets = command_integration_filter(args, all_targets)
+ command_integration_filtered(args, host_state, internal_targets, all_targets, inventory_path)
diff --git a/test/lib/ansible_test/_internal/commands/integration/posix.py b/test/lib/ansible_test/_internal/commands/integration/posix.py
index 2f839c8aad..be78359c4e 100644
--- a/test/lib/ansible_test/_internal/commands/integration/posix.py
+++ b/test/lib/ansible_test/_internal/commands/integration/posix.py
@@ -1,22 +1,16 @@
"""POSIX integration testing."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-from ... import types as t
-
-from ...util import (
- ANSIBLE_TEST_DATA_ROOT,
-)
-
from ...util_common import (
handle_layout_messages,
)
from ...containers import (
- SshConnectionDetail,
create_container_hooks,
+ local_ssh,
+ root_ssh,
)
from ...target import (
@@ -38,20 +32,17 @@ from ...data import (
)
-def command_posix_integration(args):
- """
- :type args: PosixIntegrationConfig
- """
+def command_posix_integration(args): # type: (PosixIntegrationConfig) -> None
+ """Entry point for the `integration` command."""
handle_layout_messages(data_context().content.integration_messages)
inventory_relative_path = get_inventory_relative_path(args)
- inventory_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, os.path.basename(inventory_relative_path))
+ inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
all_targets = tuple(walk_posix_integration_targets(include_hidden=True))
- internal_targets = command_integration_filter(args, all_targets)
-
- managed_connections = None # type: t.Optional[t.List[SshConnectionDetail]]
-
- pre_target, post_target = create_container_hooks(args, managed_connections)
+ host_state, internal_targets = command_integration_filter(args, all_targets)
+ control_connections = [local_ssh(args, host_state.controller_profile.python)]
+ managed_connections = [root_ssh(ssh) for ssh in host_state.get_controller_target_connections()]
+ pre_target, post_target = create_container_hooks(args, control_connections, managed_connections)
- command_integration_filtered(args, internal_targets, all_targets, inventory_path, pre_target=pre_target, post_target=post_target)
+ command_integration_filtered(args, host_state, internal_targets, all_targets, inventory_path, pre_target=pre_target, post_target=post_target)
diff --git a/test/lib/ansible_test/_internal/commands/integration/windows.py b/test/lib/ansible_test/_internal/commands/integration/windows.py
index 5f67b5076f..f6b4494245 100644
--- a/test/lib/ansible_test/_internal/commands/integration/windows.py
+++ b/test/lib/ansible_test/_internal/commands/integration/windows.py
@@ -1,56 +1,24 @@
"""Windows integration testing."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-import time
-import textwrap
-import functools
-
-from ... import types as t
-
-from ...thread import (
- WrappedThread,
-)
-
-from ...core_ci import (
- AnsibleCoreCI,
- SshKey,
-)
-
-from ...manage_ci import (
- ManageWindowsCI,
-)
-
-from ...io import (
- write_text_file,
-)
from ...util import (
ApplicationError,
- display,
ANSIBLE_TEST_CONFIG_ROOT,
- tempdir,
- open_zipfile,
)
from ...util_common import (
- get_python_path,
- ResultType,
handle_layout_messages,
)
from ...containers import (
- SshConnectionDetail,
create_container_hooks,
-)
-
-from ...ansible_util import (
- run_playbook,
+ local_ssh,
+ root_ssh,
)
from ...target import (
- IntegrationTarget,
walk_windows_integration_targets,
)
@@ -58,6 +26,11 @@ from ...config import (
WindowsIntegrationConfig,
)
+from ...host_configs import (
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
from . import (
command_integration_filter,
command_integration_filtered,
@@ -70,27 +43,21 @@ from ...data import (
data_context,
)
-from ...executor import (
- parse_inventory,
- get_hosts,
-)
-
-def command_windows_integration(args):
- """
- :type args: WindowsIntegrationConfig
- """
+def command_windows_integration(args): # type: (WindowsIntegrationConfig) -> None
+ """Entry point for the `windows-integration` command."""
handle_layout_messages(data_context().content.integration_messages)
inventory_relative_path = get_inventory_relative_path(args)
template_path = os.path.join(ANSIBLE_TEST_CONFIG_ROOT, os.path.basename(inventory_relative_path)) + '.template'
- if args.inventory:
- inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path, args.inventory)
+ if issubclass(args.target_type, WindowsInventoryConfig):
+ inventory_path = os.path.join(data_context().content.root, data_context().content.integration_path,
+ args.only_target(WindowsInventoryConfig).path or os.path.basename(inventory_relative_path))
else:
inventory_path = os.path.join(data_context().content.root, inventory_relative_path)
- if not args.explain and not args.windows and not os.path.isfile(inventory_path):
+ if not args.explain and not issubclass(args.target_type, WindowsRemoteConfig) and not os.path.isfile(inventory_path):
raise ApplicationError(
'Inventory not found: %s\n'
'Use --inventory to specify the inventory path.\n'
@@ -102,209 +69,9 @@ def command_windows_integration(args):
delegate_inventory(args, inventory_path)
all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
- internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
- instances = [] # type: t.List[WrappedThread]
- managed_connections = [] # type: t.List[SshConnectionDetail]
-
- if args.windows:
- get_python_path(args, args.python_executable) # initialize before starting threads
-
- configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
-
- for version in args.windows:
- config = configs['windows/%s' % version]
-
- instance = WrappedThread(functools.partial(windows_run, args, version, config))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- remotes = [instance.wait_for_result() for instance in instances]
- inventory = windows_inventory(remotes)
-
- display.info('>>> Inventory: %s\n%s' % (inventory_path, inventory.strip()), verbosity=3)
-
- if not args.explain:
- write_text_file(inventory_path, inventory)
-
- for core_ci in remotes:
- ssh_con = core_ci.connection
- ssh = SshConnectionDetail(core_ci.name, ssh_con.hostname, 22, ssh_con.username, core_ci.ssh_key.key, shell_type='powershell')
- managed_connections.append(ssh)
- elif args.explain:
- identity_file = SshKey(args).key
-
- # mock connection details to prevent tracebacks in explain mode
- managed_connections = [SshConnectionDetail(
- name='windows',
- host='windows',
- port=22,
- user='administrator',
- identity_file=identity_file,
- shell_type='powershell',
- )]
- else:
- inventory = parse_inventory(args, inventory_path)
- hosts = get_hosts(inventory, 'windows')
- identity_file = SshKey(args).key
-
- managed_connections = [SshConnectionDetail(
- name=name,
- host=config['ansible_host'],
- port=22,
- user=config['ansible_user'],
- identity_file=identity_file,
- shell_type='powershell',
- ) for name, config in hosts.items()]
-
- if managed_connections:
- display.info('Generated SSH connection details from inventory:\n%s' % (
- '\n'.join('%s %s@%s:%d' % (ssh.name, ssh.user, ssh.host, ssh.port) for ssh in managed_connections)), verbosity=1)
-
- pre_target, post_target = create_container_hooks(args, managed_connections)
-
- remote_temp_path = None
-
- if args.coverage and not args.coverage_check:
- # Create the remote directory that is writable by everyone. Use Ansible to talk to the remote host.
- remote_temp_path = 'C:\\ansible_test_coverage_%s' % time.time()
- playbook_vars = {'remote_temp_path': remote_temp_path}
- run_playbook(args, inventory_path, 'windows_coverage_setup.yml', playbook_vars)
-
- success = False
-
- try:
- command_integration_filtered(args, internal_targets, all_targets, inventory_path, pre_target=pre_target,
- post_target=post_target, remote_temp_path=remote_temp_path)
- success = True
- finally:
- if remote_temp_path:
- # Zip up the coverage files that were generated and fetch it back to localhost.
- with tempdir() as local_temp_path:
- playbook_vars = {'remote_temp_path': remote_temp_path, 'local_temp_path': local_temp_path}
- run_playbook(args, inventory_path, 'windows_coverage_teardown.yml', playbook_vars)
-
- for filename in os.listdir(local_temp_path):
- with open_zipfile(os.path.join(local_temp_path, filename)) as coverage_zip:
- coverage_zip.extractall(ResultType.COVERAGE.path)
-
- if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
- for instance in instances:
- instance.result.stop()
-
-
-# noinspection PyUnusedLocal
-def windows_init(args, internal_targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: WindowsIntegrationConfig
- :type internal_targets: tuple[IntegrationTarget]
- """
- # generate an ssh key (if needed) up front once, instead of for each instance
- SshKey(args)
-
- if not args.windows:
- return
-
- if args.metadata.instance_config is not None:
- return
-
- instances = [] # type: t.List[WrappedThread]
-
- for version in args.windows:
- instance = WrappedThread(functools.partial(windows_start, args, version))
- instance.daemon = True
- instance.start()
- instances.append(instance)
-
- while any(instance.is_alive() for instance in instances):
- time.sleep(1)
-
- args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
-
-
-def windows_start(args, version):
- """
- :type args: WindowsIntegrationConfig
- :type version: str
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider)
- core_ci.start()
-
- return core_ci.save()
-
-
-def windows_run(args, version, config):
- """
- :type args: WindowsIntegrationConfig
- :type version: str
- :type config: dict[str, str]
- :rtype: AnsibleCoreCI
- """
- core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, provider=args.remote_provider, load=False)
- core_ci.load(config)
- core_ci.wait()
-
- manage = ManageWindowsCI(core_ci)
- manage.wait()
-
- return core_ci
-
-
-def windows_inventory(remotes):
- """
- :type remotes: list[AnsibleCoreCI]
- :rtype: str
- """
- hosts = []
-
- for remote in remotes:
- options = dict(
- ansible_host=remote.connection.hostname,
- ansible_user=remote.connection.username,
- ansible_password=remote.connection.password,
- ansible_port=remote.connection.port,
- )
-
- # used for the connection_windows_ssh test target
- if remote.ssh_key:
- options["ansible_ssh_private_key_file"] = os.path.abspath(remote.ssh_key.key)
-
- if remote.name == 'windows-2016':
- options.update(
- # force 2016 to use NTLM + HTTP message encryption
- ansible_connection='winrm',
- ansible_winrm_server_cert_validation='ignore',
- ansible_winrm_transport='ntlm',
- ansible_winrm_scheme='http',
- ansible_port='5985',
- )
- else:
- options.update(
- ansible_connection='winrm',
- ansible_winrm_server_cert_validation='ignore',
- )
-
- hosts.append(
- '%s %s' % (
- remote.name.replace('/', '_'),
- ' '.join('%s="%s"' % (k, options[k]) for k in sorted(options)),
- )
- )
-
- template = """
- [windows]
- %s
-
- # support winrm binary module tests (temporary solution)
- [testhost:children]
- windows
- """
-
- template = textwrap.dedent(template)
- inventory = template % ('\n'.join(hosts))
+ host_state, internal_targets = command_integration_filter(args, all_targets)
+ control_connections = [local_ssh(args, host_state.controller_profile.python)]
+ managed_connections = [root_ssh(ssh) for ssh in host_state.get_controller_target_connections()]
+ pre_target, post_target = create_container_hooks(args, control_connections, managed_connections)
- return inventory
+ command_integration_filtered(args, host_state, internal_targets, all_targets, inventory_path, pre_target=pre_target, post_target=post_target)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
index 45170ff113..1dcb0e8f58 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py
@@ -1,17 +1,30 @@
"""Execute Ansible sanity tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import glob
+import hashlib
+import json
import os
+import pathlib
import re
import collections
+import typing as t
-from ... import types as t
+from ...constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_ONLY_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from ...encoding import (
+ to_bytes,
+)
from ...io import (
read_json_file,
+ write_json_file,
+ write_text_file,
)
from ...util import (
@@ -21,26 +34,23 @@ from ...util import (
import_plugins,
load_plugins,
parse_to_list_of_dict,
- ABC,
ANSIBLE_TEST_CONTROLLER_ROOT,
ANSIBLE_TEST_TARGET_ROOT,
is_binary_file,
read_lines_without_comments,
- get_available_python_versions,
- find_python,
is_subdir,
paths_to_dirs,
get_ansible_version,
str_to_version,
- SUPPORTED_PYTHON_VERSIONS,
- CONTROLLER_PYTHON_VERSIONS,
- REMOTE_ONLY_PYTHON_VERSIONS,
+ cache,
+ remove_tree,
)
from ...util_common import (
- run_command,
- intercept_command,
+ intercept_python,
handle_layout_messages,
+ yamlcheck,
+ create_result_directories,
)
from ...ansible_util import (
@@ -57,7 +67,12 @@ from ...executor import (
get_changes_filter,
AllTargetsSkipped,
Delegate,
- install_command_requirements,
+)
+
+from ...python_requirements import (
+ PipInstall,
+ collect_requirements,
+ run_pip,
)
from ...config import (
@@ -80,6 +95,28 @@ from ...content_config import (
get_content_config,
)
+from ...host_configs import (
+ PosixConfig,
+ PythonConfig,
+ VirtualPythonConfig,
+)
+
+from ...host_profiles import (
+ PosixProfile,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
+
+from ...venv import (
+ create_virtual_environment,
+)
+
COMMAND = 'sanity'
SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity')
TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity')
@@ -89,6 +126,11 @@ def command_sanity(args):
"""
:type args: SanityConfig
"""
+ create_result_directories(args)
+
+ target_configs = t.cast(t.List[PosixConfig], args.targets)
+ target_versions = {target.python.version: target for target in target_configs} # type: t.Dict[str, PosixConfig]
+
handle_layout_messages(data_context().content.sanity_messages)
changes = get_changes_filter(args)
@@ -98,23 +140,31 @@ def command_sanity(args):
if not targets.include:
raise AllTargetsSkipped()
- if args.delegate:
- raise Delegate(require=changes, exclude=args.exclude)
-
tests = sanity_get_tests()
if args.test:
+ disabled = []
tests = [target for target in tests if target.name in args.test]
else:
disabled = [target.name for target in tests if not target.enabled and not args.allow_disabled]
tests = [target for target in tests if target.enabled or args.allow_disabled]
- if disabled:
- display.warning('Skipping tests disabled by default without --allow-disabled: %s' % ', '.join(sorted(disabled)))
-
if args.skip_test:
tests = [target for target in tests if target.name not in args.skip_test]
+ targets_use_pypi = any(isinstance(test, SanityMultipleVersion) and test.needs_pypi for test in tests) and not args.list_tests
+ host_state = prepare_profiles(args, targets_use_pypi=targets_use_pypi) # sanity
+
+ if args.delegate:
+ raise Delegate(host_state=host_state, require=changes, exclude=args.exclude)
+
+ configure_pypi_proxy(args, host_state.controller_profile) # sanity
+
+ if disabled:
+ display.warning('Skipping tests disabled by default without --allow-disabled: %s' % ', '.join(sorted(disabled)))
+
+ target_profiles = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)} # type: t.Dict[str, PosixProfile]
+
total = 0
failed = []
@@ -123,46 +173,20 @@ def command_sanity(args):
display.info(test.name)
continue
- available_versions = sorted(get_available_python_versions().keys())
-
- if args.python:
- # specific version selected
- versions = (args.python,)
- elif isinstance(test, SanityMultipleVersion):
- # try all supported versions for multi-version tests when a specific version has not been selected
- versions = test.supported_python_versions
- elif not test.supported_python_versions or args.python_version in test.supported_python_versions:
- # the test works with any version or the version we're already running
- versions = (args.python_version,)
- else:
- # available versions supported by the test
- versions = tuple(sorted(set(available_versions) & set(test.supported_python_versions)))
- # use the lowest available version supported by the test or the current version as a fallback (which will be skipped)
- versions = versions[:1] or (args.python_version,)
+ for version in SUPPORTED_PYTHON_VERSIONS:
+ options = ''
- for version in versions:
if isinstance(test, SanityMultipleVersion):
- skip_version = version
+ if version not in target_versions and version not in args.host_settings.skipped_python_versions:
+ continue # version was not requested, skip it silently
else:
- skip_version = None
-
- options = ''
+ if version != args.controller_python.version:
+ continue # only multi-version sanity tests use target versions, the rest use the controller version
if test.supported_python_versions and version not in test.supported_python_versions:
- # There are two ways this situation can occur:
- #
- # - A specific Python version was requested with the `--python` option and that version is not supported by the test.
- # This means that the test supports only a subset of the controller supported Python versions, and not the one given by the `--python` option.
- # Or that a remote-only Python version was specified for a Python based sanity test that is not multi-version.
- #
- # - No specific Python version was requested and no supported version was found on the system.
- # This means that the test supports only a subset of the controller supported Python versions, and not the one used to run ansible-test.
- # Or that the Python version used to run ansible-test is not supported by the controller, a condition which will soon not be possible.
- #
- # Neither of these are affected by the Python versions supported by a collection.
- result = SanitySkipped(test.name, skip_version)
- result.reason = "Skipping sanity test '%s' on Python %s. Supported Python versions: %s" % (
- test.name, version, ', '.join(test.supported_python_versions))
+ result = SanitySkipped(test.name, version)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} because it is unsupported.' \
+ f' Supported Python versions: {", ".join(test.supported_python_versions)}'
else:
if isinstance(test, SanityCodeSmellTest):
settings = test.load_processor(args)
@@ -194,37 +218,53 @@ def command_sanity(args):
test_needed = bool(usable_targets or test.no_targets)
result = None
- if test_needed and not args.python and version not in available_versions:
+ if test_needed and version in args.host_settings.skipped_python_versions:
# Deferred checking of Python availability. Done here since it is now known to be required for running the test.
# Earlier checking could cause a spurious warning to be generated for a collection which does not support the Python version.
- # If the `--python` option was used, this warning will be skipped and an error will be reported when running the test instead.
- result = SanitySkipped(test.name, skip_version)
- result.reason = "Skipping sanity test '%s' on Python %s due to missing interpreter." % (test.name, version)
+ # If the user specified a Python version, an error will be generated before reaching this point when the Python interpreter is not found.
+ result = SanitySkipped(test.name, version)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} because it could not be found.'
if not result:
- if test.supported_python_versions:
- display.info("Running sanity test '%s' with Python %s" % (test.name, version))
+ if isinstance(test, SanityMultipleVersion):
+ display.info(f'Running sanity test "{test.name}" on Python {version}')
else:
- display.info("Running sanity test '%s'" % test.name)
+ display.info(f'Running sanity test "{test.name}"')
if test_needed and not result:
- install_command_requirements(args, version, context=test.name, enable_pyyaml_check=True)
-
- if isinstance(test, SanityCodeSmellTest):
- result = test.test(args, sanity_targets, version)
- elif isinstance(test, SanityMultipleVersion):
- result = test.test(args, sanity_targets, version)
+ if isinstance(test, SanityMultipleVersion):
+ # multi-version sanity tests handle their own requirements (if any) and use the target python
+ test_profile = target_profiles[version]
+ result = test.test(args, sanity_targets, test_profile.python)
options = ' --python %s' % version
elif isinstance(test, SanitySingleVersion):
- result = test.test(args, sanity_targets, version)
+ # single version sanity tests use the controller python
+ test_profile = host_state.controller_profile
+ virtualenv_python = create_sanity_virtualenv(args, test_profile.python, test.name, context=test.name)
+
+ if virtualenv_python:
+ virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python)
+
+ if test.require_libyaml and not virtualenv_yaml:
+ result = SanitySkipped(test.name)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} due to missing libyaml support in PyYAML.'
+ else:
+ if virtualenv_yaml is False:
+ display.warning(f'Sanity test "{test.name}" on Python {version} may be slow due to missing libyaml support in PyYAML.')
+
+ result = test.test(args, sanity_targets, virtualenv_python)
+ else:
+ result = SanitySkipped(test.name, version)
+ result.reason = f'Skipping sanity test "{test.name}" on Python {version} due to missing virtual environment support.'
elif isinstance(test, SanityVersionNeutral):
+ # version neutral sanity tests handle their own requirements (if any)
result = test.test(args, sanity_targets)
else:
raise Exception('Unsupported test type: %s' % type(test))
elif result:
pass
else:
- result = SanitySkipped(test.name, skip_version)
+ result = SanitySkipped(test.name, version)
result.write(args)
@@ -243,7 +283,8 @@ def command_sanity(args):
raise ApplicationError(message)
-def collect_code_smell_tests(): # type: () -> t.Tuple[SanityFunc, ...]
+@cache
+def collect_code_smell_tests(): # type: () -> t.Tuple[SanityTest, ...]
"""Return a tuple of available code smell sanity tests."""
paths = glob.glob(os.path.join(SANITY_ROOT, 'code-smell', '*.py'))
@@ -253,19 +294,11 @@ def collect_code_smell_tests(): # type: () -> t.Tuple[SanityFunc, ...]
skip_tests = read_lines_without_comments(os.path.join(ansible_code_smell_root, 'skip.txt'), remove_blank_lines=True, optional=True)
paths.extend(path for path in glob.glob(os.path.join(ansible_code_smell_root, '*.py')) if os.path.basename(path) not in skip_tests)
- paths = sorted(p for p in paths if os.access(p, os.X_OK) and os.path.isfile(p))
tests = tuple(SanityCodeSmellTest(p) for p in paths)
return tests
-def sanity_get_tests():
- """
- :rtype: tuple[SanityFunc]
- """
- return SANITY_TESTS
-
-
class SanityIgnoreParser:
"""Parser for the consolidated sanity test ignore file."""
NO_CODE = '_'
@@ -370,7 +403,7 @@ class SanityIgnoreParser:
unversioned_name, test_name)))
elif test_name in versioned_test_names:
self.parse_errors.append((line_no, len(path) + len(test_name) + 1, "Sanity test '%s' requires a Python version like '%s-%s'" % (
- test_name, test_name, args.python_version)))
+ test_name, test_name, args.controller_python.version)))
else:
self.parse_errors.append((line_no, len(path) + 2, "Sanity test '%s' does not exist" % test_name))
@@ -563,7 +596,7 @@ class SanitySuccess(TestSuccess):
:type test: str
:type python_version: str
"""
- super(SanitySuccess, self).__init__(COMMAND, test, python_version)
+ super().__init__(COMMAND, test, python_version)
class SanitySkipped(TestSkipped):
@@ -573,7 +606,7 @@ class SanitySkipped(TestSkipped):
:type test: str
:type python_version: str
"""
- super(SanitySkipped, self).__init__(COMMAND, test, python_version)
+ super().__init__(COMMAND, test, python_version)
class SanityFailure(TestFailure):
@@ -585,7 +618,7 @@ class SanityFailure(TestFailure):
:type messages: list[SanityMessage]
:type summary: unicode
"""
- super(SanityFailure, self).__init__(COMMAND, test, python_version, messages, summary)
+ super().__init__(COMMAND, test, python_version, messages, summary)
class SanityMessage(TestMessage):
@@ -639,13 +672,16 @@ class SanityTargets:
return SanityTargets.get_targets.targets
-class SanityTest(ABC):
+class SanityTest(metaclass=abc.ABCMeta):
"""Sanity test base class."""
- __metaclass__ = abc.ABCMeta
-
ansible_only = False
- def __init__(self, name):
+ def __init__(self, name=None): # type: (t.Optional[str]) -> None
+ if not name:
+ name = self.__class__.__name__
+ name = re.sub(r'Test$', '', name) # drop Test suffix
+ name = re.sub(r'(.)([A-Z][a-z]+)', r'\1-\2', name).lower() # use dashes instead of capitalization
+
self.name = name
self.enabled = True
@@ -728,7 +764,8 @@ class SanityTest(ABC):
return targets
- def filter_remote_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
+ @staticmethod
+ def filter_remote_targets(targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return a filtered list of the given targets, including only those that require support for remote-only Python versions."""
targets = [target for target in targets if (
is_subdir(target.path, data_context().content.module_path) or
@@ -739,8 +776,8 @@ class SanityTest(ABC):
re.search('^%s/.*/library/' % re.escape(data_context().content.integration_targets_path), target.path) or
# special handling for content in ansible-core
(data_context().content.is_ansible and (
- # temporary solution until ansible-test code is reorganized when the split controller/remote implementation is complete
- is_subdir(target.path, 'test/lib/ansible_test/') or
+ # utility code that runs in target environments and requires support for remote-only Python versions
+ is_subdir(target.path, 'test/lib/ansible_test/_util/target/') or
# integration test support modules/module_utils continue to require support for remote-only Python versions
re.search('^test/support/integration/.*/(modules|module_utils)/', target.path)
))
@@ -749,13 +786,34 @@ class SanityTest(ABC):
return targets
-class SanityCodeSmellTest(SanityTest):
+class SanitySingleVersion(SanityTest, metaclass=abc.ABCMeta):
+ """Base class for sanity test plugins which should run on a single python version."""
+ @property
+ def require_libyaml(self): # type: () -> bool
+ """True if the test requires PyYAML to have libyaml support."""
+ return False
+
+ @abc.abstractmethod
+ def test(self, args, targets, python):
+ """
+ :type args: SanityConfig
+ :type targets: SanityTargets
+ :type python: PythonConfig
+ :rtype: TestResult
+ """
+
+ def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
+ """Load the ignore processor for this sanity test."""
+ return SanityIgnoreProcessor(args, self, None)
+
+
+class SanityCodeSmellTest(SanitySingleVersion):
"""Sanity test script."""
def __init__(self, path):
name = os.path.splitext(os.path.basename(path))[0]
config_path = os.path.splitext(path)[0] + '.json'
- super(SanityCodeSmellTest, self).__init__(name)
+ super().__init__(name=name)
self.path = path
self.config_path = config_path if os.path.exists(config_path) else None
@@ -773,7 +831,6 @@ class SanityCodeSmellTest(SanityTest):
self.files = self.config.get('files') # type: t.List[str]
self.text = self.config.get('text') # type: t.Optional[bool]
self.ignore_self = self.config.get('ignore_self') # type: bool
- self.intercept = self.config.get('intercept') # type: bool
self.minimum_python_version = self.config.get('minimum_python_version') # type: t.Optional[str]
self.__all_targets = self.config.get('all_targets') # type: bool
@@ -788,7 +845,6 @@ class SanityCodeSmellTest(SanityTest):
self.files = []
self.text = None # type: t.Optional[bool]
self.ignore_self = False
- self.intercept = False
self.minimum_python_version = None # type: t.Optional[str]
self.__all_targets = False
@@ -842,7 +898,7 @@ class SanityCodeSmellTest(SanityTest):
@property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
- versions = super(SanityCodeSmellTest, self).supported_python_versions
+ versions = super().supported_python_versions
if self.minimum_python_version:
versions = tuple(version for version in versions if str_to_version(version) >= str_to_version(self.minimum_python_version))
@@ -876,14 +932,14 @@ class SanityCodeSmellTest(SanityTest):
return targets
- def test(self, args, targets, python_version):
+ def test(self, args, targets, python):
"""
:type args: SanityConfig
:type targets: SanityTargets
- :type python_version: str
+ :type python: PythonConfig
:rtype: TestResult
"""
- cmd = [find_python(python_version), self.path]
+ cmd = [python.path, self.path]
env = ansible_environment(args, color=False)
@@ -909,11 +965,7 @@ class SanityCodeSmellTest(SanityTest):
display.info(data, verbosity=4)
try:
- if self.intercept:
- stdout, stderr = intercept_command(args, cmd, target_name='sanity.%s' % self.name, data=data, env=env, capture=True, disable_coverage=True)
- else:
- stdout, stderr = run_command(args, cmd, data=data, env=env, capture=True)
-
+ stdout, stderr = intercept_python(args, python, cmd, data=data, env=env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
@@ -957,17 +1009,7 @@ class SanityCodeSmellTest(SanityTest):
return SanityIgnoreProcessor(args, self, None)
-class SanityFunc(SanityTest):
- """Base class for sanity test plugins."""
- def __init__(self):
- name = self.__class__.__name__
- name = re.sub(r'Test$', '', name) # drop Test suffix
- name = re.sub(r'(.)([A-Z][a-z]+)', r'\1-\2', name).lower() # use dashes instead of capitalization
-
- super(SanityFunc, self).__init__(name)
-
-
-class SanityVersionNeutral(SanityFunc):
+class SanityVersionNeutral(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which are idependent of the python version being used."""
@abc.abstractmethod
def test(self, args, targets):
@@ -987,30 +1029,14 @@ class SanityVersionNeutral(SanityFunc):
return None
-class SanitySingleVersion(SanityFunc):
- """Base class for sanity test plugins which should run on a single python version."""
- @abc.abstractmethod
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
-
- def load_processor(self, args): # type: (SanityConfig) -> SanityIgnoreProcessor
- """Load the ignore processor for this sanity test."""
- return SanityIgnoreProcessor(args, self, None)
-
-
-class SanityMultipleVersion(SanityFunc):
+class SanityMultipleVersion(SanityTest, metaclass=abc.ABCMeta):
"""Base class for sanity test plugins which should run on multiple python versions."""
@abc.abstractmethod
- def test(self, args, targets, python_version):
+ def test(self, args, targets, python):
"""
:type args: SanityConfig
:type targets: SanityTargets
- :type python_version: str
+ :type python: PythonConfig
:rtype: TestResult
"""
@@ -1019,6 +1045,11 @@ class SanityMultipleVersion(SanityFunc):
return SanityIgnoreProcessor(args, self, python_version)
@property
+ def needs_pypi(self): # type: () -> bool
+ """True if the test requires PyPI, otherwise False."""
+ return False
+
+ @property
def supported_python_versions(self): # type: () -> t.Optional[t.Tuple[str, ...]]
"""A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
return SUPPORTED_PYTHON_VERSIONS
@@ -1028,7 +1059,7 @@ class SanityMultipleVersion(SanityFunc):
if not python_version:
raise Exception('python_version is required to filter multi-version tests')
- targets = super(SanityMultipleVersion, self).filter_targets_by_version(targets, python_version)
+ targets = super().filter_targets_by_version(targets, python_version)
if python_version in REMOTE_ONLY_PYTHON_VERSIONS:
content_config = get_content_config()
@@ -1043,15 +1074,91 @@ class SanityMultipleVersion(SanityFunc):
return targets
-SANITY_TESTS = (
-)
-
-
-def sanity_init():
- """Initialize full sanity test list (includes code-smell scripts determined at runtime)."""
+@cache
+def sanity_get_tests(): # type: () -> t.Tuple[SanityTest, ...]
+ """Return a tuple of the available sanity tests."""
import_plugins('commands/sanity')
- sanity_plugins = {} # type: t.Dict[str, t.Type[SanityFunc]]
- load_plugins(SanityFunc, sanity_plugins)
+ sanity_plugins = {} # type: t.Dict[str, t.Type[SanityTest]]
+ load_plugins(SanityTest, sanity_plugins)
+ sanity_plugins.pop('sanity') # SanityCodeSmellTest
sanity_tests = tuple(plugin() for plugin in sanity_plugins.values() if data_context().content.is_ansible or not plugin.ansible_only)
- global SANITY_TESTS # pylint: disable=locally-disabled, global-statement
- SANITY_TESTS = tuple(sorted(sanity_tests + collect_code_smell_tests(), key=lambda k: k.name))
+ sanity_tests = tuple(sorted(sanity_tests + collect_code_smell_tests(), key=lambda k: k.name))
+ return sanity_tests
+
+
+def create_sanity_virtualenv(
+ args, # type: SanityConfig
+ python, # type: PythonConfig
+ name, # type: str
+ ansible=False, # type: bool
+ coverage=False, # type: bool
+ minimize=False, # type: bool
+ context=None, # type: t.Optional[str]
+): # type: (...) -> t.Optional[VirtualPythonConfig]
+ """Return an existing sanity virtual environment matching the requested parameters or create a new one."""
+ commands = collect_requirements( # create_sanity_virtualenv()
+ python=python,
+ controller=True,
+ virtualenv=False,
+ command=None,
+ # used by import tests
+ ansible=ansible,
+ cryptography=ansible,
+ coverage=coverage,
+ minimize=minimize,
+ # used by non-import tests
+ sanity=context,
+ )
+
+ if commands:
+ label = f'sanity.{name}'
+ else:
+ label = 'sanity' # use a single virtualenv name for tests which have no requirements
+
+ # The path to the virtual environment must be kept short to avoid the 127 character shebang length limit on Linux.
+ # If the limit is exceeded, generated entry point scripts from pip installed packages will fail with syntax errors.
+ virtualenv_install = json.dumps([command.serialize() for command in commands], indent=4)
+ virtualenv_hash = hashlib.sha256(to_bytes(virtualenv_install)).hexdigest()[:8]
+ virtualenv_cache = os.path.join(os.path.expanduser('~/.ansible/test/venv'))
+ virtualenv_path = os.path.join(virtualenv_cache, label, f'{python.version}', virtualenv_hash)
+ virtualenv_marker = os.path.join(virtualenv_path, 'marker.txt')
+
+ meta_install = os.path.join(virtualenv_path, 'meta.install.json')
+ meta_yaml = os.path.join(virtualenv_path, 'meta.yaml.json')
+
+ virtualenv_python = VirtualPythonConfig(
+ version=python.version,
+ path=os.path.join(virtualenv_path, 'bin', 'python'),
+ )
+
+ if not os.path.exists(virtualenv_marker):
+ # a virtualenv without a marker is assumed to have been partially created
+ remove_tree(virtualenv_path)
+
+ if not create_virtual_environment(args, python, virtualenv_path):
+ return None
+
+ run_pip(args, virtualenv_python, commands, None) # create_sanity_virtualenv()
+
+ write_text_file(meta_install, virtualenv_install)
+
+ if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands):
+ virtualenv_yaml = yamlcheck(virtualenv_python)
+ else:
+ virtualenv_yaml = None
+
+ write_json_file(meta_yaml, virtualenv_yaml)
+
+ # touch the marker to keep track of when the virtualenv was last used
+ pathlib.Path(virtualenv_marker).touch()
+
+ return virtualenv_python
+
+
+def check_sanity_virtualenv_yaml(python): # type: (VirtualPythonConfig) -> t.Optional[bool]
+ """Return True if PyYAML has libyaml support for the given sanity virtual environment, False if it does not and None if it was not found."""
+ virtualenv_path = os.path.dirname(os.path.dirname(python.path))
+ meta_yaml = os.path.join(virtualenv_path, 'meta.yaml.json')
+ virtualenv_yaml = read_json_file(meta_yaml)
+
+ return virtualenv_yaml
diff --git a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
index 491dc01473..82d9f75133 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ansible_doc.py
@@ -1,17 +1,20 @@
"""Sanity test for ansible-doc."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import collections
import os
import re
-
-from ... import types as t
+import typing as t
from . import (
SanitySingleVersion,
SanityFailure,
SanitySuccess,
+ SanityTargets,
+)
+
+from ...test import (
+ TestResult,
)
from ...target import (
@@ -24,12 +27,9 @@ from ...util import (
is_subdir,
)
-from ...util_common import (
- intercept_command,
-)
-
from ...ansible_util import (
ansible_environment,
+ intercept_python,
)
from ...config import (
@@ -40,8 +40,8 @@ from ...data import (
data_context,
)
-from ...coverage_util import (
- coverage_context,
+from ...host_configs import (
+ PythonConfig,
)
@@ -50,7 +50,7 @@ class AnsibleDocTest(SanitySingleVersion):
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
# This should use documentable plugins from constants instead
- unsupported_plugin_types = set([
+ unsupported_plugin_types = {
# not supported by ansible-doc
'action',
'doc_fragments',
@@ -62,7 +62,7 @@ class AnsibleDocTest(SanitySingleVersion):
# (https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst#modules--plugins)
'plugin_utils',
'sub_plugins',
- ])
+ }
plugin_paths = [plugin_path for plugin_type, plugin_path in data_context().content.plugin_paths.items() if plugin_type not in unsupported_plugin_types]
@@ -72,13 +72,7 @@ class AnsibleDocTest(SanitySingleVersion):
and any(is_subdir(target.path, path) for path in plugin_paths)
]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
@@ -113,9 +107,7 @@ class AnsibleDocTest(SanitySingleVersion):
cmd.extend(sorted(doc_targets[doc_type]))
try:
- with coverage_context(args):
- stdout, stderr = intercept_command(args, cmd, target_name='ansible-doc', env=env, capture=True, python_version=python_version)
-
+ stdout, stderr = intercept_python(args, python, cmd, env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
diff --git a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
index 934da9f60c..067cc0de92 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py
@@ -1,16 +1,19 @@
"""Sanity test for symlinks in the bin directory."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
+)
+
+from ...test import (
+ TestResult,
)
from ...config import (
@@ -46,13 +49,7 @@ class BinSymlinksTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- # noinspection PyUnusedLocal
- def test(self, args, targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
bin_root = ANSIBLE_BIN_PATH
bin_names = os.listdir(bin_root)
bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/compile.py b/test/lib/ansible_test/_internal/commands/sanity/compile.py
index 9f92401efc..02e962dd77 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/compile.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/compile.py
@@ -1,10 +1,8 @@
"""Sanity test for proper python syntax."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
SanityMultipleVersion,
@@ -15,6 +13,10 @@ from . import (
TARGET_SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...target import (
TestTarget,
)
@@ -22,7 +24,6 @@ from ...target import (
from ...util import (
SubprocessError,
display,
- find_python,
parse_to_list_of_dict,
is_subdir,
)
@@ -35,6 +36,10 @@ from ...config import (
SanityConfig,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class CompileTest(SanityMultipleVersion):
"""Sanity test for proper python syntax."""
@@ -42,18 +47,12 @@ class CompileTest(SanityMultipleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
- settings = self.load_processor(args, python_version)
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ settings = self.load_processor(args, python.version)
paths = [target.path for target in targets.include]
- cmd = [find_python(python_version), os.path.join(TARGET_SANITY_ROOT, 'compile', 'compile.py')]
+ cmd = [python.path, os.path.join(TARGET_SANITY_ROOT, 'compile', 'compile.py')]
data = '\n'.join(paths)
@@ -71,7 +70,7 @@ class CompileTest(SanityMultipleVersion):
raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
if args.explain:
- return SanitySuccess(self.name, python_version=python_version)
+ return SanitySuccess(self.name, python_version=python.version)
pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
@@ -87,6 +86,6 @@ class CompileTest(SanityMultipleVersion):
results = settings.process_errors(results, paths)
if results:
- return SanityFailure(self.name, messages=results, python_version=python_version)
+ return SanityFailure(self.name, messages=results, python_version=python.version)
- return SanitySuccess(self.name, python_version=python_version)
+ return SanitySuccess(self.name, python_version=python.version)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/ignores.py b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
index 97447719b2..9a39955ac5 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/ignores.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/ignores.py
@@ -1,6 +1,5 @@
"""Sanity test for the sanity ignore file."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -10,11 +9,13 @@ from . import (
SanityVersionNeutral,
SanitySuccess,
SanityMessage,
+ SanityTargets,
)
from ...test import (
calculate_confidence,
calculate_best_confidence,
+ TestResult,
)
from ...config import (
@@ -34,13 +35,7 @@ class IgnoresTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- # noinspection PyUnusedLocal
- def test(self, args, targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
sanity_ignore = SanityIgnoreParser.load(args)
messages = []
diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py
index 9dfd4f3407..c4e7f78c72 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/import.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/import.py
@@ -1,10 +1,9 @@
"""Sanity test for proper import exception handling."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import tempfile
+import typing as t
from . import (
SanityMultipleVersion,
@@ -13,6 +12,17 @@ from . import (
SanitySuccess,
SanitySkipped,
TARGET_SANITY_ROOT,
+ SanityTargets,
+ create_sanity_virtualenv,
+ check_sanity_virtualenv_yaml,
+)
+
+from ...constants import (
+ REMOTE_ONLY_PYTHON_VERSIONS,
+)
+
+from ...test import (
+ TestResult,
)
from ...target import (
@@ -20,21 +30,13 @@ from ...target import (
)
from ...util import (
- ANSIBLE_TEST_DATA_ROOT,
SubprocessError,
- remove_tree,
display,
parse_to_list_of_dict,
is_subdir,
- generate_pip_command,
- find_python,
- get_hash,
- REMOTE_ONLY_PYTHON_VERSIONS,
)
from ...util_common import (
- intercept_command,
- run_command,
ResultType,
)
@@ -42,9 +44,8 @@ from ...ansible_util import (
ansible_environment,
)
-from ...executor import (
- generate_pip_install,
- install_cryptography,
+from ...python_requirements import (
+ install_requirements,
)
from ...config import (
@@ -52,17 +53,17 @@ from ...config import (
)
from ...coverage_util import (
- coverage_context,
-)
-
-from ...venv import (
- create_virtual_environment,
+ cover_python,
)
from ...data import (
data_context,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
def _get_module_test(module_restrictions): # type: (bool) -> t.Callable[[str], bool]
"""Create a predicate which tests whether a path can be used by modules or not."""
@@ -80,69 +81,49 @@ class ImportTest(SanityMultipleVersion):
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' and
any(is_subdir(target.path, path) for path in data_context().content.plugin_paths.values())]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
- settings = self.load_processor(args, python_version)
-
- paths = [target.path for target in targets.include]
+ @property
+ def needs_pypi(self): # type: () -> bool
+ """True if the test requires PyPI, otherwise False."""
+ return True
- capture_pip = args.verbosity < 2
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
+ settings = self.load_processor(args, python.version)
- python = find_python(python_version)
+ paths = [target.path for target in targets.include]
- if python_version.startswith('2.') and args.requirements:
+ if python.version.startswith('2.'):
# hack to make sure that virtualenv is available under Python 2.x
# on Python 3.x we can use the built-in venv
- pip = generate_pip_command(python)
- run_command(args, generate_pip_install(pip, '', packages=['virtualenv']), capture=capture_pip)
-
- env = ansible_environment(args, color=False)
+ install_requirements(args, python, virtualenv=True) # sanity (import)
temp_root = os.path.join(ResultType.TMP.path, 'sanity', 'import')
messages = []
- for import_type, test, add_ansible_requirements in (
+ for import_type, test, controller in (
('module', _get_module_test(True), False),
('plugin', _get_module_test(False), True),
):
- if import_type == 'plugin' and python_version in REMOTE_ONLY_PYTHON_VERSIONS:
+ if import_type == 'plugin' and python.version in REMOTE_ONLY_PYTHON_VERSIONS:
continue
data = '\n'.join([path for path in paths if test(path)])
+
if not data:
continue
- requirements_file = None
-
- # create a clean virtual environment to minimize the available imports beyond the python standard library
- virtual_environment_dirname = 'minimal-py%s' % python_version.replace('.', '')
- if add_ansible_requirements:
- requirements_file = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.import-plugins.txt')
- virtual_environment_dirname += '-requirements-%s' % get_hash(requirements_file)
- virtual_environment_path = os.path.join(temp_root, virtual_environment_dirname)
- virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')
+ virtualenv_python = create_sanity_virtualenv(args, python, f'{self.name}.{import_type}', ansible=controller, coverage=args.coverage, minimize=True)
- remove_tree(virtual_environment_path)
+ if not virtualenv_python:
+ display.warning(f'Skipping sanity test "{self.name}" ({import_type}) on Python {python.version} due to missing virtual environment support.')
+ return SanitySkipped(self.name, python.version)
- if not create_virtual_environment(args, python_version, virtual_environment_path):
- display.warning("Skipping sanity test '%s' on Python %s due to missing virtual environment support." % (self.name, python_version))
- return SanitySkipped(self.name, python_version)
+ virtualenv_yaml = check_sanity_virtualenv_yaml(virtualenv_python)
- # add the importer to our virtual environment so it can be accessed through the coverage injector
- importer_path = os.path.join(virtual_environment_bin, 'importer.py')
- yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py')
- if not args.explain:
- os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'importer.py')), importer_path)
- os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path)
+ if virtualenv_yaml is False:
+ display.warning(f'Sanity test "{self.name}" ({import_type}) on Python {python.version} may be slow due to missing libyaml support in PyYAML.')
- # activate the virtual environment
- env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
+ env = ansible_environment(args, color=False)
env.update(
SANITY_TEMP_PATH=ResultType.TMP.path,
@@ -152,44 +133,23 @@ class ImportTest(SanityMultipleVersion):
if data_context().content.collection:
env.update(
SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name,
- SANITY_EXTERNAL_PYTHON=python,
+ SANITY_EXTERNAL_PYTHON=python.path,
)
- virtualenv_python = os.path.join(virtual_environment_bin, 'python')
- virtualenv_pip = generate_pip_command(virtualenv_python)
-
- # make sure requirements are installed if needed
- if requirements_file:
- install_cryptography(args, virtualenv_python, python_version, virtualenv_pip)
- run_command(args, generate_pip_install(virtualenv_pip, 'sanity', context='import-plugins'), env=env, capture=capture_pip)
-
- # make sure coverage is available in the virtual environment if needed
- if args.coverage:
- run_command(args, generate_pip_install(virtualenv_pip, '', packages=['setuptools']), env=env, capture=capture_pip)
- run_command(args, generate_pip_install(virtualenv_pip, '', packages=['coverage']), env=env, capture=capture_pip)
-
- try:
- # In some environments pkg_resources is installed as a separate pip package which needs to be removed.
- # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools.
- # However, a venv is created with an additional pkg-resources package which is independent of setuptools.
- # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv.
- # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings.
- # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings.
- run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pkg-resources'], env=env, capture=capture_pip)
- except SubprocessError:
- pass
-
- run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env, capture=capture_pip)
- run_command(args, virtualenv_pip + ['uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env, capture=capture_pip)
-
display.info(import_type + ': ' + data, verbosity=4)
cmd = ['importer.py']
try:
- with coverage_context(args):
- stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version,
- virtualenv=virtualenv_python)
+ with tempfile.TemporaryDirectory(prefix='ansible-test', suffix='-import') as temp_dir:
+ # make the importer available in the temporary directory
+ os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'importer.py')), os.path.join(temp_dir, 'importer.py'))
+ os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'yaml_to_json.py')), os.path.join(temp_dir, 'yaml_to_json.py'))
+
+ # add the importer to the path so it can be accessed through the coverage injector
+ env['PATH'] = os.pathsep.join([temp_dir, env['PATH']])
+
+ stdout, stderr = cover_python(args, virtualenv_python, cmd, self.name, env, capture=True, data=data)
if stdout or stderr:
raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
@@ -213,6 +173,6 @@ class ImportTest(SanityMultipleVersion):
results = settings.process_errors(messages, paths)
if results:
- return SanityFailure(self.name, messages=results, python_version=python_version)
+ return SanityFailure(self.name, messages=results, python_version=python.version)
- return SanitySuccess(self.name, python_version=python_version)
+ return SanitySuccess(self.name, python_version=python.version)
diff --git a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py
index 13db83df7c..bce4f0cf4f 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/integration_aliases.py
@@ -1,15 +1,13 @@
"""Sanity test to check integration test aliases."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import textwrap
import os
-
-from ... import types as t
+import typing as t
from . import (
- SanityVersionNeutral,
+ SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
@@ -17,6 +15,10 @@ from . import (
SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...config import (
SanityConfig,
)
@@ -39,7 +41,6 @@ from ...io import (
from ...util import (
display,
- find_python,
raw_command,
)
@@ -48,8 +49,12 @@ from ...util_common import (
ResultType,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
-class IntegrationAliasesTest(SanityVersionNeutral):
+class IntegrationAliasesTest(SanitySingleVersion):
"""Sanity test to evaluate integration test aliases."""
CI_YML = '.azure-pipelines/azure-pipelines.yml'
TEST_ALIAS_PREFIX = 'shippable' # this will be changed at some point in the future
@@ -95,7 +100,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
ansible_only = True
def __init__(self):
- super(IntegrationAliasesTest, self).__init__()
+ super().__init__()
self._ci_config = {} # type: t.Dict[str, t.Any]
self._ci_test_groups = {} # type: t.Dict[str, t.List[int]]
@@ -110,10 +115,10 @@ class IntegrationAliasesTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- def load_ci_config(self, args): # type: (SanityConfig) -> t.Dict[str, t.Any]
+ def load_ci_config(self, python): # type: (PythonConfig) -> t.Dict[str, t.Any]
"""Load and return the CI YAML configuration."""
if not self._ci_config:
- self._ci_config = self.load_yaml(args, self.CI_YML)
+ self._ci_config = self.load_yaml(python, self.CI_YML)
return self._ci_config
@@ -192,19 +197,12 @@ class IntegrationAliasesTest(SanityVersionNeutral):
return alias
- def load_yaml(self, args, path): # type: (SanityConfig, str) -> t.Dict[str, t.Any]
+ def load_yaml(self, python, path): # type: (PythonConfig, str) -> t.Dict[str, t.Any]
"""Load the specified YAML file and return the contents."""
yaml_to_json_path = os.path.join(SANITY_ROOT, self.name, 'yaml_to_json.py')
- python = find_python(args.python_version)
+ return json.loads(raw_command([python.path, yaml_to_json_path], data=read_text_file(path), capture=True)[0])
- return json.loads(raw_command([python, yaml_to_json_path], data=read_text_file(path), capture=True)[0])
-
- def test(self, args, targets):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
if args.explain:
return SanitySuccess(self.name)
@@ -219,7 +217,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
labels={},
)
- self.load_ci_config(args)
+ self.load_ci_config(python)
self.check_changes(args, results)
write_json_test_results(ResultType.BOT, 'data-sanity-ci.json', results)
@@ -244,7 +242,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
clouds = get_cloud_platforms(args, posix_targets)
cloud_targets = ['cloud/%s/' % cloud for cloud in clouds]
- all_cloud_targets = tuple(filter_targets(posix_targets, ['cloud/'], include=True, directories=False, errors=False))
+ all_cloud_targets = tuple(filter_targets(posix_targets, ['cloud/'], directories=False, errors=False))
invalid_cloud_targets = tuple(filter_targets(all_cloud_targets, cloud_targets, include=False, directories=False, errors=False))
messages = []
@@ -258,15 +256,13 @@ class IntegrationAliasesTest(SanityVersionNeutral):
messages.append(SanityMessage('invalid alias `%s`' % alias, '%s/aliases' % target.path))
messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['cloud/', '%s/generic/' % self.TEST_ALIAS_PREFIX], include=False,
- directories=False, errors=False)),
+ targets=tuple(filter_targets(posix_targets, ['cloud/', '%s/generic/' % self.TEST_ALIAS_PREFIX], include=False, directories=False, errors=False)),
find=self.format_test_group_alias('linux').replace('linux', 'posix'),
find_incidental=['%s/posix/incidental/' % self.TEST_ALIAS_PREFIX],
)
messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['%s/generic/' % self.TEST_ALIAS_PREFIX], include=True, directories=False,
- errors=False)),
+ targets=tuple(filter_targets(posix_targets, ['%s/generic/' % self.TEST_ALIAS_PREFIX], directories=False, errors=False)),
find=self.format_test_group_alias('generic'),
)
@@ -279,7 +275,7 @@ class IntegrationAliasesTest(SanityVersionNeutral):
find_incidental = ['%s/%s/incidental/' % (self.TEST_ALIAS_PREFIX, cloud), '%s/cloud/incidental/' % self.TEST_ALIAS_PREFIX]
messages += self.check_ci_group(
- targets=tuple(filter_targets(posix_targets, ['cloud/%s/' % cloud], include=True, directories=False, errors=False)),
+ targets=tuple(filter_targets(posix_targets, ['cloud/%s/' % cloud], directories=False, errors=False)),
find=find,
find_incidental=find_incidental,
)
@@ -310,11 +306,11 @@ class IntegrationAliasesTest(SanityVersionNeutral):
:rtype: list[SanityMessage]
"""
all_paths = set(target.path for target in targets)
- supported_paths = set(target.path for target in filter_targets(targets, [find], include=True, directories=False, errors=False))
- unsupported_paths = set(target.path for target in filter_targets(targets, [self.UNSUPPORTED], include=True, directories=False, errors=False))
+ supported_paths = set(target.path for target in filter_targets(targets, [find], directories=False, errors=False))
+ unsupported_paths = set(target.path for target in filter_targets(targets, [self.UNSUPPORTED], directories=False, errors=False))
if find_incidental:
- incidental_paths = set(target.path for target in filter_targets(targets, find_incidental, include=True, directories=False, errors=False))
+ incidental_paths = set(target.path for target in filter_targets(targets, find_incidental, directories=False, errors=False))
else:
incidental_paths = set()
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pep8.py b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
index b1244c6872..71241c913f 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pep8.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pep8.py
@@ -1,19 +1,22 @@
"""Sanity test for PEP 8 style guidelines using pycodestyle."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...target import (
TestTarget,
)
@@ -22,7 +25,6 @@ from ...util import (
SubprocessError,
read_lines_without_comments,
parse_to_list_of_dict,
- find_python,
is_subdir,
)
@@ -34,6 +36,10 @@ from ...config import (
SanityConfig,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class Pep8Test(SanitySingleVersion):
"""Sanity test for PEP 8 style guidelines using pycodestyle."""
@@ -46,13 +52,7 @@ class Pep8Test(SanitySingleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
current_ignore_file = os.path.join(SANITY_ROOT, 'pep8', 'current-ignore.txt')
current_ignore = sorted(read_lines_without_comments(current_ignore_file, remove_blank_lines=True))
@@ -61,7 +61,7 @@ class Pep8Test(SanitySingleVersion):
paths = [target.path for target in targets.include]
cmd = [
- find_python(python_version),
+ python.path,
'-m', 'pycodestyle',
'--max-line-length', '160',
'--config', '/dev/null',
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pslint.py b/test/lib/ansible_test/_internal/commands/sanity/pslint.py
index 1a6b5eb377..254b7778c3 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pslint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pslint.py
@@ -1,12 +1,10 @@
"""Sanity test using PSScriptAnalyzer."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
import re
-
-from ... import types as t
+import typing as t
from . import (
SanityVersionNeutral,
@@ -14,9 +12,14 @@ from . import (
SanityFailure,
SanitySuccess,
SanitySkipped,
+ SanityTargets,
SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...target import (
TestTarget,
)
@@ -51,12 +54,7 @@ class PslintTest(SanityVersionNeutral):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] in ('.ps1', '.psm1', '.psd1')]
- def test(self, args, targets):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
@@ -66,10 +64,10 @@ class PslintTest(SanityVersionNeutral):
cmds = []
- if args.requirements:
- cmds.append([os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.ps1')])
+ if args.controller.is_managed or args.requirements:
+ cmds.append(['pwsh', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.pslint.ps1')])
- cmds.append([os.path.join(SANITY_ROOT, 'pslint', 'pslint.ps1')] + paths)
+ cmds.append(['pwsh', os.path.join(SANITY_ROOT, 'pslint', 'pslint.ps1')] + paths)
stdout = ''
diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
index e69d58b762..c1d06f3a8d 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py
@@ -1,22 +1,26 @@
"""Sanity test using pylint."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import itertools
import json
import os
import datetime
-
-from ... import types as t
+import configparser
+import typing as t
from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...target import (
TestTarget,
)
@@ -24,9 +28,7 @@ from ...target import (
from ...util import (
SubprocessError,
display,
- ConfigParser,
is_subdir,
- find_python,
)
from ...util_common import (
@@ -48,12 +50,16 @@ from ...data import (
data_context,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class PylintTest(SanitySingleVersion):
"""Sanity test using pylint."""
def __init__(self):
- super(PylintTest, self).__init__()
+ super().__init__()
self.optional_error_codes.update([
'ansible-deprecated-date',
'too-complex',
@@ -68,13 +74,7 @@ class PylintTest(SanitySingleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.py' or is_subdir(target.path, 'bin')]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
plugin_dir = os.path.join(SANITY_ROOT, 'pylint', 'plugins')
plugin_names = sorted(p[0] for p in [
os.path.splitext(p) for p in os.listdir(plugin_dir)] if p[1] == '.py' and p[0] != '__init__')
@@ -85,7 +85,7 @@ class PylintTest(SanitySingleVersion):
module_paths = [os.path.relpath(p, data_context().content.module_path).split(os.path.sep) for p in
paths if is_subdir(p, data_context().content.module_path)]
- module_dirs = sorted(set([p[0] for p in module_paths if len(p) > 1]))
+ module_dirs = sorted({p[0] for p in module_paths if len(p) > 1})
large_module_group_threshold = 500
large_module_groups = [key for key, value in
@@ -93,7 +93,7 @@ class PylintTest(SanitySingleVersion):
large_module_group_paths = [os.path.relpath(p, data_context().content.module_path).split(os.path.sep) for p in paths
if any(is_subdir(p, os.path.join(data_context().content.module_path, g)) for g in large_module_groups)]
- large_module_group_dirs = sorted(set([os.path.sep.join(p[:2]) for p in large_module_group_paths if len(p) > 2]))
+ large_module_group_dirs = sorted({os.path.sep.join(p[:2]) for p in large_module_group_paths if len(p) > 2})
contexts = []
remaining_paths = set(paths)
@@ -138,9 +138,9 @@ class PylintTest(SanitySingleVersion):
else:
add_context(remaining_paths, 'validate-modules', filter_path('test/lib/ansible_test/_util/controller/sanity/validate-modules/'))
add_context(remaining_paths, 'validate-modules-unit', filter_path('test/lib/ansible_test/tests/validate-modules-unit/'))
- add_context(remaining_paths, 'sanity', filter_path('test/lib/ansible_test/_util/controller/sanity/'))
- add_context(remaining_paths, 'sanity', filter_path('test/lib/ansible_test/_util/target/sanity/'))
+ add_context(remaining_paths, 'code-smell', filter_path('test/lib/ansible_test/_util/controller/sanity/code-smell/'))
add_context(remaining_paths, 'legacy-collection-loader', filter_path('test/lib/ansible_test/_util/target/legacy_collection_loader/'))
+ add_context(remaining_paths, 'ansible-test-target', filter_path('test/lib/ansible_test/_util/target/'))
add_context(remaining_paths, 'ansible-test', filter_path('test/lib/'))
add_context(remaining_paths, 'test', filter_path('test/'))
add_context(remaining_paths, 'hacking', filter_path('hacking/'))
@@ -149,8 +149,6 @@ class PylintTest(SanitySingleVersion):
messages = []
context_times = []
- python = find_python(python_version)
-
collection_detail = None
if data_context().content.collection:
@@ -207,7 +205,7 @@ class PylintTest(SanitySingleVersion):
paths, # type: t.List[str]
plugin_dir, # type: str
plugin_names, # type: t.List[str]
- python, # type: str
+ python, # type: PythonConfig
collection_detail, # type: CollectionDetail
): # type: (...) -> t.List[t.Dict[str, str]]
"""Run pylint using the config specified by the context on the specified paths."""
@@ -219,7 +217,7 @@ class PylintTest(SanitySingleVersion):
else:
rcfile = os.path.join(SANITY_ROOT, 'pylint', 'config', 'default.cfg')
- parser = ConfigParser()
+ parser = configparser.ConfigParser()
parser.read(rcfile)
if parser.has_section('ansible-test'):
@@ -231,7 +229,7 @@ class PylintTest(SanitySingleVersion):
load_plugins = set(plugin_names + ['pylint.extensions.mccabe']) - disable_plugins
cmd = [
- python,
+ python.path,
'-m', 'pylint',
'--jobs', '0',
'--reports', 'n',
diff --git a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
index 5ebdae84d6..a663bd96f3 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/sanity_docs.py
@@ -1,6 +1,5 @@
"""Sanity test for documentation of sanity tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
@@ -9,9 +8,14 @@ from . import (
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
sanity_get_tests,
)
+from ...test import (
+ TestResult,
+)
+
from ...config import (
SanityConfig,
)
@@ -35,13 +39,7 @@ class SanityDocsTest(SanityVersionNeutral):
"""True if the test does not use test targets. Mutually exclusive with all_targets."""
return True
- # noinspection PyUnusedLocal
- def test(self, args, targets): # pylint: disable=locally-disabled, unused-argument
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
sanity_dir = 'docs/docsite/rst/dev_guide/testing/sanity'
sanity_docs = set(part[0] for part in (os.path.splitext(os.path.basename(path)) for path in data_context().content.get_files(sanity_dir))
if part[1] == '.rst')
diff --git a/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
index 66f7b643bf..19805ea9aa 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/shellcheck.py
@@ -1,25 +1,28 @@
"""Sanity test using shellcheck."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
from xml.etree.ElementTree import (
fromstring,
Element,
)
-from ... import types as t
-
from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
SanitySkipped,
+ SanityTargets,
SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...target import (
TestTarget,
)
@@ -50,12 +53,7 @@ class ShellcheckTest(SanityVersionNeutral):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] == '.sh']
- def test(self, args, targets):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :rtype: TestResult
- """
+ def test(self, args, targets): # type: (SanityConfig, SanityTargets) -> TestResult
exclude_file = os.path.join(SANITY_ROOT, 'shellcheck', 'exclude.txt')
exclude = set(read_lines_without_comments(exclude_file, remove_blank_lines=True, optional=True))
diff --git a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
index e191f1658a..0eccc01f9c 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/validate_modules.py
@@ -1,20 +1,23 @@
"""Sanity test using validate-modules."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
-
-from ... import types as t
+import typing as t
from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...target import (
TestTarget,
)
@@ -22,7 +25,6 @@ from ...target import (
from ...util import (
SubprocessError,
display,
- find_python,
)
from ...util_common import (
@@ -47,12 +49,17 @@ from ...data import (
data_context,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class ValidateModulesTest(SanitySingleVersion):
"""Sanity test using validate-modules."""
def __init__(self):
- super(ValidateModulesTest, self).__init__()
+ super().__init__()
+
self.optional_error_codes.update([
'deprecated-date',
])
@@ -66,23 +73,15 @@ class ValidateModulesTest(SanitySingleVersion):
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if target.module]
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
env = ansible_environment(args, color=False)
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
- python = find_python(python_version)
-
cmd = [
- python,
+ python.path,
os.path.join(SANITY_ROOT, 'validate-modules', 'validate-modules'),
'--format', 'json',
'--arg-spec',
@@ -136,7 +135,6 @@ class ValidateModulesTest(SanitySingleVersion):
path=filename,
line=int(item['line']) if 'line' in item else 0,
column=int(item['column']) if 'column' in item else 0,
- level='error',
code='%s' % item['code'],
message=item['msg'],
))
diff --git a/test/lib/ansible_test/_internal/commands/sanity/yamllint.py b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
index 42a69acedf..4ca6dfe8c1 100644
--- a/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
+++ b/test/lib/ansible_test/_internal/commands/sanity/yamllint.py
@@ -1,25 +1,23 @@
"""Sanity test using yamllint."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
-
-from ... import types as t
-
-from ...ansible_util import (
- check_pyyaml,
-)
+import typing as t
from . import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
- SanitySkipped,
SanitySuccess,
+ SanityTargets,
SANITY_ROOT,
)
+from ...test import (
+ TestResult,
+)
+
from ...target import (
TestTarget,
)
@@ -28,7 +26,6 @@ from ...util import (
SubprocessError,
display,
is_subdir,
- find_python,
)
from ...util_common import (
@@ -43,6 +40,10 @@ from ...data import (
data_context,
)
+from ...host_configs import (
+ PythonConfig,
+)
+
class YamllintTest(SanitySingleVersion):
"""Sanity test using yamllint."""
@@ -51,6 +52,11 @@ class YamllintTest(SanitySingleVersion):
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'ansible-test'
+ @property
+ def require_libyaml(self): # type: () -> bool
+ """True if the test requires PyYAML to have libyaml support."""
+ return True
+
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
yaml_targets = [target for target in targets if os.path.splitext(target.path)[1] in ('.yml', '.yaml')]
@@ -66,25 +72,11 @@ class YamllintTest(SanitySingleVersion):
return yaml_targets
- def test(self, args, targets, python_version):
- """
- :type args: SanityConfig
- :type targets: SanityTargets
- :type python_version: str
- :rtype: TestResult
- """
- pyyaml_presence = check_pyyaml(args, python_version, quiet=True)
- if not pyyaml_presence['cloader']:
- display.warning("Skipping sanity test '%s' due to missing libyaml support in PyYAML."
- % self.name)
- return SanitySkipped(self.name)
-
+ def test(self, args, targets, python): # type: (SanityConfig, SanityTargets, PythonConfig) -> TestResult
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
- python = find_python(python_version)
-
results = self.test_paths(args, paths, python)
results = settings.process_errors(results, paths)
@@ -94,15 +86,10 @@ class YamllintTest(SanitySingleVersion):
return SanitySuccess(self.name)
@staticmethod
- def test_paths(args, paths, python):
- """
- :type args: SanityConfig
- :type paths: list[str]
- :type python: str
- :rtype: list[SanityMessage]
- """
+ def test_paths(args, paths, python): # type: (SanityConfig, t.List[str], PythonConfig) -> t.List[SanityMessage]
+ """Test the specified paths using the given Python and return the results."""
cmd = [
- python,
+ python.path,
os.path.join(SANITY_ROOT, 'yamllint', 'yamllinter.py'),
]
diff --git a/test/lib/ansible_test/_internal/commands/shell/__init__.py b/test/lib/ansible_test/_internal/commands/shell/__init__.py
index 52d3008e05..7364819e0c 100644
--- a/test/lib/ansible_test/_internal/commands/shell/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/shell/__init__.py
@@ -1,9 +1,12 @@
"""Open a shell prompt inside an ansible-test environment."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-from ...util_common import (
- run_command,
+import os
+import typing as t
+
+from ...util import (
+ ApplicationError,
+ display,
)
from ...config import (
@@ -12,19 +15,75 @@ from ...config import (
from ...executor import (
Delegate,
- create_shell_command,
- install_command_requirements,
)
+from ...connections import (
+ LocalConnection,
+ SshConnection,
+)
+
+from ...host_profiles import (
+ ControllerProfile,
+ PosixProfile,
+ SshTargetHostProfile,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
+from ...host_configs import (
+ ControllerConfig,
+ OriginConfig,
+)
+
+
+def command_shell(args): # type: (ShellConfig) -> None
+ """Entry point for the `shell` command."""
+ if args.raw and isinstance(args.targets[0], ControllerConfig):
+ raise ApplicationError('The --raw option has no effect on the controller.')
+
+ host_state = prepare_profiles(args, skip_setup=args.raw) # shell
-def command_shell(args):
- """
- :type args: ShellConfig
- """
if args.delegate:
- raise Delegate()
+ raise Delegate(host_state=host_state)
+
+ if args.raw and not isinstance(args.controller, OriginConfig):
+ display.warning('The --raw option will only be applied to the target.')
+
+ target_profile = t.cast(SshTargetHostProfile, host_state.target_profiles[0])
+
+ if isinstance(target_profile, ControllerProfile):
+ # run the shell locally unless a target was requested
+ con = LocalConnection(args)
+ else:
+ # a target was requested, connect to it over SSH
+ con = target_profile.get_controller_target_connections()[0]
+
+ if isinstance(con, SshConnection) and args.raw:
+ cmd = []
+ elif isinstance(target_profile, PosixProfile):
+ cmd = []
+
+ if args.raw:
+ shell = 'sh' # shell required for non-ssh connection
+ else:
+ shell = 'bash'
+
+ python = target_profile.python # make sure the python interpreter has been initialized before opening a shell
+ display.info(f'Target Python {python.version} is at: {python.path}')
+
+ optional_vars = (
+ 'TERM', # keep backspace working
+ )
+
+ env = {name: os.environ[name] for name in optional_vars if name in os.environ}
+
+ if env:
+ cmd = ['/usr/bin/env'] + [f'{name}={value}' for name, value in env.items()]
- install_command_requirements(args)
+ cmd += [shell, '-i']
+ else:
+ cmd = []
- cmd = create_shell_command(['bash', '-i'])
- run_command(args, cmd)
+ con.run(cmd)
diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py
index c6d520acf0..4c27d3e57c 100644
--- a/test/lib/ansible_test/_internal/commands/units/__init__.py
+++ b/test/lib/ansible_test/_internal/commands/units/__init__.py
@@ -1,9 +1,16 @@
"""Execute unit tests using pytest."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
import sys
+import typing as t
+
+from ...constants import (
+ CONTROLLER_MIN_PYTHON_VERSION,
+ CONTROLLER_PYTHON_VERSIONS,
+ REMOTE_ONLY_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
from ...io import (
write_text_file,
@@ -13,19 +20,13 @@ from ...io import (
from ...util import (
ANSIBLE_TEST_DATA_ROOT,
display,
- get_available_python_versions,
is_subdir,
SubprocessError,
- SUPPORTED_PYTHON_VERSIONS,
- CONTROLLER_MIN_PYTHON_VERSION,
- CONTROLLER_PYTHON_VERSIONS,
- REMOTE_ONLY_PYTHON_VERSIONS,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_TARGET_ROOT,
)
from ...util_common import (
- intercept_command,
ResultType,
handle_layout_messages,
create_temp_dir,
@@ -33,7 +34,6 @@ from ...util_common import (
from ...ansible_util import (
ansible_environment,
- check_pyyaml,
get_ansible_python_path,
)
@@ -47,7 +47,7 @@ from ...config import (
)
from ...coverage_util import (
- coverage_context,
+ cover_python,
)
from ...data import (
@@ -58,13 +58,32 @@ from ...executor import (
AllTargetsSkipped,
Delegate,
get_changes_filter,
- install_command_requirements,
+)
+
+from ...python_requirements import (
+ install_requirements,
)
from ...content_config import (
get_content_config,
)
+from ...host_configs import (
+ PosixConfig,
+)
+
+from ...provisioning import (
+ prepare_profiles,
+)
+
+from ...pypi_proxy import (
+ configure_pypi_proxy,
+)
+
+from ...host_profiles import (
+ PosixProfile,
+)
+
class TestContext:
"""Contexts that unit tests run in based on the type of content."""
@@ -110,31 +129,68 @@ def command_units(args):
if not paths:
raise AllTargetsSkipped()
- if args.python and args.python in REMOTE_ONLY_PYTHON_VERSIONS:
- if args.python not in supported_remote_python_versions:
- display.warning('Python %s is not supported by this collection. Supported Python versions are: %s' % (
- args.python, ', '.join(content_config.python_versions)))
- raise AllTargetsSkipped()
+ targets = t.cast(t.List[PosixConfig], args.targets)
+ target_versions = {target.python.version: target for target in targets} # type: t.Dict[str, PosixConfig]
+ skipped_versions = args.host_settings.skipped_python_versions
+ warn_versions = []
+
+ # requested python versions that are remote-only and not supported by this collection
+ test_versions = [version for version in target_versions if version in REMOTE_ONLY_PYTHON_VERSIONS and version not in supported_remote_python_versions]
+
+ if test_versions:
+ for version in test_versions:
+ display.warning(f'Skipping unit tests on Python {version} because it is not supported by this collection.'
+ f' Supported Python versions are: {", ".join(content_config.python_versions)}')
+
+ warn_versions.extend(test_versions)
- if not remote_paths:
- display.warning('Python %s is only supported by module and module_utils unit tests, but none were selected.' % args.python)
+ if warn_versions == list(target_versions):
raise AllTargetsSkipped()
- if args.python and args.python not in supported_remote_python_versions and not controller_paths:
- display.warning('Python %s is not supported by this collection for modules/module_utils. Supported Python versions are: %s' % (
- args.python, ', '.join(supported_remote_python_versions)))
- raise AllTargetsSkipped()
+ if not remote_paths:
+ # all selected unit tests are controller tests
+
+ # requested python versions that are remote-only
+ test_versions = [version for version in target_versions if version in REMOTE_ONLY_PYTHON_VERSIONS and version not in warn_versions]
+
+ if test_versions:
+ for version in test_versions:
+ display.warning(f'Skipping unit tests on Python {version} because it is only supported by module/module_utils unit tests.'
+ ' No module/module_utils unit tests were selected.')
+
+ warn_versions.extend(test_versions)
+
+ if warn_versions == list(target_versions):
+ raise AllTargetsSkipped()
+
+ if not controller_paths:
+ # all selected unit tests are remote tests
+
+ # requested python versions that are not supported by remote tests for this collection
+ test_versions = [version for version in target_versions if version not in supported_remote_python_versions and version not in warn_versions]
+
+ if test_versions:
+ for version in test_versions:
+ display.warning(f'Skipping unit tests on Python {version} because it is not supported by module/module_utils unit tests of this collection.'
+ f' Supported Python versions are: {", ".join(supported_remote_python_versions)}')
+
+ warn_versions.extend(test_versions)
+
+ if warn_versions == list(target_versions):
+ raise AllTargetsSkipped()
+
+ host_state = prepare_profiles(args, targets_use_pypi=True) # units
if args.delegate:
- raise Delegate(require=changes, exclude=args.exclude)
+ raise Delegate(host_state=host_state, require=changes, exclude=args.exclude)
test_sets = []
- available_versions = sorted(get_available_python_versions().keys())
+ if args.requirements_mode != 'skip':
+ configure_pypi_proxy(args, host_state.controller_profile) # units
for version in SUPPORTED_PYTHON_VERSIONS:
- # run all versions unless version given, in which case run only that version
- if args.python and version != args.python_version:
+ if version not in target_versions and version not in skipped_versions:
continue
test_candidates = []
@@ -157,25 +213,30 @@ def command_units(args):
ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION=CONTROLLER_MIN_PYTHON_VERSION,
)
- test_candidates.append((test_context, version, paths, env))
+ test_candidates.append((test_context, paths, env))
if not test_candidates:
continue
- if not args.python and version not in available_versions:
- display.warning("Skipping unit tests on Python %s due to missing interpreter." % version)
+ if version in skipped_versions:
+ display.warning("Skipping unit tests on Python %s because it could not be found." % version)
continue
+ target_profiles = {profile.config.python.version: profile for profile in host_state.targets(PosixProfile)} # type: t.Dict[str, PosixProfile]
+ target_profile = target_profiles[version]
+
+ final_candidates = [(test_context, target_profile.python, paths, env) for test_context, paths, env in test_candidates]
+ controller = any(test_context == TestContext.controller for test_context, python, paths, env in final_candidates)
+
if args.requirements_mode != 'skip':
- install_command_requirements(args, version)
- check_pyyaml(args, version)
+ install_requirements(args, target_profile.python, ansible=controller, command=True) # units
- test_sets.extend(test_candidates)
+ test_sets.extend(final_candidates)
if args.requirements_mode == 'only':
sys.exit()
- for test_context, version, paths, env in test_sets:
+ for test_context, python, paths, env in test_sets:
cmd = [
'pytest',
'--boxed',
@@ -185,13 +246,13 @@ def command_units(args):
'yes' if args.color else 'no',
'-p', 'no:cacheprovider',
'-c', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest.ini'),
- '--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-%s-units.xml' % (version, test_context)),
+ '--junit-xml', os.path.join(ResultType.JUNIT.path, 'python%s-%s-units.xml' % (python.version, test_context)),
]
if not data_context().content.collection:
cmd.append('--durations=25')
- if version != '2.6':
+ if python.version != '2.6':
# added in pytest 4.5.0, which requires python 2.7+
cmd.append('--strict-markers')
@@ -215,11 +276,10 @@ def command_units(args):
cmd.extend(paths)
- display.info('Unit test %s with Python %s' % (test_context, version))
+ display.info('Unit test %s with Python %s' % (test_context, python.version))
try:
- with coverage_context(args):
- intercept_command(args, cmd, target_name=test_context, env=env, python_version=version)
+ cover_python(args, python, cmd, test_context, env)
except SubprocessError as ex:
# pytest exits with status code 5 when all tests are skipped, which isn't an error for our use case
if ex.status != 5:
diff --git a/test/lib/ansible_test/_internal/compat/__init__.py b/test/lib/ansible_test/_internal/compat/__init__.py
index e69de29bb2..e9cb68168d 100644
--- a/test/lib/ansible_test/_internal/compat/__init__.py
+++ b/test/lib/ansible_test/_internal/compat/__init__.py
@@ -0,0 +1,2 @@
+"""Nearly empty __init__.py to keep pylint happy."""
+from __future__ import annotations
diff --git a/test/lib/ansible_test/_internal/compat/packaging.py b/test/lib/ansible_test/_internal/compat/packaging.py
index e91d14a583..a38e1abc2b 100644
--- a/test/lib/ansible_test/_internal/compat/packaging.py
+++ b/test/lib/ansible_test/_internal/compat/packaging.py
@@ -1,12 +1,16 @@
"""Packaging compatibility."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
try:
- from packaging.specifiers import SpecifierSet
- from packaging.version import Version
+ from packaging import (
+ specifiers,
+ version,
+ )
+
+ SpecifierSet = specifiers.SpecifierSet
+ Version = version.Version
PACKAGING_IMPORT_ERROR = None
except ImportError as ex:
- SpecifierSet = None
- Version = None
+ SpecifierSet = None # pylint: disable=invalid-name
+ Version = None # pylint: disable=invalid-name
PACKAGING_IMPORT_ERROR = ex
diff --git a/test/lib/ansible_test/_internal/compat/yaml.py b/test/lib/ansible_test/_internal/compat/yaml.py
index 11740ce06b..daa5ef0ed4 100644
--- a/test/lib/ansible_test/_internal/compat/yaml.py
+++ b/test/lib/ansible_test/_internal/compat/yaml.py
@@ -1,6 +1,5 @@
"""PyYAML compatibility."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
from functools import (
partial,
diff --git a/test/lib/ansible_test/_internal/completion.py b/test/lib/ansible_test/_internal/completion.py
new file mode 100644
index 0000000000..25cc6367cc
--- /dev/null
+++ b/test/lib/ansible_test/_internal/completion.py
@@ -0,0 +1,226 @@
+"""Loading, parsing and storing of completion configurations."""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import os
+import typing as t
+
+from .constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from .util import (
+ ANSIBLE_TEST_DATA_ROOT,
+ read_lines_without_comments,
+)
+
+from .data import (
+ data_context,
+)
+
+
+@dataclasses.dataclass(frozen=True)
+class CompletionConfig(metaclass=abc.ABCMeta):
+ """Base class for completion configuration."""
+ name: str
+
+ @property
+ @abc.abstractmethod
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+
+
+@dataclasses.dataclass(frozen=True)
+class PosixCompletionConfig(CompletionConfig, metaclass=abc.ABCMeta):
+ """Base class for completion configuration of POSIX environments."""
+ @property
+ @abc.abstractmethod
+ def supported_pythons(self): # type: () -> t.List[str]
+ """Return a list of the supported Python versions."""
+
+ @abc.abstractmethod
+ def get_python_path(self, version): # type: (str) -> str
+ """Return the path of the requested Python version."""
+
+ def get_default_python(self, controller): # type: (bool) -> str
+ """Return the default Python version for a controller or target as specified."""
+ context_pythons = CONTROLLER_PYTHON_VERSIONS if controller else SUPPORTED_PYTHON_VERSIONS
+ version = [python for python in self.supported_pythons if python in context_pythons][0]
+ return version
+
+ @property
+ def controller_supported(self): # type: () -> bool
+ """True if at least one Python version is provided which supports the controller, otherwise False."""
+ return any(version in CONTROLLER_PYTHON_VERSIONS for version in self.supported_pythons)
+
+
+@dataclasses.dataclass(frozen=True)
+class PythonCompletionConfig(PosixCompletionConfig, metaclass=abc.ABCMeta):
+ """Base class for completion configuration of Python environments."""
+ python: str = ''
+ python_dir: str = '/usr/bin'
+
+ @property
+ def supported_pythons(self): # type: () -> t.List[str]
+ """Return a list of the supported Python versions."""
+ versions = self.python.split(',') if self.python else []
+ versions = [version for version in versions if version in SUPPORTED_PYTHON_VERSIONS]
+ return versions
+
+ def get_python_path(self, version): # type: (str) -> str
+ """Return the path of the requested Python version."""
+ return os.path.join(self.python_dir, f'python{version}')
+
+
+@dataclasses.dataclass(frozen=True)
+class RemoteCompletionConfig(CompletionConfig):
+ """Base class for completion configuration of remote environments provisioned through Ansible Core CI."""
+ provider: t.Optional[str] = None
+
+ @property
+ def platform(self):
+ """The name of the platform."""
+ return self.name.partition('/')[0]
+
+ @property
+ def version(self):
+ """The version of the platform."""
+ return self.name.partition('/')[2]
+
+ @property
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return not self.version
+
+ def __post_init__(self):
+ if not self.provider:
+ raise Exception(f'Remote completion entry "{self.name}" must provide a "provider" setting.')
+
+
+@dataclasses.dataclass(frozen=True)
+class InventoryCompletionConfig(CompletionConfig):
+ """Configuration for inventory files."""
+ def __init__(self): # type: () -> None
+ super().__init__(name='inventory')
+
+ @property
+ def is_default(self): # type: () -> bool
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
+
+@dataclasses.dataclass(frozen=True)
+class PosixSshCompletionConfig(PythonCompletionConfig):
+ """Configuration for a POSIX host reachable over SSH."""
+ def __init__(self, user, host): # type: (str, str) -> None
+ super().__init__(
+ name=f'{user}@{host}',
+ python=','.join(SUPPORTED_PYTHON_VERSIONS),
+ )
+
+ @property
+ def is_default(self): # type: () -> bool
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
+
+@dataclasses.dataclass(frozen=True)
+class DockerCompletionConfig(PythonCompletionConfig):
+ """Configuration for Docker containers."""
+ image: str = ''
+ seccomp: str = 'default'
+ placeholder: bool = False
+
+ @property
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
+ def __post_init__(self):
+ if not self.image:
+ raise Exception(f'Docker completion entry "{self.name}" must provide an "image" setting.')
+
+ if not self.supported_pythons and not self.placeholder:
+ raise Exception(f'Docker completion entry "{self.name}" must provide a "python" setting.')
+
+
+@dataclasses.dataclass(frozen=True)
+class NetworkRemoteCompletionConfig(RemoteCompletionConfig):
+ """Configuration for remote network platforms."""
+ collection: str = ''
+ connection: str = ''
+
+
+@dataclasses.dataclass(frozen=True)
+class PosixRemoteCompletionConfig(RemoteCompletionConfig, PythonCompletionConfig):
+ """Configuration for remote POSIX platforms."""
+ placeholder: bool = False
+
+ def __post_init__(self):
+ if not self.supported_pythons:
+ if self.version and not self.placeholder:
+ raise Exception(f'POSIX remote completion entry "{self.name}" must provide a "python" setting.')
+ else:
+ if not self.version:
+ raise Exception(f'POSIX remote completion entry "{self.name}" is a platform default and cannot provide a "python" setting.')
+
+
+@dataclasses.dataclass(frozen=True)
+class WindowsRemoteCompletionConfig(RemoteCompletionConfig):
+ """Configuration for remote Windows platforms."""
+
+
+TCompletionConfig = t.TypeVar('TCompletionConfig', bound=CompletionConfig)
+
+
+def load_completion(name, completion_type): # type: (str, t.Type[TCompletionConfig]) -> t.Dict[str, TCompletionConfig]
+ """Load the named completion entries, returning them in dictionary form using the specified completion type."""
+ lines = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name), remove_blank_lines=True)
+
+ if data_context().content.collection:
+ context = 'collection'
+ else:
+ context = 'ansible-core'
+
+ items = {name: data for name, data in [parse_completion_entry(line) for line in lines] if data.get('context', context) == context}
+
+ for item in items.values():
+ item.pop('context', None)
+ item.pop('placeholder', None)
+
+ completion = {name: completion_type(name=name, **data) for name, data in items.items()}
+
+ return completion
+
+
+def parse_completion_entry(value): # type: (str) -> t.Tuple[str, t.Dict[str, str]]
+ """Parse the given completion entry, returning the entry name and a dictionary of key/value settings."""
+ values = value.split()
+
+ name = values[0]
+ data = {kvp[0]: kvp[1] if len(kvp) > 1 else '' for kvp in [item.split('=', 1) for item in values[1:]]}
+
+ return name, data
+
+
+def filter_completion(
+ completion, # type: t.Dict[str, TCompletionConfig]
+ controller_only=False, # type: bool
+ include_defaults=False, # type: bool
+): # type: (...) -> t.Dict[str, TCompletionConfig]
+ """Return a the given completion dictionary, filtering out configs which do not support the controller if controller_only is specified."""
+ if controller_only:
+ completion = {name: config for name, config in completion.items() if config.controller_supported}
+
+ if not include_defaults:
+ completion = {name: config for name, config in completion.items() if not config.is_default}
+
+ return completion
+
+
+DOCKER_COMPLETION = load_completion('docker', DockerCompletionConfig)
+REMOTE_COMPLETION = load_completion('remote', PosixRemoteCompletionConfig)
+WINDOWS_COMPLETION = load_completion('windows', WindowsRemoteCompletionConfig)
+NETWORK_COMPLETION = load_completion('network', NetworkRemoteCompletionConfig)
diff --git a/test/lib/ansible_test/_internal/config.py b/test/lib/ansible_test/_internal/config.py
index 8ebfca0615..6ee3d3eb4c 100644
--- a/test/lib/ansible_test/_internal/config.py
+++ b/test/lib/ansible_test/_internal/config.py
@@ -1,22 +1,18 @@
"""Configuration classes."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import enum
import os
import sys
-
-from . import types as t
+import typing as t
from .util import (
- find_python,
- generate_pip_command,
- ApplicationError,
+ display,
+ verify_sys_executable,
+ version_to_str,
)
from .util_common import (
- docker_qualify_image,
- get_docker_completion,
- get_remote_completion,
CommonConfig,
)
@@ -28,11 +24,27 @@ from .data import (
data_context,
)
-try:
- # noinspection PyTypeChecker
- TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound='IntegrationConfig')
-except AttributeError:
- TIntegrationConfig = None # pylint: disable=invalid-name
+from .host_configs import (
+ ControllerConfig,
+ ControllerHostConfig,
+ HostConfig,
+ HostSettings,
+ OriginConfig,
+ PythonConfig,
+ VirtualPythonConfig,
+)
+
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+
+
+class TerminateMode(enum.Enum):
+ """When to terminate instances."""
+ ALWAYS = enum.auto()
+ NEVER = enum.auto()
+ SUCCESS = enum.auto()
+
+ def __str__(self):
+ return self.name.lower()
class ParsedRemote:
@@ -60,113 +72,122 @@ class ParsedRemote:
class EnvironmentConfig(CommonConfig):
"""Configuration common to all commands which execute in an environment."""
- def __init__(self, args, command):
- """
- :type args: any
- :type command: str
- """
- super(EnvironmentConfig, self).__init__(args, command)
+ def __init__(self, args, command): # type: (t.Any, str) -> None
+ super().__init__(args, command)
- self.pypi_endpoint = args.pypi_endpoint # type: str
+ self.host_settings = args.host_settings # type: HostSettings
+ self.host_path = args.host_path # type: t.Optional[str]
+ self.containers = args.containers # type: t.Optional[str]
self.pypi_proxy = args.pypi_proxy # type: bool
+ self.pypi_endpoint = args.pypi_endpoint # type: t.Optional[str]
- self.local = args.local is True
- self.venv = args.venv
- self.venv_system_site_packages = args.venv_system_site_packages
-
- self.python = args.python if 'python' in args else None # type: str
-
- self.docker = docker_qualify_image(args.docker) # type: str
- self.docker_raw = args.docker # type: str
- self.remote = args.remote # type: str
-
- if self.remote:
- self.parsed_remote = ParsedRemote.parse(self.remote)
+ # Set by check_controller_python once HostState has been created by prepare_profiles.
+ # This is here for convenience, to avoid needing to pass HostState to some functions which already have access to EnvironmentConfig.
+ self.controller_python = None # type: t.Optional[PythonConfig]
+ """
+ The Python interpreter used by the controller.
+ Only available after delegation has been performed or skipped (if delegation is not required).
+ """
- if not self.parsed_remote or not self.parsed_remote.platform or not self.parsed_remote.version:
- raise ApplicationError('Unrecognized remote "%s" syntax. Use "platform/version" or "arch/platform/version".' % self.remote)
+ if self.host_path:
+ self.delegate = False
else:
- self.parsed_remote = None
-
- self.docker_privileged = args.docker_privileged if 'docker_privileged' in args else False # type: bool
- self.docker_pull = args.docker_pull if 'docker_pull' in args else False # type: bool
- self.docker_keep_git = args.docker_keep_git if 'docker_keep_git' in args else False # type: bool
- self.docker_seccomp = args.docker_seccomp if 'docker_seccomp' in args else None # type: str
- self.docker_memory = args.docker_memory if 'docker_memory' in args else None
- self.docker_terminate = args.docker_terminate if 'docker_terminate' in args else None # type: str
- self.docker_network = args.docker_network if 'docker_network' in args else None # type: str
+ self.delegate = (
+ not isinstance(self.controller, OriginConfig)
+ or isinstance(self.controller.python, VirtualPythonConfig)
+ or self.controller.python.version != version_to_str(sys.version_info[:2])
+ or verify_sys_executable(self.controller.python.path)
+ )
- if self.docker_seccomp is None:
- self.docker_seccomp = get_docker_completion().get(self.docker_raw, {}).get('seccomp', 'default')
+ self.docker_network = args.docker_network # type: t.Optional[str]
+ self.docker_terminate = args.docker_terminate # type: t.Optional[TerminateMode]
- self.remote_stage = args.remote_stage # type: str
- self.remote_provider = args.remote_provider # type: str
self.remote_endpoint = args.remote_endpoint # type: t.Optional[str]
- self.remote_terminate = args.remote_terminate # type: str
-
- if self.remote_provider == 'default':
- self.remote_provider = None
+ self.remote_stage = args.remote_stage # type: t.Optional[str]
+ self.remote_terminate = args.remote_terminate # type: t.Optional[TerminateMode]
self.requirements = args.requirements # type: bool
- if self.python == 'default':
- self.python = None
+ self.delegate_args = [] # type: t.List[str]
- actual_major_minor = '.'.join(str(i) for i in sys.version_info[:2])
+ def host_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """Add the host files to the payload file list."""
+ config = self
- self.python_version = self.python or actual_major_minor
- self.python_interpreter = args.python_interpreter
+ if config.host_path:
+ settings_path = os.path.join(config.host_path, 'settings.dat')
+ state_path = os.path.join(config.host_path, 'state.dat')
- self.pip_check = args.pip_check
+ files.append((os.path.abspath(settings_path), settings_path))
+ files.append((os.path.abspath(state_path), state_path))
- self.delegate = self.docker or self.remote or self.venv
- self.delegate_args = [] # type: t.List[str]
+ data_context().register_payload_callback(host_callback)
- if self.delegate:
- self.requirements = True
+ if args.docker_no_pull:
+ display.warning('The --docker-no-pull option is deprecated and has no effect. It will be removed in a future version of ansible-test.')
- self.containers = args.containers # type: t.Optional[t.Dict[str, t.Dict[str, t.Dict[str, t.Any]]]]
+ if args.no_pip_check:
+ display.warning('The --no-pip-check option is deprecated and has no effect. It will be removed in a future version of ansible-test.')
- if self.get_delegated_completion().get('pip-check', 'enabled') == 'disabled':
- self.pip_check = False
+ @property
+ def controller(self): # type: () -> ControllerHostConfig
+ """Host configuration for the controller."""
+ return self.host_settings.controller
- if args.check_python and args.check_python != actual_major_minor:
- raise ApplicationError('Running under Python %s instead of Python %s as expected.' % (actual_major_minor, args.check_python))
+ @property
+ def targets(self): # type: () -> t.List[HostConfig]
+ """Host configuration for the targets."""
+ return self.host_settings.targets
- if self.docker_keep_git:
- def git_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
- """Add files from the content root .git directory to the payload file list."""
- for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
- paths = [os.path.join(dirpath, filename) for filename in filenames]
- files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)
+ def only_target(self, target_type): # type: (t.Type[THostConfig]) -> THostConfig
+ """
+ Return the host configuration for the target.
+ Requires that there is exactly one target of the specified type.
+ """
+ targets = list(self.targets)
- data_context().register_payload_callback(git_callback)
+ if len(targets) != 1:
+ raise Exception('There must be exactly one target.')
- @property
- def python_executable(self):
+ target = targets.pop()
+
+ if not isinstance(target, target_type):
+ raise Exception(f'Target is {type(target)} instead of {target_type}.')
+
+ return target
+
+ def only_targets(self, target_type): # type: (t.Type[THostConfig]) -> t.List[THostConfig]
"""
- :rtype: str
+ Return a list of target host configurations.
+ Requires that there are one or more targets, all of the specified type.
"""
- return find_python(self.python_version)
+ if not self.targets:
+ raise Exception('There must be one or more targets.')
+
+ for target in self.targets:
+ if not isinstance(target, target_type):
+ raise Exception(f'Target is {type(target)} instead of {target_type}.')
+
+ return self.targets
@property
- def pip_command(self):
+ def target_type(self): # type: () -> t.Type[HostConfig]
"""
- :rtype: list[str]
+ The true type of the target(s).
+ If the target is the controller, the controller type is returned.
+ Requires at least one target, and all targets must be of the same type.
"""
- return generate_pip_command(self.python_executable)
+ target_types = set(type(target) for target in self.targets)
- def get_delegated_completion(self):
- """Returns a dictionary of settings specific to the selected delegation system, if any. Otherwise returns an empty dictionary.
- :rtype: dict[str, str]
- """
- if self.docker:
- return get_docker_completion().get(self.docker_raw, {})
+ if len(target_types) != 1:
+ raise Exception('There must be one or more targets, all of the same type.')
+
+ target_type = target_types.pop()
- if self.remote:
- return get_remote_completion().get(self.remote, {})
+ if issubclass(target_type, ControllerConfig):
+ target_type = type(self.controller)
- return {}
+ return target_type
class TestConfig(EnvironmentConfig):
@@ -176,12 +197,10 @@ class TestConfig(EnvironmentConfig):
:type args: any
:type command: str
"""
- super(TestConfig, self).__init__(args, command)
+ super().__init__(args, command)
self.coverage = args.coverage # type: bool
- self.coverage_label = args.coverage_label # type: str
self.coverage_check = args.coverage_check # type: bool
- self.coverage_config_base_path = None # type: t.Optional[str]
self.include = args.include or [] # type: t.List[str]
self.exclude = args.exclude or [] # type: t.List[str]
self.require = args.require or [] # type: t.List[str]
@@ -196,9 +215,9 @@ class TestConfig(EnvironmentConfig):
self.changed_path = args.changed_path # type: t.List[str]
self.base_branch = args.base_branch # type: str
- self.lint = args.lint if 'lint' in args else False # type: bool
- self.junit = args.junit if 'junit' in args else False # type: bool
- self.failure_ok = args.failure_ok if 'failure_ok' in args else False # type: bool
+ self.lint = getattr(args, 'lint', False) # type: bool
+ self.junit = getattr(args, 'junit', False) # type: bool
+ self.failure_ok = getattr(args, 'failure_ok', False) # type: bool
self.metadata = Metadata.from_file(args.metadata) if args.metadata else Metadata()
self.metadata_path = None
@@ -210,7 +229,7 @@ class TestConfig(EnvironmentConfig):
"""Add the metadata file to the payload file list."""
config = self
- if self.metadata_path:
+ if config.metadata_path:
files.append((os.path.abspath(config.metadata_path), config.metadata_path))
data_context().register_payload_callback(metadata_callback)
@@ -222,7 +241,7 @@ class ShellConfig(EnvironmentConfig):
"""
:type args: any
"""
- super(ShellConfig, self).__init__(args, 'shell')
+ super().__init__(args, 'shell')
self.raw = args.raw # type: bool
@@ -233,15 +252,26 @@ class SanityConfig(TestConfig):
"""
:type args: any
"""
- super(SanityConfig, self).__init__(args, 'sanity')
+ super().__init__(args, 'sanity')
self.test = args.test # type: t.List[str]
self.skip_test = args.skip_test # type: t.List[str]
self.list_tests = args.list_tests # type: bool
self.allow_disabled = args.allow_disabled # type: bool
self.enable_optional_errors = args.enable_optional_errors # type: bool
+ self.keep_git = args.keep_git
+
self.info_stderr = self.lint
+ if self.keep_git:
+ def git_callback(files): # type: (t.List[t.Tuple[str, str]]) -> None
+ """Add files from the content root .git directory to the payload file list."""
+ for dirpath, _dirnames, filenames in os.walk(os.path.join(data_context().content.root, '.git')):
+ paths = [os.path.join(dirpath, filename) for filename in filenames]
+ files.extend((path, os.path.relpath(path, data_context().content.root)) for path in paths)
+
+ data_context().register_payload_callback(git_callback)
+
class IntegrationConfig(TestConfig):
"""Configuration for the integration command."""
@@ -250,7 +280,7 @@ class IntegrationConfig(TestConfig):
:type args: any
:type command: str
"""
- super(IntegrationConfig, self).__init__(args, command)
+ super().__init__(args, command)
self.start_at = args.start_at # type: str
self.start_at_task = args.start_at_task # type: str
@@ -272,9 +302,6 @@ class IntegrationConfig(TestConfig):
self.no_temp_workdir = args.no_temp_workdir
self.no_temp_unicode = args.no_temp_unicode
- if self.get_delegated_completion().get('temp-unicode', 'enabled') == 'disabled':
- self.no_temp_unicode = True
-
if self.list_targets:
self.explain = True
self.info_stderr = True
@@ -286,50 +313,40 @@ class IntegrationConfig(TestConfig):
if not os.path.exists(ansible_config_path):
# use the default empty configuration unless one has been provided
- ansible_config_path = super(IntegrationConfig, self).get_ansible_config()
+ ansible_config_path = super().get_ansible_config()
return ansible_config_path
+TIntegrationConfig = t.TypeVar('TIntegrationConfig', bound=IntegrationConfig)
+
+
class PosixIntegrationConfig(IntegrationConfig):
"""Configuration for the posix integration command."""
-
def __init__(self, args):
"""
:type args: any
"""
- super(PosixIntegrationConfig, self).__init__(args, 'integration')
+ super().__init__(args, 'integration')
class WindowsIntegrationConfig(IntegrationConfig):
"""Configuration for the windows integration command."""
-
def __init__(self, args):
"""
:type args: any
"""
- super(WindowsIntegrationConfig, self).__init__(args, 'windows-integration')
-
- self.windows = args.windows # type: t.List[str]
- self.inventory = args.inventory # type: str
-
- if self.windows:
- self.allow_destructive = True
+ super().__init__(args, 'windows-integration')
class NetworkIntegrationConfig(IntegrationConfig):
"""Configuration for the network integration command."""
-
def __init__(self, args):
"""
:type args: any
"""
- super(NetworkIntegrationConfig, self).__init__(args, 'network-integration')
+ super().__init__(args, 'network-integration')
- self.platform = args.platform # type: t.List[str]
- self.platform_collection = dict(args.platform_collection or []) # type: t.Dict[str, str]
- self.platform_connection = dict(args.platform_connection or []) # type: t.Dict[str, str]
- self.inventory = args.inventory # type: str
self.testcase = args.testcase # type: str
@@ -339,12 +356,12 @@ class UnitsConfig(TestConfig):
"""
:type args: any
"""
- super(UnitsConfig, self).__init__(args, 'units')
+ super().__init__(args, 'units')
self.collect_only = args.collect_only # type: bool
self.num_workers = args.num_workers # type: int
- self.requirements_mode = args.requirements_mode if 'requirements_mode' in args else ''
+ self.requirements_mode = getattr(args, 'requirements_mode', '') # type: str
if self.requirements_mode == 'only':
self.requirements = True
diff --git a/test/lib/ansible_test/_internal/connections.py b/test/lib/ansible_test/_internal/connections.py
new file mode 100644
index 0000000000..7835cd3a91
--- /dev/null
+++ b/test/lib/ansible_test/_internal/connections.py
@@ -0,0 +1,243 @@
+"""Connection abstraction for interacting with test hosts."""
+from __future__ import annotations
+
+import abc
+import functools
+import shlex
+import sys
+import tempfile
+import typing as t
+
+from .io import (
+ read_text_file,
+)
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .util import (
+ Display,
+ SubprocessError,
+ retry,
+)
+
+from .util_common import (
+ run_command,
+)
+
+from .docker_util import (
+ DockerInspect,
+ docker_exec,
+ docker_inspect,
+ docker_network_disconnect,
+)
+
+from .ssh import (
+ SshConnectionDetail,
+)
+
+from .become import (
+ Become,
+)
+
+
+class Connection(metaclass=abc.ABCMeta):
+ """Base class for connecting to a host."""
+ @abc.abstractmethod
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+
+ def extract_archive(self,
+ chdir, # type: str
+ src, # type: t.IO[bytes]
+ ):
+ """Extract the given archive file stream in the specified directory."""
+ # This will not work on AIX.
+ # However, AIX isn't supported as a controller, which is where this would be needed.
+ tar_cmd = ['tar', 'oxzf', '-', '-C', chdir]
+
+ retry(lambda: self.run(tar_cmd, stdin=src))
+
+ def create_archive(self,
+ chdir, # type: str
+ name, # type: str
+ dst, # type: t.IO[bytes]
+ exclude=None, # type: t.Optional[str]
+ ):
+ """Create the specified archive file stream from the specified directory, including the given name and optionally excluding the given name."""
+ tar_cmd = ['tar', 'cf', '-', '-C', chdir]
+ gzip_cmd = ['gzip']
+
+ if exclude:
+ # This will not work on AIX.
+ # However, AIX isn't supported as a controller, which is where this would be needed.
+ tar_cmd += ['--exclude', exclude]
+
+ tar_cmd.append(name)
+
+ # Using gzip to compress the archive allows this to work on all POSIX systems we support, including AIX.
+ commands = [tar_cmd, gzip_cmd]
+
+ sh_cmd = ['sh', '-c', ' | '.join(' '.join(shlex.quote(cmd) for cmd in command) for command in commands)]
+
+ retry(lambda: self.run(sh_cmd, stdout=dst))
+
+
+class LocalConnection(Connection):
+ """Connect to localhost."""
+ def __init__(self, args): # type: (EnvironmentConfig) -> None
+ self.args = args
+
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+ return run_command(
+ args=self.args,
+ cmd=command,
+ capture=capture,
+ data=data,
+ stdin=stdin,
+ stdout=stdout,
+ )
+
+
+class SshConnection(Connection):
+ """Connect to a host using SSH."""
+ def __init__(self, args, settings, become=None): # type: (EnvironmentConfig, SshConnectionDetail, t.Optional[Become]) -> None
+ self.args = args
+ self.settings = settings
+ self.become = become
+
+ self.options = ['-i', settings.identity_file]
+
+ ssh_options = dict(
+ BatchMode='yes',
+ StrictHostKeyChecking='no',
+ UserKnownHostsFile='/dev/null',
+ ServerAliveInterval=15,
+ ServerAliveCountMax=4,
+ )
+
+ for ssh_option in sorted(ssh_options):
+ self.options.extend(['-o', f'{ssh_option}={ssh_options[ssh_option]}'])
+
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+ options = list(self.options)
+
+ if self.become:
+ command = self.become.prepare_command(command)
+
+ options.append('-q')
+
+ if not data and not stdin and not stdout and sys.stdin.isatty():
+ options.append('-tt')
+
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-ssh-debug-', suffix='.log') as ssh_logfile:
+ options.extend(['-vvv', '-E', ssh_logfile.name])
+
+ if self.settings.port:
+ options.extend(['-p', str(self.settings.port)])
+
+ options.append(f'{self.settings.user}@{self.settings.host}')
+ options.append(' '.join(shlex.quote(cmd) for cmd in command))
+
+ return run_command(
+ args=self.args,
+ cmd=['ssh'] + options,
+ capture=capture,
+ data=data,
+ stdin=stdin,
+ stdout=stdout,
+ error_callback=functools.partial(self.capture_log_details, ssh_logfile.name),
+ )
+
+ @staticmethod
+ def capture_log_details(path, ex): # type: (str, SubprocessError) -> None
+ """Read the specified SSH debug log and add relevant details to the provided exception."""
+ if ex.status != 255:
+ return
+
+ markers = [
+ 'debug1: Connection Established',
+ 'debug1: Authentication successful',
+ 'debug1: Entering interactive session',
+ 'debug1: Sending command',
+ 'debug2: PTY allocation request accepted',
+ 'debug2: exec request accepted',
+ ]
+
+ file_contents = read_text_file(path)
+ messages = []
+
+ for line in reversed(file_contents.splitlines()):
+ messages.append(line)
+
+ if any(line.startswith(marker) for marker in markers):
+ break
+
+ message = '\n'.join(reversed(messages))
+
+ ex.message += '>>> SSH Debug Output\n'
+ ex.message += '%s%s\n' % (message.strip(), Display.clear)
+
+
+class DockerConnection(Connection):
+ """Connect to a host using Docker."""
+ def __init__(self, args, container_id, user=None): # type: (EnvironmentConfig, str, t.Optional[str]) -> None
+ self.args = args
+ self.container_id = container_id
+ self.user = user # type: t.Optional[str]
+
+ def run(self,
+ command, # type: t.List[str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ stdin=None, # type: t.Optional[t.IO[bytes]]
+ stdout=None, # type: t.Optional[t.IO[bytes]]
+ ): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run the specified command and return the result."""
+ options = []
+
+ if self.user:
+ options.extend(['--user', self.user])
+
+ if not data and not stdin and not stdout and sys.stdin.isatty():
+ options.append('-it')
+
+ return docker_exec(
+ args=self.args,
+ container_id=self.container_id,
+ cmd=command,
+ options=options,
+ capture=capture,
+ data=data,
+ stdin=stdin,
+ stdout=stdout,
+ )
+
+ def inspect(self): # type: () -> DockerInspect
+ """Inspect the container and return a DockerInspect instance with the results."""
+ return docker_inspect(self.args, self.container_id)
+
+ def disconnect_network(self, network): # type: (str) -> None
+ """Disconnect the container from the specified network."""
+ docker_network_disconnect(self.args, self.container_id, network)
diff --git a/test/lib/ansible_test/_internal/constants.py b/test/lib/ansible_test/_internal/constants.py
index f4307822a1..07455d8e8b 100644..120000
--- a/test/lib/ansible_test/_internal/constants.py
+++ b/test/lib/ansible_test/_internal/constants.py
@@ -1,10 +1 @@
-"""Constants used by ansible-test. Imports should not be used in this file."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True.
-# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time.
-SOFT_RLIMIT_NOFILE = 1024
-
-# File used to track the ansible-test test execution timeout.
-TIMEOUT_PATH = '.ansible-test-timeout.json'
+../_util/target/common/constants.py \ No newline at end of file
diff --git a/test/lib/ansible_test/_internal/containers.py b/test/lib/ansible_test/_internal/containers.py
index 935282faba..64c1d9ec76 100644
--- a/test/lib/ansible_test/_internal/containers.py
+++ b/test/lib/ansible_test/_internal/containers.py
@@ -1,19 +1,15 @@
"""High level functions for working with containers."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import atexit
import contextlib
+import enum
import json
import random
import time
import uuid
-
-from . import types as t
-
-from .encoding import (
- Text,
-)
+import threading
+import typing as t
from .util import (
ApplicationError,
@@ -30,6 +26,9 @@ from .util_common import (
from .config import (
EnvironmentConfig,
IntegrationConfig,
+ SanityConfig,
+ ShellConfig,
+ UnitsConfig,
WindowsIntegrationConfig,
)
@@ -42,9 +41,9 @@ from .docker_util import (
docker_rm,
docker_run,
docker_start,
- get_docker_command,
get_docker_container_id,
get_docker_host_ip,
+ require_docker,
)
from .ansible_util import (
@@ -67,8 +66,23 @@ from .ssh import (
generate_ssh_inventory,
)
+from .host_configs import (
+ ControllerConfig,
+ DockerConfig,
+ OriginConfig,
+ PosixSshConfig,
+ PythonConfig,
+ RemoteConfig,
+ WindowsInventoryConfig,
+)
+
+from .connections import (
+ SshConnection,
+)
+
# information about support containers provisioned by the current ansible-test instance
support_containers = {} # type: t.Dict[str, ContainerDescriptor]
+support_containers_mutex = threading.Lock()
class HostType:
@@ -78,6 +92,13 @@ class HostType:
managed = 'managed'
+class CleanupMode(enum.Enum):
+ """How container cleanup should be handled."""
+ YES = enum.auto()
+ NO = enum.auto()
+ INFO = enum.auto()
+
+
def run_support_container(
args, # type: EnvironmentConfig
context, # type: str
@@ -87,36 +108,33 @@ def run_support_container(
aliases=None, # type: t.Optional[t.List[str]]
start=True, # type: bool
allow_existing=False, # type: bool
- cleanup=None, # type: t.Optional[bool]
+ cleanup=None, # type: t.Optional[CleanupMode]
cmd=None, # type: t.Optional[t.List[str]]
env=None, # type: t.Optional[t.Dict[str, str]]
+ options=None, # type: t.Optional[t.List[str]]
+ publish_ports=True, # type: bool
): # type: (...) -> ContainerDescriptor
"""
Start a container used to support tests, but not run them.
Containers created this way will be accessible from tests.
"""
- if name in support_containers:
- raise Exception('Container already defined: %s' % name)
-
# SSH is required for publishing ports, as well as modifying the hosts file.
# Initializing the SSH key here makes sure it is available for use after delegation.
SshKey(args)
aliases = aliases or [sanitize_host_name(name)]
+ docker_command = require_docker().command
current_container_id = get_docker_container_id()
- publish_ports = True
- docker_command = get_docker_command().command
-
if docker_command == 'docker':
- if args.docker:
+ if isinstance(args.controller, DockerConfig) and all(isinstance(target, (ControllerConfig, DockerConfig)) for target in args.targets):
publish_ports = False # publishing ports is not needed when test hosts are on the docker network
if current_container_id:
publish_ports = False # publishing ports is pointless if already running in a docker container
- options = ['--name', name]
+ options = (options or []) + ['--name', name]
if start:
options.append('-d')
@@ -165,7 +183,7 @@ def run_support_container(
existing = False
if cleanup is None:
- cleanup = not existing
+ cleanup = CleanupMode.INFO if existing else CleanupMode.YES
descriptor = ContainerDescriptor(
image,
@@ -181,24 +199,42 @@ def run_support_container(
env,
)
- if not support_containers:
- atexit.register(cleanup_containers, args)
+ with support_containers_mutex:
+ if name in support_containers:
+ raise Exception(f'Container already defined: {name}')
+
+ if not support_containers:
+ atexit.register(cleanup_containers, args)
+
+ support_containers[name] = descriptor
- support_containers[name] = descriptor
+ display.info(f'Adding "{name}" to container database.')
+
+ if start:
+ descriptor.register(args)
return descriptor
def get_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase
"""Return the current container database, creating it as needed, or returning the one provided on the command line through delegation."""
- if not args.containers:
- args.containers = create_container_database(args)
- elif isinstance(args.containers, (str, bytes, Text)):
- args.containers = ContainerDatabase.from_dict(json.loads(args.containers))
+ try:
+ return get_container_database.database
+ except AttributeError:
+ pass
+
+ if args.containers:
+ display.info('Parsing container database.', verbosity=1)
+ database = ContainerDatabase.from_dict(json.loads(args.containers))
+ else:
+ display.info('Creating container database.', verbosity=1)
+ database = create_container_database(args)
+
+ display.info('>>> Container Database\n%s' % json.dumps(database.to_dict(), indent=4, sort_keys=True), verbosity=3)
- display.info('>>> Container Database\n%s' % json.dumps(args.containers.to_dict(), indent=4, sort_keys=True), verbosity=3)
+ get_container_database.database = database
- return args.containers
+ return get_container_database.database
class ContainerAccess:
@@ -285,9 +321,21 @@ class ContainerDatabase:
for access_name, contexts in self.data.items())
-def local_ssh(args): # type: (EnvironmentConfig) -> SshConnectionDetail
+def local_ssh(args, python): # type: (EnvironmentConfig, PythonConfig) -> SshConnectionDetail
"""Return SSH connection details for localhost, connecting as root to the default SSH port."""
- return SshConnectionDetail('localhost', 'localhost', None, 'root', SshKey(args).key, args.python_executable)
+ return SshConnectionDetail('localhost', 'localhost', None, 'root', SshKey(args).key, python.path)
+
+
+def root_ssh(ssh): # type: (SshConnection) -> SshConnectionDetail
+ """Return the SSH connection details from the given SSH connection. If become was specified, the user will be changed to `root`."""
+ settings = ssh.settings.__dict__.copy()
+
+ if ssh.become:
+ settings.update(
+ user='root',
+ )
+
+ return SshConnectionDetail(**settings)
def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDatabase
@@ -315,7 +363,7 @@ def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDa
ports=container.ports,
forwards=None,
)
- elif get_docker_command().command == 'podman':
+ elif require_docker().command == 'podman':
# published ports for rootless podman containers should be accessible from the host's IP
container_access = ContainerAccess(
host_ip=get_host_ip(),
@@ -338,9 +386,11 @@ def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDa
origin_context = origin.setdefault(container.context, {})
origin_context[name] = published_access
- if args.remote:
+ if isinstance(args.controller, RemoteConfig):
pass # SSH forwarding required
- elif args.docker or get_docker_container_id():
+ elif '-controller-' in name:
+ pass # hack to avoid exposing the controller container to the controller
+ elif isinstance(args.controller, DockerConfig) or (isinstance(args.controller, OriginConfig) and get_docker_container_id()):
if container_access:
control_context = control.setdefault(container.context, {})
control_context[name] = container_access
@@ -353,6 +403,23 @@ def create_container_database(args): # type: (EnvironmentConfig) -> ContainerDa
control_context = control.setdefault(container.context, {})
control_context[name] = published_access
+ if issubclass(args.target_type, (RemoteConfig, WindowsInventoryConfig, PosixSshConfig)):
+ pass # SSH forwarding required
+ elif '-controller-' in name or '-target-' in name:
+ pass # hack to avoid exposing the controller and target containers to the target
+ elif issubclass(args.target_type, DockerConfig) or (issubclass(args.target_type, OriginConfig) and get_docker_container_id()):
+ if container_access:
+ managed_context = managed.setdefault(container.context, {})
+ managed_context[name] = container_access
+ else:
+ raise Exception('Missing IP address for container: %s' % name)
+ else:
+ if not published_access:
+ raise Exception('Missing published ports for container: %s' % name)
+
+ managed_context = managed.setdefault(container.context, {})
+ managed_context[name] = published_access
+
data = {
HostType.origin: origin,
HostType.control: control,
@@ -388,8 +455,8 @@ def support_container_context(
ssh, # type: t.Optional[SshConnectionDetail]
): # type: (...) -> t.Optional[ContainerDatabase]
"""Create a context manager for integration tests that use support containers."""
- if not isinstance(args, IntegrationConfig):
- yield None # containers are only used for integration tests
+ if not isinstance(args, (IntegrationConfig, UnitsConfig, SanityConfig, ShellConfig)):
+ yield None # containers are only needed for commands that have targets (hosts or pythons)
return
containers = get_container_database(args)
@@ -425,6 +492,9 @@ def create_support_container_context(
for context_name, context in source.items():
for container_name, container in context.items():
+ if '-controller-' in container_name:
+ continue # hack to avoid exposing the controller container to the controller
+
for port, access_port in container.port_map():
container_map[(container.host_ip, access_port)] = (context_name, container_name, port)
@@ -475,7 +545,7 @@ class ContainerDescriptor:
publish_ports, # type: bool
running, # type: bool
existing, # type: bool
- cleanup, # type: bool
+ cleanup, # type: CleanupMode
env, # type: t.Optional[t.Dict[str, str]]
): # type: (...) -> None
self.image = image
@@ -495,6 +565,8 @@ class ContainerDescriptor:
"""Start the container. Used for containers which are created, but not started."""
docker_start(args, self.name)
+ self.register(args)
+
def register(self, args): # type: (EnvironmentConfig) -> SupportContainer
"""Record the container's runtime details. Must be used after the container has been started."""
if self.details:
@@ -580,9 +652,9 @@ def wait_for_file(args, # type: EnvironmentConfig
def cleanup_containers(args): # type: (EnvironmentConfig) -> None
"""Clean up containers."""
for container in support_containers.values():
- if container.cleanup:
+ if container.cleanup == CleanupMode.YES:
docker_rm(args, container.container_id)
- else:
+ elif container.cleanup == CleanupMode.INFO:
display.notice('Remember to run `docker rm -f %s` when finished testing.' % container.name)
@@ -605,6 +677,7 @@ def create_hosts_entries(context): # type: (t.Dict[str, ContainerAccess]) -> t.
def create_container_hooks(
args, # type: IntegrationConfig
+ control_connections, # type: t.List[SshConnectionDetail]
managed_connections, # type: t.Optional[t.List[SshConnectionDetail]]
): # type: (...) -> t.Tuple[t.Optional[t.Callable[[IntegrationTarget], None]], t.Optional[t.Callable[[IntegrationTarget], None]]]
"""Return pre and post target callbacks for enabling and disabling container access for each test target."""
@@ -628,13 +701,13 @@ def create_container_hooks(
control_state = {}
managed_state = {}
- control_connections = [local_ssh(args)]
-
def pre_target(target):
+ """Configure hosts for SSH port forwarding required by the specified target."""
forward_ssh_ports(args, control_connections, '%s_hosts_prepare.yml' % control_type, control_state, target, HostType.control, control_contexts)
forward_ssh_ports(args, managed_connections, '%s_hosts_prepare.yml' % managed_type, managed_state, target, HostType.managed, managed_contexts)
def post_target(target):
+ """Clean up previously configured SSH port forwarding which was required by the specified target."""
cleanup_ssh_ports(args, control_connections, '%s_hosts_restore.yml' % control_type, control_state, target, HostType.control)
cleanup_ssh_ports(args, managed_connections, '%s_hosts_restore.yml' % managed_type, managed_state, target, HostType.managed)
else:
@@ -682,6 +755,9 @@ def forward_ssh_ports(
return
if not ssh_connections:
+ if args.explain:
+ return
+
raise Exception('The %s host was not pre-configured for container access and SSH forwarding is not available.' % host_type)
redirects = [] # type: t.List[t.Tuple[int, str, int]]
diff --git a/test/lib/ansible_test/_internal/content_config.py b/test/lib/ansible_test/_internal/content_config.py
index 7802dc355e..10574cc0b6 100644
--- a/test/lib/ansible_test/_internal/content_config.py
+++ b/test/lib/ansible_test/_internal/content_config.py
@@ -1,10 +1,13 @@
"""Content configuration."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
+import typing as t
-from . import types as t
+from .constants import (
+ CONTROLLER_PYTHON_VERSIONS,
+ SUPPORTED_PYTHON_VERSIONS,
+)
from .compat.packaging import (
PACKAGING_IMPORT_ERROR,
@@ -23,10 +26,9 @@ from .io import (
from .util import (
ApplicationError,
- CONTROLLER_PYTHON_VERSIONS,
- SUPPORTED_PYTHON_VERSIONS,
display,
str_to_version,
+ cache,
)
from .data import (
@@ -47,7 +49,7 @@ class BaseConfig:
class ModulesConfig(BaseConfig):
"""Configuration for modules."""
def __init__(self, data): # type: (t.Any) -> None
- super(ModulesConfig, self).__init__(data)
+ super().__init__(data)
python_requires = data.get('python_requires', MISSING)
@@ -62,7 +64,7 @@ class ModulesConfig(BaseConfig):
class ContentConfig(BaseConfig):
"""Configuration for all content."""
def __init__(self, data): # type: (t.Any) -> None
- super(ContentConfig, self).__init__(data)
+ super().__init__(data)
# Configuration specific to modules/module_utils.
self.modules = ModulesConfig(data.get('modules', {}))
@@ -103,17 +105,13 @@ def load_config(path): # type: (str) -> t.Optional[ContentConfig]
return config
+@cache
def get_content_config(): # type: () -> ContentConfig
"""
Parse and return the content configuration (if any) for the current collection.
For ansible-core, a default configuration is used.
Results are cached.
"""
- try:
- return get_content_config.config
- except AttributeError:
- pass
-
collection_config_path = 'tests/config.yml'
config = None
@@ -128,8 +126,6 @@ def get_content_config(): # type: () -> ContentConfig
),
))
- get_content_config.config = config
-
if not config.modules.python_versions:
raise ApplicationError('This collection does not declare support for modules/module_utils on any known Python version.\n'
'Ansible supports modules/module_utils on Python versions: %s\n'
diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py
index aad7d1da24..04ce8b01ef 100644
--- a/test/lib/ansible_test/_internal/core_ci.py
+++ b/test/lib/ansible_test/_internal/core_ci.py
@@ -1,6 +1,5 @@
"""Access Ansible Core CI remote services."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
@@ -9,8 +8,7 @@ import traceback
import uuid
import errno
import time
-
-from . import types as t
+import typing as t
from .http import (
HttpClient,
@@ -29,6 +27,7 @@ from .util import (
ApplicationError,
display,
ANSIBLE_TEST_TARGET_ROOT,
+ mutex,
)
from .util_common import (
@@ -41,7 +40,6 @@ from .config import (
)
from .ci import (
- AuthContext,
get_ci_provider,
)
@@ -54,58 +52,20 @@ class AnsibleCoreCI:
"""Client for Ansible Core CI services."""
DEFAULT_ENDPOINT = 'https://ansible-core-ci.testing.ansible.com'
- # Assign a default provider for each VM platform supported.
- # This is used to determine the provider from the platform when no provider is specified.
- # The keys here also serve as the list of providers which users can select from the command line.
- #
- # Entries can take one of two formats:
- # {platform}
- # {platform} arch={arch}
- #
- # Entries with an arch are only used as a default if the value for --remote-arch matches the {arch} specified.
- # This allows arch specific defaults to be distinct from the default used when no arch is specified.
-
- PROVIDERS = dict(
- aws=(
- 'freebsd',
- 'ios',
- 'rhel',
- 'vyos',
- 'windows',
- ),
- azure=(
- ),
- ibmps=(
- 'aix',
- ),
- parallels=(
- 'macos',
- 'osx',
- ),
- )
-
- # Currently ansible-core-ci has no concept of arch selection. This effectively means each provider only supports one arch.
- # The list below identifies which platforms accept an arch, and which one. These platforms can only be used with the specified arch.
- PROVIDER_ARCHES = dict(
- )
-
- def __init__(self, args, platform, version, stage='prod', persist=True, load=True, provider=None, arch=None, internal=False):
+ def __init__(self, args, platform, version, provider, persist=True, load=True, suffix=None):
"""
:type args: EnvironmentConfig
:type platform: str
:type version: str
- :type stage: str
+ :type provider: str
:type persist: bool
:type load: bool
- :type provider: str | None
- :type arch: str | None
- :type internal: bool
+ :type suffix: str | None
"""
self.args = args
- self.arch = arch
self.platform = platform
self.version = version
- self.stage = stage
+ self.stage = args.remote_stage
self.client = HttpClient(args)
self.connection = None
self.instance_id = None
@@ -113,51 +73,13 @@ class AnsibleCoreCI:
self.default_endpoint = args.remote_endpoint or self.DEFAULT_ENDPOINT
self.retries = 3
self.ci_provider = get_ci_provider()
- self.auth_context = AuthContext()
-
- if self.arch:
- self.name = '%s-%s-%s' % (self.arch, self.platform, self.version)
- else:
- self.name = '%s-%s' % (self.platform, self.version)
-
- if provider:
- # override default provider selection (not all combinations are valid)
- self.provider = provider
- else:
- self.provider = None
-
- for candidate, platforms in self.PROVIDERS.items():
- choices = [
- platform,
- '%s arch=%s' % (platform, arch),
- ]
-
- if any(choice in platforms for choice in choices):
- # assign default provider based on platform
- self.provider = candidate
- break
-
- # If a provider has been selected, make sure the correct arch (or none) has been selected.
- if self.provider:
- required_arch = self.PROVIDER_ARCHES.get(self.provider)
-
- if self.arch != required_arch:
- if required_arch:
- if self.arch:
- raise ApplicationError('Provider "%s" requires the "%s" arch instead of "%s".' % (self.provider, required_arch, self.arch))
-
- raise ApplicationError('Provider "%s" requires the "%s" arch.' % (self.provider, required_arch))
+ self.provider = provider
+ self.name = '%s-%s' % (self.platform, self.version)
- raise ApplicationError('Provider "%s" does not support specification of an arch.' % self.provider)
+ if suffix:
+ self.name += '-' + suffix
self.path = os.path.expanduser('~/.ansible/test/instances/%s-%s-%s' % (self.name, self.provider, self.stage))
-
- if self.provider not in self.PROVIDERS and not internal:
- if self.arch:
- raise ApplicationError('Provider not detected for platform "%s" on arch "%s".' % (self.platform, self.arch))
-
- raise ApplicationError('Provider not detected for platform "%s" with no arch specified.' % self.platform)
-
self.ssh_key = SshKey(args)
if persist and load and self._load():
@@ -199,7 +121,7 @@ class AnsibleCoreCI:
@property
def available(self):
"""Return True if Ansible Core CI is supported."""
- return self.ci_provider.supports_core_ci_auth(self.auth_context)
+ return self.ci_provider.supports_core_ci_auth()
def start(self):
"""Start instance."""
@@ -208,7 +130,7 @@ class AnsibleCoreCI:
verbosity=1)
return None
- return self._start(self.ci_provider.prepare_core_ci_auth(self.auth_context))
+ return self._start(self.ci_provider.prepare_core_ci_auth())
def stop(self):
"""Stop instance."""
@@ -272,7 +194,7 @@ class AnsibleCoreCI:
running=True,
hostname='cloud.example.com',
port=12345,
- username='username',
+ username='root',
password='password' if self.platform == 'windows' else None,
)
else:
@@ -357,12 +279,7 @@ class AnsibleCoreCI:
return response.json()
- def _start_endpoint(self, data, headers):
- """
- :type data: dict[str, any]
- :type headers: dict[str, str]
- :rtype: HttpResponse
- """
+ def _start_endpoint(self, data, headers): # type: (t.Dict[str, t.Any], t.Dict[str, str]) -> HttpResponse
tries = self.retries
sleep = 15
@@ -481,7 +398,7 @@ class CoreHttpError(HttpError):
:type remote_message: str
:type remote_stack_trace: str
"""
- super(CoreHttpError, self).__init__(status, '%s%s' % (remote_message, remote_stack_trace))
+ super().__init__(status, '%s%s' % (remote_message, remote_stack_trace))
self.remote_message = remote_message
self.remote_stack_trace = remote_stack_trace
@@ -493,6 +410,7 @@ class SshKey:
KEY_NAME = 'id_%s' % KEY_TYPE
PUB_NAME = '%s.pub' % KEY_NAME
+ @mutex
def __init__(self, args):
"""
:type args: EnvironmentConfig
@@ -524,6 +442,15 @@ class SshKey:
self.pub_contents = read_text_file(self.pub).strip()
self.key_contents = read_text_file(self.key).strip()
+ @staticmethod
+ def get_relative_in_tree_private_key_path(): # type: () -> str
+ """Return the ansible-test SSH private key path relative to the content tree."""
+ temp_dir = ResultType.TMP.relative_path
+
+ key = os.path.join(temp_dir, SshKey.KEY_NAME)
+
+ return key
+
def get_in_tree_key_pair_paths(self): # type: () -> t.Optional[t.Tuple[str, str]]
"""Return the ansible-test SSH key pair paths from the content tree."""
temp_dir = ResultType.TMP.path
diff --git a/test/lib/ansible_test/_internal/coverage_util.py b/test/lib/ansible_test/_internal/coverage_util.py
index e5434231f1..e705db76e0 100644
--- a/test/lib/ansible_test/_internal/coverage_util.py
+++ b/test/lib/ansible_test/_internal/coverage_util.py
@@ -1,10 +1,10 @@
"""Utility code for facilitating collection of code coverage when running tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import contextlib
+import atexit
import os
import tempfile
+import typing as t
from .config import (
IntegrationConfig,
@@ -14,49 +14,120 @@ from .config import (
from .io import (
write_text_file,
+ make_dirs,
)
from .util import (
COVERAGE_CONFIG_NAME,
remove_tree,
+ sanitize_host_name,
)
from .data import (
data_context,
)
+from .util_common import (
+ intercept_python,
+ ResultType,
+)
+
+from .host_configs import (
+ DockerConfig,
+ HostConfig,
+ OriginConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ PythonConfig,
+)
-@contextlib.contextmanager
-def coverage_context(args): # type: (TestConfig) -> None
- """Content to set up and clean up code coverage configuration for tests."""
- coverage_setup(args)
- try:
- yield
- finally:
- coverage_cleanup(args)
+def cover_python(
+ args, # type: TestConfig
+ python, # type: PythonConfig
+ cmd, # type: t.List[str]
+ target_name, # type: str
+ env, # type: t.Dict[str, str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ cwd=None, # type: t.Optional[str]
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
+ """Run a command while collecting Python code coverage."""
+ if args.coverage:
+ env.update(get_coverage_environment(args, target_name, python.version))
+
+ return intercept_python(args, python, cmd, env, capture, data, cwd)
+
+
+def get_coverage_platform(config): # type: (HostConfig) -> str
+ """Return the platform label for the given host config."""
+ if isinstance(config, PosixRemoteConfig):
+ platform = f'remote-{sanitize_host_name(config.name)}'
+ elif isinstance(config, DockerConfig):
+ platform = f'docker-{sanitize_host_name(config.name)}'
+ elif isinstance(config, PosixSshConfig):
+ platform = f'ssh-{sanitize_host_name(config.host)}'
+ elif isinstance(config, OriginConfig):
+ platform = 'origin' # previous versions of ansible-test used "local-{python_version}"
+ else:
+ raise NotImplementedError(f'Coverage platform label not defined for type: {type(config)}')
+
+ return platform
+
+def get_coverage_environment(
+ args, # type: TestConfig
+ target_name, # type: str
+ version, # type: str
+): # type: (...) -> t.Dict[str, str]
+ """Return environment variables needed to collect code coverage."""
+ # unit tests, sanity tests and other special cases (localhost only)
+ # config is in a temporary directory
+ # results are in the source tree
+ config_file = get_coverage_config(args)
+ coverage_name = '='.join((args.command, target_name, get_coverage_platform(args.controller), f'python-{version}', 'coverage'))
+ coverage_dir = os.path.join(data_context().content.root, data_context().content.results_path, ResultType.COVERAGE.name)
+ coverage_file = os.path.join(coverage_dir, coverage_name)
-def coverage_setup(args): # type: (TestConfig) -> None
- """Set up code coverage configuration before running tests."""
- if not args.coverage:
- return
+ make_dirs(coverage_dir)
+
+ if args.coverage_check:
+ # cause the 'coverage' module to be found, but not imported or enabled
+ coverage_file = ''
+
+ # Enable code coverage collection on local Python programs (this does not include Ansible modules).
+ # Used by the injectors to support code coverage.
+ # Used by the pytest unit test plugin to support code coverage.
+ # The COVERAGE_FILE variable is also used directly by the 'coverage' module.
+ env = dict(
+ COVERAGE_CONF=config_file,
+ COVERAGE_FILE=coverage_file,
+ )
+
+ return env
+
+
+def get_coverage_config(args): # type: (TestConfig) -> str
+ """Return the path to the coverage config, creating the config if it does not already exist."""
+ try:
+ return get_coverage_config.path
+ except AttributeError:
+ pass
coverage_config = generate_coverage_config(args)
if args.explain:
- args.coverage_config_base_path = '/tmp/coverage-temp-dir'
+ temp_dir = '/tmp/coverage-temp-dir'
else:
- args.coverage_config_base_path = tempfile.mkdtemp()
+ temp_dir = tempfile.mkdtemp()
+ atexit.register(lambda: remove_tree(temp_dir))
- write_text_file(os.path.join(args.coverage_config_base_path, COVERAGE_CONFIG_NAME), coverage_config)
+ path = get_coverage_config.path = os.path.join(temp_dir, COVERAGE_CONFIG_NAME)
+ if not args.explain:
+ write_text_file(path, coverage_config)
-def coverage_cleanup(args): # type: (TestConfig) -> None
- """Clean up code coverage configuration after tests have finished."""
- if args.coverage_config_base_path and not args.explain:
- remove_tree(args.coverage_config_base_path)
- args.coverage_config_base_path = None
+ return path
def generate_coverage_config(args): # type: (TestConfig) -> str
diff --git a/test/lib/ansible_test/_internal/data.py b/test/lib/ansible_test/_internal/data.py
index 38ae6d210e..c3b2187ca2 100644
--- a/test/lib/ansible_test/_internal/data.py
+++ b/test/lib/ansible_test/_internal/data.py
@@ -1,10 +1,9 @@
"""Context information for the current invocation of ansible-test."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import dataclasses
import os
-
-from . import types as t
+import typing as t
from .util import (
ApplicationError,
@@ -14,6 +13,7 @@ from .util import (
ANSIBLE_TEST_ROOT,
ANSIBLE_SOURCE_ROOT,
display,
+ cache,
)
from .provider import (
@@ -53,7 +53,7 @@ class DataContext:
self.__source_providers = source_providers
self.__ansible_source = None # type: t.Optional[t.Tuple[t.Tuple[str, str], ...]]
- self.payload_callbacks = [] # type: t.List[t.Callable[t.List[t.Tuple[str, str]], None]]
+ self.payload_callbacks = [] # type: t.List[t.Callable[[t.List[t.Tuple[str, str]]], None]]
if content_path:
content = self.__create_content_layout(layout_providers, source_providers, content_path, False)
@@ -157,12 +157,13 @@ class DataContext:
return self.__ansible_source
- def register_payload_callback(self, callback): # type: (t.Callable[t.List[t.Tuple[str, str]], None]) -> None
+ def register_payload_callback(self, callback): # type: (t.Callable[[t.List[t.Tuple[str, str]]], None]) -> None
"""Register the given payload callback."""
self.payload_callbacks.append(callback)
-def data_init(): # type: () -> DataContext
+@cache
+def data_context(): # type: () -> DataContext
"""Initialize provider plugins."""
provider_types = (
'layout',
@@ -191,10 +192,51 @@ Current working directory: %s''' % ('\n'.join(options), os.getcwd()))
return context
-def data_context(): # type: () -> DataContext
- """Return the current data context."""
- try:
- return data_context.instance
- except AttributeError:
- data_context.instance = data_init()
- return data_context.instance
+@dataclasses.dataclass(frozen=True)
+class PluginInfo:
+ """Information about an Ansible plugin."""
+ plugin_type: str
+ name: str
+ paths: t.List[str]
+
+
+@cache
+def content_plugins():
+ """
+ Analyze content.
+ The primary purpose of this analysis is to facilitate mapping of integration tests to the plugin(s) they are intended to test.
+ """
+ plugins = {} # type: t.Dict[str, t.Dict[str, PluginInfo]]
+
+ for plugin_type, plugin_directory in data_context().content.plugin_paths.items():
+ plugin_paths = sorted(data_context().content.walk_files(plugin_directory))
+ plugin_directory_offset = len(plugin_directory.split(os.path.sep))
+
+ plugin_files = {}
+
+ for plugin_path in plugin_paths:
+ plugin_filename = os.path.basename(plugin_path)
+ plugin_parts = plugin_path.split(os.path.sep)[plugin_directory_offset:-1]
+
+ if plugin_filename == '__init__.py':
+ if plugin_type != 'module_utils':
+ continue
+ else:
+ plugin_name = os.path.splitext(plugin_filename)[0]
+
+ if data_context().content.is_ansible and plugin_type == 'modules':
+ plugin_name = plugin_name.lstrip('_')
+
+ plugin_parts.append(plugin_name)
+
+ plugin_name = '.'.join(plugin_parts)
+
+ plugin_files.setdefault(plugin_name, []).append(plugin_filename)
+
+ plugins[plugin_type] = {plugin_name: PluginInfo(
+ plugin_type=plugin_type,
+ name=plugin_name,
+ paths=paths,
+ ) for plugin_name, paths in plugin_files.items()}
+
+ return plugins
diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py
index 53b52e1ffa..48d13be576 100644
--- a/test/lib/ansible_test/_internal/delegation.py
+++ b/test/lib/ansible_test/_internal/delegation.py
@@ -1,84 +1,40 @@
"""Delegate test execution to another environment."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import contextlib
import json
import os
-import re
-import sys
import tempfile
-
-from . import types as t
+import typing as t
from .io import (
make_dirs,
- read_text_file,
-)
-
-from .executor import (
- create_shell_command,
- run_pypi_proxy,
- get_python_interpreter,
- get_python_version,
)
from .config import (
- TestConfig,
EnvironmentConfig,
IntegrationConfig,
- WindowsIntegrationConfig,
- NetworkIntegrationConfig,
- ShellConfig,
SanityConfig,
+ ShellConfig,
+ TestConfig,
UnitsConfig,
)
-from .core_ci import (
- AnsibleCoreCI,
- SshKey,
-)
-
-from .manage_ci import (
- ManagePosixCI,
- ManageWindowsCI,
- get_ssh_key_setup,
-)
-
from .util import (
- ApplicationError,
- common_environment,
+ SubprocessError,
display,
+ filter_args,
ANSIBLE_BIN_PATH,
- ANSIBLE_TEST_TARGET_ROOT,
ANSIBLE_LIB_ROOT,
ANSIBLE_TEST_ROOT,
- tempdir,
- SUPPORTED_PYTHON_VERSIONS,
)
from .util_common import (
- run_command,
ResultType,
- create_interpreter_wrapper,
- get_docker_completion,
- get_remote_completion,
-)
-
-from .docker_util import (
- docker_exec,
- docker_get,
- docker_inspect,
- docker_pull,
- docker_put,
- docker_rm,
- docker_run,
- docker_network_disconnect,
- get_docker_command,
- get_docker_hostname,
+ process_scoped_temporary_directory,
)
from .containers import (
- SshConnectionDetail,
support_container_context,
)
@@ -90,428 +46,204 @@ from .payload import (
create_payload,
)
-from .venv import (
- create_virtual_environment,
-)
-
from .ci import (
get_ci_provider,
)
+from .host_configs import (
+ OriginConfig,
+ PythonConfig,
+ VirtualPythonConfig,
+)
-def check_delegation_args(args):
- """
- :type args: CommonConfig
- """
- if not isinstance(args, EnvironmentConfig):
- return
-
- if args.docker:
- get_python_version(args, get_docker_completion(), args.docker_raw)
- elif args.remote:
- get_python_version(args, get_remote_completion(), args.remote)
-
-
-def delegate(args, exclude, require):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- :rtype: bool
- """
- if isinstance(args, TestConfig):
- args.metadata.ci_provider = get_ci_provider().code
-
- make_dirs(ResultType.TMP.path)
-
- with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
- args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
- args.metadata.to_file(args.metadata_path)
-
- try:
- return delegate_command(args, exclude, require)
- finally:
- args.metadata_path = None
- else:
- return delegate_command(args, exclude, require)
-
-
-def delegate_command(args, exclude, require):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- :rtype: bool
- """
- if args.venv:
- delegate_venv(args, exclude, require)
- return True
-
- if args.docker:
- delegate_docker(args, exclude, require)
- return True
-
- if args.remote:
- delegate_remote(args, exclude, require)
- return True
-
- return False
-
+from .connections import (
+ Connection,
+ DockerConnection,
+ SshConnection,
+ LocalConnection,
+)
-def delegate_venv(args, # type: EnvironmentConfig
- exclude, # type: t.List[str]
- require, # type: t.List[str]
- ): # type: (...) -> None
- """Delegate ansible-test execution to a virtual environment using venv or virtualenv."""
- if args.python:
- versions = (args.python_version,)
- else:
- versions = SUPPORTED_PYTHON_VERSIONS
+from .provisioning import (
+ HostState,
+)
- if args.venv_system_site_packages:
- suffix = '-ssp'
- else:
- suffix = ''
- venvs = dict((version, os.path.join(ResultType.TMP.path, 'delegation', 'python%s%s' % (version, suffix))) for version in versions)
- venvs = dict((version, path) for version, path in venvs.items() if create_virtual_environment(args, version, path, args.venv_system_site_packages))
+@contextlib.contextmanager
+def delegation_context(args, host_state): # type: (EnvironmentConfig, HostState) -> None
+ """Context manager for serialized host state during delegation."""
+ make_dirs(ResultType.TMP.path)
- if not venvs:
- raise ApplicationError('No usable virtual environment support found.')
+ # noinspection PyUnusedLocal
+ python = host_state.controller_profile.python # make sure the python interpreter has been initialized before serializing host state
+ del python
- options = {
- '--venv': 0,
- '--venv-system-site-packages': 0,
- }
+ with tempfile.TemporaryDirectory(prefix='host-', dir=ResultType.TMP.path) as host_dir:
+ args.host_settings.serialize(os.path.join(host_dir, 'settings.dat'))
+ host_state.serialize(os.path.join(host_dir, 'state.dat'))
- with tempdir() as inject_path:
- for version, path in venvs.items():
- create_interpreter_wrapper(os.path.join(path, 'bin', 'python'), os.path.join(inject_path, 'python%s' % version))
+ args.host_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(host_dir))
- python_interpreter = os.path.join(inject_path, 'python%s' % args.python_version)
+ try:
+ yield
+ finally:
+ args.host_path = None
- cmd = generate_command(args, python_interpreter, ANSIBLE_BIN_PATH, data_context().content.root, options, exclude, require)
+def delegate(args, host_state, exclude, require): # type: (EnvironmentConfig, HostState, t.List[str], t.List[str]) -> None
+ """Delegate execution of ansible-test to another environment."""
+ with delegation_context(args, host_state):
if isinstance(args, TestConfig):
- if args.coverage and not args.coverage_label:
- cmd += ['--coverage-label', 'venv']
-
- env = common_environment()
-
- with tempdir() as library_path:
- # expose ansible and ansible_test to the virtual environment (only required when running from an install)
- os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
- os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))
-
- env.update(
- PATH=inject_path + os.path.pathsep + env['PATH'],
- PYTHONPATH=library_path,
- )
-
- with support_container_context(args, None) as containers:
- if containers:
- cmd.extend(['--containers', json.dumps(containers.to_dict())])
+ args.metadata.ci_provider = get_ci_provider().code
- run_command(args, cmd, env=env)
-
-
-def delegate_docker(args, exclude, require):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- """
- get_docker_command(required=True) # fail early if docker is not available
+ make_dirs(ResultType.TMP.path)
- test_image = args.docker
- privileged = args.docker_privileged
+ with tempfile.NamedTemporaryFile(prefix='metadata-', suffix='.json', dir=ResultType.TMP.path) as metadata_fd:
+ args.metadata_path = os.path.join(ResultType.TMP.relative_path, os.path.basename(metadata_fd.name))
+ args.metadata.to_file(args.metadata_path)
- docker_pull(args, test_image)
+ try:
+ delegate_command(args, host_state, exclude, require)
+ finally:
+ args.metadata_path = None
+ else:
+ delegate_command(args, host_state, exclude, require)
- test_id = None
- success = False
- options = {
- '--docker': 1,
- '--docker-privileged': 0,
- '--docker-util': 1,
- }
+def delegate_command(args, host_state, exclude, require): # type: (EnvironmentConfig, HostState, t.List[str], t.List[str]) -> None
+ """Delegate execution based on the provided host state."""
+ con = host_state.controller_profile.get_origin_controller_connection()
+ working_directory = host_state.controller_profile.get_working_directory()
+ host_delegation = not isinstance(args.controller, OriginConfig)
- python_interpreter = get_python_interpreter(args, get_docker_completion(), args.docker_raw)
+ if host_delegation:
+ if data_context().content.collection:
+ content_root = os.path.join(working_directory, data_context().content.collection.directory)
+ else:
+ content_root = os.path.join(working_directory, 'ansible')
- pwd = '/root'
- ansible_root = os.path.join(pwd, 'ansible')
+ ansible_bin_path = os.path.join(working_directory, 'ansible', 'bin')
- if data_context().content.collection:
- content_root = os.path.join(pwd, data_context().content.collection.directory)
+ with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as payload_file:
+ create_payload(args, payload_file.name)
+ con.extract_archive(chdir=working_directory, src=payload_file)
else:
- content_root = ansible_root
+ content_root = working_directory
+ ansible_bin_path = ANSIBLE_BIN_PATH
- remote_results_root = os.path.join(content_root, data_context().content.results_path)
+ command = generate_command(args, host_state.controller_profile.python, ansible_bin_path, content_root, exclude, require)
- cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
+ if isinstance(con, SshConnection):
+ ssh = con.settings
+ else:
+ ssh = None
- if isinstance(args, TestConfig):
- if args.coverage and not args.coverage_label:
- image_label = args.docker_raw
- image_label = re.sub('[^a-zA-Z0-9]+', '-', image_label)
- cmd += ['--coverage-label', 'docker-%s' % image_label]
+ options = []
- if isinstance(args, IntegrationConfig):
+ if isinstance(args, IntegrationConfig) and args.controller.is_managed and all(target.is_managed for target in args.targets):
if not args.allow_destructive:
- cmd.append('--allow-destructive')
-
- cmd_options = []
+ options.append('--allow-destructive')
- if isinstance(args, ShellConfig) or (isinstance(args, IntegrationConfig) and args.debug_strategy):
- cmd_options.append('-it')
+ with support_container_context(args, ssh) as containers:
+ if containers:
+ options.extend(['--containers', json.dumps(containers.to_dict())])
- pypi_proxy_id, pypi_proxy_endpoint = run_pypi_proxy(args)
+ # Run unit tests unprivileged to prevent stray writes to the source tree.
+ # Also disconnect from the network once requirements have been installed.
+ if isinstance(args, UnitsConfig) and isinstance(con, DockerConnection):
+ pytest_user = 'pytest'
- if pypi_proxy_endpoint:
- cmd += ['--pypi-endpoint', pypi_proxy_endpoint]
-
- with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
- try:
- create_payload(args, local_source_fd.name)
-
- test_options = [
- '--detach',
- '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
- '--privileged=%s' % str(privileged).lower(),
+ writable_dirs = [
+ os.path.join(content_root, ResultType.JUNIT.relative_path),
+ os.path.join(content_root, ResultType.COVERAGE.relative_path),
]
- if args.docker_memory:
- test_options.extend([
- '--memory=%d' % args.docker_memory,
- '--memory-swap=%d' % args.docker_memory,
- ])
-
- docker_socket = '/var/run/docker.sock'
-
- if args.docker_seccomp != 'default':
- test_options += ['--security-opt', 'seccomp=%s' % args.docker_seccomp]
-
- if get_docker_hostname() != 'localhost' or os.path.exists(docker_socket):
- test_options += ['--volume', '%s:%s' % (docker_socket, docker_socket)]
+ con.run(['mkdir', '-p'] + writable_dirs)
+ con.run(['chmod', '777'] + writable_dirs)
+ con.run(['chmod', '755', working_directory])
+ con.run(['chmod', '644', os.path.join(content_root, args.metadata_path)])
+ con.run(['useradd', pytest_user, '--create-home'])
+ con.run(insert_options(command, options + ['--requirements-mode', 'only']))
- test_id = docker_run(args, test_image, options=test_options)
+ container = con.inspect()
+ networks = container.get_network_names()
- setup_sh = read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'docker.sh'))
-
- ssh_keys_sh = get_ssh_key_setup(SshKey(args))
-
- setup_sh += ssh_keys_sh
- shell = setup_sh.splitlines()[0][2:]
-
- docker_exec(args, test_id, [shell], data=setup_sh)
-
- # write temporary files to /root since /tmp isn't ready immediately on container start
- docker_put(args, test_id, local_source_fd.name, '/root/test.tgz')
- docker_exec(args, test_id, ['tar', 'oxzf', '/root/test.tgz', '-C', '/root'])
-
- # docker images are only expected to have a single python version available
- if isinstance(args, UnitsConfig) and not args.python:
- cmd += ['--python', 'default']
-
- # run unit tests unprivileged to prevent stray writes to the source tree
- # also disconnect from the network once requirements have been installed
- if isinstance(args, UnitsConfig):
- writable_dirs = [
- os.path.join(content_root, ResultType.JUNIT.relative_path),
- os.path.join(content_root, ResultType.COVERAGE.relative_path),
- ]
-
- docker_exec(args, test_id, ['mkdir', '-p'] + writable_dirs)
- docker_exec(args, test_id, ['chmod', '777'] + writable_dirs)
- docker_exec(args, test_id, ['chmod', '755', '/root'])
- docker_exec(args, test_id, ['chmod', '644', os.path.join(content_root, args.metadata_path)])
-
- docker_exec(args, test_id, ['useradd', 'pytest', '--create-home'])
-
- docker_exec(args, test_id, cmd + ['--requirements-mode', 'only'], options=cmd_options)
-
- container = docker_inspect(args, test_id)
- networks = container.get_network_names()
-
- if networks is not None:
- for network in networks:
- docker_network_disconnect(args, test_id, network)
- else:
- display.warning('Network disconnection is not supported (this is normal under podman). '
- 'Tests will not be isolated from the network. Network-related tests may misbehave.')
-
- cmd += ['--requirements-mode', 'skip']
-
- cmd_options += ['--user', 'pytest']
+ if networks is not None:
+ for network in networks:
+ con.disconnect_network(network)
+ else:
+ display.warning('Network disconnection is not supported (this is normal under podman). '
+ 'Tests will not be isolated from the network. Network-related tests may misbehave.')
- try:
- with support_container_context(args, None) as containers:
- if containers:
- cmd.extend(['--containers', json.dumps(containers.to_dict())])
+ options.extend(['--requirements-mode', 'skip'])
- docker_exec(args, test_id, cmd, options=cmd_options)
- # docker_exec will throw SubprocessError if not successful
- # If we make it here, all the prep work earlier and the docker_exec line above were all successful.
- success = True
- finally:
- local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
+ con.user = pytest_user
- remote_test_root = os.path.dirname(remote_results_root)
- remote_results_name = os.path.basename(remote_results_root)
- remote_temp_file = os.path.join('/root', remote_results_name + '.tgz')
+ success = False
- try:
- make_dirs(local_test_root) # make sure directory exists for collections which have no tests
-
- with tempfile.NamedTemporaryFile(prefix='ansible-result-', suffix='.tgz') as local_result_fd:
- docker_exec(args, test_id, ['tar', 'czf', remote_temp_file, '--exclude', ResultType.TMP.name, '-C', remote_test_root,
- remote_results_name])
- docker_get(args, test_id, remote_temp_file, local_result_fd.name)
- run_command(args, ['tar', 'oxzf', local_result_fd.name, '-C', local_test_root])
- except Exception as ex: # pylint: disable=broad-except
- if success:
- raise # download errors are fatal, but only if tests succeeded
-
- # handle download error here to avoid masking test failures
- display.warning('Failed to download results while handling an exception: %s' % ex)
+ try:
+ con.run(insert_options(command, options))
+ success = True
finally:
- if pypi_proxy_id:
- docker_rm(args, pypi_proxy_id)
-
- if test_id:
- if args.docker_terminate == 'always' or (args.docker_terminate == 'success' and success):
- docker_rm(args, test_id)
-
+ if host_delegation:
+ download_results(args, con, content_root, success)
-def delegate_remote(args, exclude, require):
- """
- :type args: EnvironmentConfig
- :type exclude: list[str]
- :type require: list[str]
- """
- remote = args.parsed_remote
- core_ci = AnsibleCoreCI(args, remote.platform, remote.version, stage=args.remote_stage, provider=args.remote_provider, arch=remote.arch)
- success = False
+def insert_options(command, options):
+ """Insert additional command line options into the given command and return the result."""
+ result = []
- ssh_options = []
- content_root = None
+ for arg in command:
+ if options and arg.startswith('--'):
+ result.extend(options)
+ options = None
- try:
- core_ci.start()
- core_ci.wait()
-
- python_version = get_python_version(args, get_remote_completion(), args.remote)
- python_interpreter = None
-
- if remote.platform == 'windows':
- # Windows doesn't need the ansible-test fluff, just run the SSH command
- manage = ManageWindowsCI(core_ci)
- manage.setup(python_version)
-
- cmd = ['powershell.exe']
- elif isinstance(args, ShellConfig) and args.raw:
- manage = ManagePosixCI(core_ci)
- manage.setup(python_version)
-
- cmd = create_shell_command(['sh'])
- else:
- manage = ManagePosixCI(core_ci)
- pwd = manage.setup(python_version)
+ result.append(arg)
- options = {
- '--remote': 1,
- }
+ return result
- python_interpreter = get_python_interpreter(args, get_remote_completion(), args.remote)
- ansible_root = os.path.join(pwd, 'ansible')
-
- if data_context().content.collection:
- content_root = os.path.join(pwd, data_context().content.collection.directory)
- else:
- content_root = ansible_root
-
- cmd = generate_command(args, python_interpreter, os.path.join(ansible_root, 'bin'), content_root, options, exclude, require)
-
- if isinstance(args, TestConfig):
- if args.coverage and not args.coverage_label:
- cmd += ['--coverage-label', 'remote-%s-%s' % (remote.platform, remote.version)]
+def download_results(args, con, content_root, success): # type: (EnvironmentConfig, Connection, str, bool) -> None
+ """Download results from a delegated controller."""
+ remote_results_root = os.path.join(content_root, data_context().content.results_path)
+ local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
- if isinstance(args, IntegrationConfig):
- if not args.allow_destructive:
- cmd.append('--allow-destructive')
+ remote_test_root = os.path.dirname(remote_results_root)
+ remote_results_name = os.path.basename(remote_results_root)
- # remote instances are only expected to have a single python version available
- if isinstance(args, UnitsConfig) and not args.python:
- cmd += ['--python', 'default']
+ make_dirs(local_test_root) # make sure directory exists for collections which have no tests
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-result-', suffix='.tgz') as result_file:
try:
- ssh_con = core_ci.connection
- ssh = SshConnectionDetail(core_ci.name, ssh_con.hostname, ssh_con.port, ssh_con.username, core_ci.ssh_key.key, python_interpreter)
-
- with support_container_context(args, ssh) as containers:
- if containers:
- cmd.extend(['--containers', json.dumps(containers.to_dict())])
-
- manage.ssh(cmd, ssh_options)
-
- success = True
- finally:
- download = False
-
- if remote.platform != 'windows':
- download = True
-
- if isinstance(args, ShellConfig):
- if args.raw:
- download = False
-
- if download and content_root:
- local_test_root = os.path.dirname(os.path.join(data_context().content.root, data_context().content.results_path))
-
- remote_results_root = os.path.join(content_root, data_context().content.results_path)
- remote_results_name = os.path.basename(remote_results_root)
- remote_temp_path = os.path.join('/tmp', remote_results_name)
+ con.create_archive(chdir=remote_test_root, name=remote_results_name, dst=result_file, exclude=ResultType.TMP.name)
+ except SubprocessError as ex:
+ if success:
+ raise # download errors are fatal if tests succeeded
- # AIX cp and GNU cp provide different options, no way could be found to have a common
- # pattern and achieve the same goal
- cp_opts = '-hr' if remote.platform == 'aix' else '-a'
-
- try:
- command = 'rm -rf {0} && mkdir {0} && cp {1} {2}/* {0}/ && chmod -R a+r {0}'.format(remote_temp_path, cp_opts, remote_results_root)
-
- manage.ssh(command, capture=True) # pylint: disable=unexpected-keyword-arg
- manage.download(remote_temp_path, local_test_root)
- except Exception as ex: # pylint: disable=broad-except
- if success:
- raise # download errors are fatal, but only if tests succeeded
+ # surface download failures as a warning here to avoid masking test failures
+ display.warning(f'Failed to download results while handling an exception: {ex}')
+ else:
+ result_file.seek(0)
- # handle download error here to avoid masking test failures
- display.warning('Failed to download results while handling an exception: %s' % ex)
- finally:
- if args.remote_terminate == 'always' or (args.remote_terminate == 'success' and success):
- core_ci.stop()
+ local_con = LocalConnection(args)
+ local_con.extract_archive(chdir=local_test_root, src=result_file)
-def generate_command(args, python_interpreter, ansible_bin_path, content_root, options, exclude, require):
- """
- :type args: EnvironmentConfig
- :type python_interpreter: str | None
- :type ansible_bin_path: str
- :type content_root: str
- :type options: dict[str, int]
- :type exclude: list[str]
- :type require: list[str]
- :rtype: list[str]
- """
- options['--color'] = 1
+def generate_command(
+ args, # type: EnvironmentConfig
+ python, # type: PythonConfig
+ ansible_bin_path, # type: str
+ content_root, # type: str
+ exclude, # type: t.List[str]
+ require, # type: t.List[str]
+): # type: (...) -> t.List[str]
+ """Generate the command necessary to delegate ansible-test."""
+ options = {
+ '--color': 1,
+ '--docker-no-pull': 0,
+ }
cmd = [os.path.join(ansible_bin_path, 'ansible-test')]
-
- if python_interpreter:
- cmd = [python_interpreter] + cmd
+ cmd = [python.path] + cmd
# Force the encoding used during delegation.
# This is only needed because ansible-test relies on Python's file system encoding.
@@ -522,24 +254,40 @@ def generate_command(args, python_interpreter, ansible_bin_path, content_root, o
ANSIBLE_TEST_CONTENT_ROOT=content_root,
)
+ if isinstance(args.controller.python, VirtualPythonConfig):
+ # Expose the ansible and ansible_test library directories to the virtual environment.
+ # This is only required when running from an install.
+ library_path = process_scoped_temporary_directory(args)
+
+ os.symlink(ANSIBLE_LIB_ROOT, os.path.join(library_path, 'ansible'))
+ os.symlink(ANSIBLE_TEST_ROOT, os.path.join(library_path, 'ansible_test'))
+
+ env_vars.update(
+ PYTHONPATH=library_path,
+ )
+
+ # Propagate the TERM environment variable to the remote host when using the shell command.
+ if isinstance(args, ShellConfig):
+ term = os.environ.get('TERM')
+
+ if term is not None:
+ env_vars.update(TERM=term)
+
env_args = ['%s=%s' % (key, env_vars[key]) for key in sorted(env_vars)]
cmd = ['/usr/bin/env'] + env_args + cmd
- cmd += list(filter_options(args, sys.argv[1:], options, exclude, require))
+ cmd += list(filter_options(args, args.host_settings.filtered_args, options, exclude, require))
cmd += ['--color', 'yes' if args.color else 'no']
- if args.requirements:
- cmd += ['--requirements']
-
- if isinstance(args, ShellConfig):
- cmd = create_shell_command(cmd)
- elif isinstance(args, SanityConfig):
+ if isinstance(args, SanityConfig):
base_branch = args.base_branch or get_ci_provider().get_base_branch()
if base_branch:
cmd += ['--base-branch', base_branch]
+ cmd.extend(['--host-path', args.host_path])
+
return cmd
@@ -554,7 +302,6 @@ def filter_options(args, argv, options, exclude, require):
"""
options = options.copy()
- options['--requirements'] = 0
options['--truncate'] = 1
options['--redact'] = 0
options['--no-redact'] = 0
@@ -581,30 +328,9 @@ def filter_options(args, argv, options, exclude, require):
if isinstance(args, IntegrationConfig):
options.update({
'--no-temp-unicode': 0,
- '--no-pip-check': 0,
})
- if isinstance(args, (NetworkIntegrationConfig, WindowsIntegrationConfig)):
- options.update({
- '--inventory': 1,
- })
-
- remaining = 0
-
- for arg in argv:
- if not arg.startswith('-') and remaining:
- remaining -= 1
- continue
-
- remaining = 0
-
- parts = arg.split('=', 1)
- key = parts[0]
-
- if key in options:
- remaining = options[key] - len(parts) + 1
- continue
-
+ for arg in filter_args(argv, options):
yield arg
for arg in args.delegate_args:
@@ -626,14 +352,9 @@ def filter_options(args, argv, options, exclude, require):
yield '--truncate'
yield '%d' % args.truncate
- if args.redact:
- yield '--redact'
- else:
+ if not args.redact:
yield '--no-redact'
if isinstance(args, IntegrationConfig):
if args.no_temp_unicode:
yield '--no-temp-unicode'
-
- if not args.pip_check:
- yield '--no-pip-check'
diff --git a/test/lib/ansible_test/_internal/diff.py b/test/lib/ansible_test/_internal/diff.py
index 5671ec7f28..29c69932b8 100644
--- a/test/lib/ansible_test/_internal/diff.py
+++ b/test/lib/ansible_test/_internal/diff.py
@@ -1,12 +1,10 @@
"""Diff parsing functions and classes."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
import textwrap
import traceback
-
-from . import types as t
+import typing as t
from .util import (
ApplicationError,
diff --git a/test/lib/ansible_test/_internal/docker_util.py b/test/lib/ansible_test/_internal/docker_util.py
index 5672ca1c80..f1f2d68b42 100644
--- a/test/lib/ansible_test/_internal/docker_util.py
+++ b/test/lib/ansible_test/_internal/docker_util.py
@@ -1,17 +1,15 @@
"""Functions for accessing docker via the docker cli."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
import random
import socket
import time
-
-from . import types as t
+import urllib.parse
+import typing as t
from .io import (
- open_binary_file,
read_text_file,
)
@@ -21,10 +19,7 @@ from .util import (
display,
find_executable,
SubprocessError,
-)
-
-from .http import (
- urlparse,
+ cache,
)
from .util_common import (
@@ -36,8 +31,6 @@ from .config import (
EnvironmentConfig,
)
-BUFFER_SIZE = 256 * 256
-
DOCKER_COMMANDS = [
'docker',
'podman',
@@ -75,45 +68,43 @@ class DockerCommand:
return None
-def get_docker_command(required=False): # type: (bool) -> t.Optional[DockerCommand]
+def require_docker(): # type: () -> DockerCommand
"""Return the docker command to invoke. Raises an exception if docker is not available."""
- try:
- return get_docker_command.cmd
- except AttributeError:
- get_docker_command.cmd = DockerCommand.detect()
+ if command := get_docker_command():
+ return command
- if required and not get_docker_command.cmd:
- raise ApplicationError("No container runtime detected. Supported commands: %s" % ', '.join(DOCKER_COMMANDS))
+ raise ApplicationError(f'No container runtime detected. Supported commands: {", ".join(DOCKER_COMMANDS)}')
- return get_docker_command.cmd
+@cache
+def get_docker_command(): # type: () -> t.Optional[DockerCommand]
+ """Return the docker command to invoke, or None if docker is not available."""
+ return DockerCommand.detect()
+
+def docker_available(): # type: () -> bool
+ """Return True if docker is available, otherwise return False."""
+ return bool(get_docker_command())
+
+
+@cache
def get_docker_host_ip(): # type: () -> str
"""Return the IP of the Docker host."""
- try:
- return get_docker_host_ip.ip
- except AttributeError:
- pass
-
- docker_host_ip = get_docker_host_ip.ip = socket.gethostbyname(get_docker_hostname())
+ docker_host_ip = socket.gethostbyname(get_docker_hostname())
display.info('Detected docker host IP: %s' % docker_host_ip, verbosity=1)
return docker_host_ip
+@cache
def get_docker_hostname(): # type: () -> str
"""Return the hostname of the Docker service."""
- try:
- return get_docker_hostname.hostname
- except AttributeError:
- pass
-
docker_host = os.environ.get('DOCKER_HOST')
if docker_host and docker_host.startswith('tcp://'):
try:
- hostname = urlparse(docker_host)[1].split(':')[0]
+ hostname = urllib.parse.urlparse(docker_host)[1].split(':')[0]
display.info('Detected Docker host: %s' % hostname, verbosity=1)
except ValueError:
hostname = 'localhost'
@@ -122,20 +113,12 @@ def get_docker_hostname(): # type: () -> str
hostname = 'localhost'
display.info('Assuming Docker is available on localhost.', verbosity=1)
- get_docker_hostname.hostname = hostname
-
return hostname
-def get_docker_container_id():
- """
- :rtype: str | None
- """
- try:
- return get_docker_container_id.container_id
- except AttributeError:
- pass
-
+@cache
+def get_docker_container_id(): # type: () -> t.Optional[str]
+ """Return the current container ID if running in a container, otherwise return None."""
path = '/proc/self/cpuset'
container_id = None
@@ -152,8 +135,6 @@ def get_docker_container_id():
if cgroup_path in ('/docker', '/azpl_job'):
container_id = cgroup_name
- get_docker_container_id.container_id = container_id
-
if container_id:
display.info('Detected execution in Docker container: %s' % container_id, verbosity=1)
@@ -200,12 +181,12 @@ def docker_pull(args, image):
:type args: EnvironmentConfig
:type image: str
"""
- if ('@' in image or ':' in image) and docker_image_exists(args, image):
- display.info('Skipping docker pull of existing image with tag or digest: %s' % image, verbosity=2)
+ if '@' not in image and ':' not in image:
+ display.info('Skipping pull of image without tag or digest: %s' % image, verbosity=2)
return
- if not args.docker_pull:
- display.warning('Skipping docker pull for "%s". Image may be out-of-date.' % image)
+ if docker_image_exists(args, image):
+ display.info('Skipping pull of existing image: %s' % image, verbosity=2)
return
for _iteration in range(1, 10):
@@ -224,32 +205,6 @@ def docker_cp_to(args, container_id, src, dst): # type: (EnvironmentConfig, str
docker_command(args, ['cp', src, '%s:%s' % (container_id, dst)])
-def docker_put(args, container_id, src, dst):
- """
- :type args: EnvironmentConfig
- :type container_id: str
- :type src: str
- :type dst: str
- """
- # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
- with open_binary_file(src) as src_fd:
- docker_exec(args, container_id, ['dd', 'of=%s' % dst, 'bs=%s' % BUFFER_SIZE],
- options=['-i'], stdin=src_fd, capture=True)
-
-
-def docker_get(args, container_id, src, dst):
- """
- :type args: EnvironmentConfig
- :type container_id: str
- :type src: str
- :type dst: str
- """
- # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
- with open_binary_file(dst, 'wb') as dst_fd:
- docker_exec(args, container_id, ['dd', 'if=%s' % src, 'bs=%s' % BUFFER_SIZE],
- options=['-i'], stdout=dst_fd, capture=True)
-
-
def docker_run(args, image, options, cmd=None, create_only=False):
"""
:type args: EnvironmentConfig
@@ -331,7 +286,7 @@ class DockerError(Exception):
class ContainerNotFoundError(DockerError):
"""The container identified by `identifier` was not found."""
def __init__(self, identifier):
- super(ContainerNotFoundError, self).__init__('The container "%s" was not found.' % identifier)
+ super().__init__('The container "%s" was not found.' % identifier)
self.identifier = identifier
@@ -480,25 +435,6 @@ def docker_image_exists(args, image): # type: (EnvironmentConfig, str) -> bool
return True
-def docker_network_inspect(args, network):
- """
- :type args: EnvironmentConfig
- :type network: str
- :rtype: list[dict]
- """
- if args.explain:
- return []
-
- try:
- stdout = docker_command(args, ['network', 'inspect', network], capture=True)[0]
- return json.loads(stdout)
- except SubprocessError as ex:
- try:
- return json.loads(ex.stdout)
- except Exception:
- raise ex
-
-
def docker_exec(args, container_id, cmd, options=None, capture=False, stdin=None, stdout=None, data=None):
"""
:type args: EnvironmentConfig
@@ -514,7 +450,7 @@ def docker_exec(args, container_id, cmd, options=None, capture=False, stdin=None
if not options:
options = []
- if data:
+ if data or stdin or stdout:
options.append('-i')
return docker_command(args, ['exec'] + options + [container_id] + cmd, capture=capture, stdin=stdin, stdout=stdout, data=data)
@@ -550,7 +486,7 @@ def docker_command(args, cmd, capture=False, stdin=None, stdout=None, always=Fal
:rtype: str | None, str | None
"""
env = docker_environment()
- command = get_docker_command(required=True).command
+ command = require_docker().command
return run_command(args, [command] + cmd, env=env, capture=capture, stdin=stdin, stdout=stdout, always=always, data=data)
diff --git a/test/lib/ansible_test/_internal/encoding.py b/test/lib/ansible_test/_internal/encoding.py
index 8e014794c7..189b44c01f 100644
--- a/test/lib/ansible_test/_internal/encoding.py
+++ b/test/lib/ansible_test/_internal/encoding.py
@@ -1,8 +1,7 @@
"""Functions for encoding and decoding strings."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-from . import types as t
+import typing as t
ENCODING = 'utf-8'
diff --git a/test/lib/ansible_test/_internal/executor.py b/test/lib/ansible_test/_internal/executor.py
index 245589327f..2dd53de81d 100644
--- a/test/lib/ansible_test/_internal/executor.py
+++ b/test/lib/ansible_test/_internal/executor.py
@@ -1,52 +1,15 @@
"""Execute Ansible tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import atexit
-import json
-import os
-import re
-
-from . import types as t
+import typing as t
from .io import (
- make_dirs,
read_text_file,
- write_text_file,
)
from .util import (
ApplicationWarning,
- ApplicationError,
- SubprocessError,
display,
- find_executable,
- raw_command,
- generate_pip_command,
- find_python,
- cmd_quote,
- ANSIBLE_TEST_DATA_ROOT,
- ANSIBLE_TEST_TOOLS_ROOT,
- str_to_version,
- version_to_str,
-)
-
-from .util_common import (
- intercept_command,
- run_command,
- ResultType,
- CommonConfig,
-)
-
-from .docker_util import (
- docker_pull,
- docker_run,
- docker_inspect,
-)
-
-from .ansible_util import (
- ansible_environment,
- check_pyyaml,
)
from .ci import (
@@ -59,522 +22,19 @@ from .classification import (
from .config import (
TestConfig,
- EnvironmentConfig,
- IntegrationConfig,
- ShellConfig,
- UnitsConfig,
- SanityConfig,
)
from .metadata import (
ChangeDescription,
)
-from .data import (
- data_context,
-)
-
-from .http import (
- urlparse,
+from .provisioning import (
+ HostState,
)
-def create_shell_command(command):
- """
- :type command: list[str]
- :rtype: list[str]
- """
- optional_vars = (
- 'TERM',
- )
-
- cmd = ['/usr/bin/env']
- cmd += ['%s=%s' % (var, os.environ[var]) for var in optional_vars if var in os.environ]
- cmd += command
-
- return cmd
-
-
-def get_openssl_version(args, python, python_version): # type: (EnvironmentConfig, str, str) -> t.Optional[t.Tuple[int, ...]]
- """Return the openssl version."""
- if not python_version.startswith('2.'):
- # OpenSSL version checking only works on Python 3.x.
- # This should be the most accurate, since it is the Python we will be using.
- version = json.loads(run_command(args, [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'sslcheck.py')], capture=True, always=True)[0])['version']
-
- if version:
- display.info('Detected OpenSSL version %s under Python %s.' % (version_to_str(version), python_version), verbosity=1)
-
- return tuple(version)
-
- # Fall back to detecting the OpenSSL version from the CLI.
- # This should provide an adequate solution on Python 2.x.
- openssl_path = find_executable('openssl', required=False)
-
- if openssl_path:
- try:
- result = raw_command([openssl_path, 'version'], capture=True)[0]
- except SubprocessError:
- result = ''
-
- match = re.search(r'^OpenSSL (?P<version>[0-9]+\.[0-9]+\.[0-9]+)', result)
-
- if match:
- version = str_to_version(match.group('version'))
-
- display.info('Detected OpenSSL version %s using the openssl CLI.' % version_to_str(version), verbosity=1)
-
- return version
-
- display.info('Unable to detect OpenSSL version.', verbosity=1)
-
- return None
-
-
-def is_cryptography_available(python): # type: (str) -> bool
- """Return True if cryptography is available for the given python."""
- try:
- raw_command([python, '-c', 'import cryptography'], capture=True)
- except SubprocessError:
- return False
-
- return True
-
-
-def get_setuptools_version(args, python): # type: (EnvironmentConfig, str) -> t.Tuple[int]
- """Return the setuptools version for the given python."""
- try:
- return str_to_version(raw_command([python, '-c', 'import setuptools; print(setuptools.__version__)'], capture=True)[0])
- except SubprocessError:
- if args.explain:
- return tuple() # ignore errors in explain mode in case setuptools is not aleady installed
-
- raise
-
-
-def install_cryptography(args, python, python_version, pip): # type: (EnvironmentConfig, str, str, t.List[str]) -> None
- """
- Install cryptography for the specified environment.
- """
- # make sure ansible-test's basic requirements are met before continuing
- # this is primarily to ensure that pip is new enough to facilitate further requirements installation
- install_ansible_test_requirements(args, pip)
-
- # make sure setuptools is available before trying to install cryptography
- # the installed version of setuptools affects the version of cryptography to install
- run_command(args, generate_pip_install(pip, '', packages=['setuptools']))
-
- # skip cryptography install if it is already available
- # this avoids downgrading cryptography when OS packages provide a newer version than we are able to install using pip
- if is_cryptography_available(python):
- return
-
- # install the latest cryptography version that the current requirements can support
- # use a custom constraints file to avoid the normal constraints file overriding the chosen version of cryptography
- # if not installed here later install commands may try to install an unsupported version due to the presence of older setuptools
- # this is done instead of upgrading setuptools to allow tests to function with older distribution provided versions of setuptools
- run_command(args, generate_pip_install(pip, '',
- packages=[get_cryptography_requirement(args, python, python_version)],
- constraints=os.path.join(ANSIBLE_TEST_DATA_ROOT, 'cryptography-constraints.txt')))
-
-
-def get_cryptography_requirement(args, python, python_version): # type: (EnvironmentConfig, str, str) -> str
- """
- Return the correct cryptography requirement for the given python version.
- The version of cryptography installed depends on the python version, setuptools version and openssl version.
- """
- setuptools_version = get_setuptools_version(args, python)
- openssl_version = get_openssl_version(args, python, python_version)
-
- if setuptools_version >= (18, 5):
- if python_version == '2.6':
- # cryptography 2.2+ requires python 2.7+
- # see https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#22---2018-03-19
- cryptography = 'cryptography < 2.2'
- elif openssl_version and openssl_version < (1, 1, 0):
- # cryptography 3.2 requires openssl 1.1.x or later
- # see https://cryptography.io/en/latest/changelog.html#v3-2
- cryptography = 'cryptography < 3.2'
- else:
- # cryptography 3.4+ fails to install on many systems
- # this is a temporary work-around until a more permanent solution is available
- cryptography = 'cryptography < 3.4'
- else:
- # cryptography 2.1+ requires setuptools 18.5+
- # see https://github.com/pyca/cryptography/blob/62287ae18383447585606b9d0765c0f1b8a9777c/setup.py#L26
- cryptography = 'cryptography < 2.1'
-
- return cryptography
-
-
-def install_command_requirements(args, python_version=None, context=None, enable_pyyaml_check=False, extra_requirements=None):
- """
- :type args: EnvironmentConfig
- :type python_version: str | None
- :type context: str | None
- :type enable_pyyaml_check: bool
- :type extra_requirements: list[str] | None
- """
- if not args.explain:
- make_dirs(ResultType.COVERAGE.path)
- make_dirs(ResultType.DATA.path)
-
- if isinstance(args, ShellConfig):
- if args.raw:
- return
-
- if not args.requirements:
- return
-
- if isinstance(args, ShellConfig):
- return
-
- packages = []
-
- if isinstance(args, TestConfig):
- if args.coverage:
- packages.append('coverage')
- if args.junit:
- packages.append('junit-xml')
-
- if not python_version:
- python_version = args.python_version
-
- python = find_python(python_version)
- pip = generate_pip_command(python)
-
- # skip packages which have aleady been installed for python_version
-
- try:
- package_cache = install_command_requirements.package_cache
- except AttributeError:
- package_cache = install_command_requirements.package_cache = {}
-
- installed_packages = package_cache.setdefault(python_version, set())
- skip_packages = [package for package in packages if package in installed_packages]
-
- for package in skip_packages:
- packages.remove(package)
-
- installed_packages.update(packages)
-
- if args.command != 'sanity':
- install_cryptography(args, python, python_version, pip)
-
- commands = [generate_pip_install(pip, args.command, packages=packages, context=context)]
-
- if extra_requirements:
- for extra_requirement in extra_requirements:
- commands.append(generate_pip_install(pip, extra_requirement))
-
- commands = [cmd for cmd in commands if cmd]
-
- if not commands:
- return # no need to detect changes or run pip check since we are not making any changes
-
- # only look for changes when more than one requirements file is needed
- detect_pip_changes = len(commands) > 1
-
- # first pass to install requirements, changes expected unless environment is already set up
- install_ansible_test_requirements(args, pip)
- changes = run_pip_commands(args, pip, commands, detect_pip_changes)
-
- if changes:
- # second pass to check for conflicts in requirements, changes are not expected here
- changes = run_pip_commands(args, pip, commands, detect_pip_changes)
-
- if changes:
- raise ApplicationError('Conflicts detected in requirements. The following commands reported changes during verification:\n%s' %
- '\n'.join((' '.join(cmd_quote(c) for c in cmd) for cmd in changes)))
-
- if args.pip_check:
- # ask pip to check for conflicts between installed packages
- try:
- run_command(args, pip + ['check', '--disable-pip-version-check'], capture=True)
- except SubprocessError as ex:
- if ex.stderr.strip() == 'ERROR: unknown command "check"':
- display.warning('Cannot check pip requirements for conflicts because "pip check" is not supported.')
- else:
- raise
-
- if enable_pyyaml_check:
- # pyyaml may have been one of the requirements that was installed, so perform an optional check for it
- check_pyyaml(args, python_version, required=False)
-
-
-def install_ansible_test_requirements(args, pip): # type: (EnvironmentConfig, t.List[str]) -> None
- """Install requirements for ansible-test for the given pip if not already installed."""
- try:
- installed = install_command_requirements.installed
- except AttributeError:
- installed = install_command_requirements.installed = set()
-
- if tuple(pip) in installed:
- return
-
- # make sure basic ansible-test requirements are met, including making sure that pip is recent enough to support constraints
- # virtualenvs created by older distributions may include very old pip versions, such as those created in the centos6 test container (pip 6.0.8)
- run_command(args, generate_pip_install(pip, 'ansible-test', use_constraints=False))
-
- installed.add(tuple(pip))
-
-
-def run_pip_commands(args, pip, commands, detect_pip_changes=False):
- """
- :type args: EnvironmentConfig
- :type pip: list[str]
- :type commands: list[list[str]]
- :type detect_pip_changes: bool
- :rtype: list[list[str]]
- """
- changes = []
-
- after_list = pip_list(args, pip) if detect_pip_changes else None
-
- for cmd in commands:
- if not cmd:
- continue
-
- before_list = after_list
-
- run_command(args, cmd)
-
- after_list = pip_list(args, pip) if detect_pip_changes else None
-
- if before_list != after_list:
- changes.append(cmd)
-
- return changes
-
-
-def pip_list(args, pip):
- """
- :type args: EnvironmentConfig
- :type pip: list[str]
- :rtype: str
- """
- stdout = run_command(args, pip + ['list'], capture=True)[0]
- return stdout
-
-
-def generate_pip_install(pip, command, packages=None, constraints=None, use_constraints=True, context=None):
- """
- :type pip: list[str]
- :type command: str
- :type packages: list[str] | None
- :type constraints: str | None
- :type use_constraints: bool
- :type context: str | None
- :rtype: list[str] | None
- """
- constraints = constraints or os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'constraints.txt')
- requirements = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', '%s.txt' % ('%s.%s' % (command, context) if context else command))
- content_constraints = None
-
- options = []
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- if command == 'sanity' and data_context().content.is_ansible:
- requirements = os.path.join(data_context().content.sanity_path, 'code-smell', '%s.requirements.txt' % context)
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- if command == 'units':
- requirements = os.path.join(data_context().content.unit_path, 'requirements.txt')
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- content_constraints = os.path.join(data_context().content.unit_path, 'constraints.txt')
-
- if command in ('integration', 'windows-integration', 'network-integration'):
- requirements = os.path.join(data_context().content.integration_path, 'requirements.txt')
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- requirements = os.path.join(data_context().content.integration_path, '%s.requirements.txt' % command)
-
- if os.path.exists(requirements) and os.path.getsize(requirements):
- options += ['-r', requirements]
-
- content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')
-
- if command.startswith('integration.cloud.'):
- content_constraints = os.path.join(data_context().content.integration_path, 'constraints.txt')
-
- if packages:
- options += packages
-
- if not options:
- return None
-
- if use_constraints:
- if content_constraints and os.path.exists(content_constraints) and os.path.getsize(content_constraints):
- # listing content constraints first gives them priority over constraints provided by ansible-test
- options.extend(['-c', content_constraints])
-
- options.extend(['-c', constraints])
-
- return pip + ['install', '--disable-pip-version-check'] + options
-
-
-def parse_inventory(args, inventory_path): # type: (IntegrationConfig, str) -> t.Dict[str, t.Any]
- """Return a dict parsed from the given inventory file."""
- cmd = ['ansible-inventory', '-i', inventory_path, '--list']
- env = ansible_environment(args)
- inventory = json.loads(intercept_command(args, cmd, '', env, capture=True, disable_coverage=True)[0])
- return inventory
-
-
-def get_hosts(inventory, group_name): # type: (t.Dict[str, t.Any], str) -> t.Dict[str, t.Dict[str, t.Any]]
- """Return a dict of hosts from the specified group in the given inventory."""
- hostvars = inventory.get('_meta', {}).get('hostvars', {})
- group = inventory.get(group_name, {})
- host_names = group.get('hosts', [])
- hosts = dict((name, hostvars[name]) for name in host_names)
- return hosts
-
-
-def run_pypi_proxy(args): # type: (EnvironmentConfig) -> t.Tuple[t.Optional[str], t.Optional[str]]
- """Run a PyPI proxy container, returning the container ID and proxy endpoint."""
- use_proxy = False
-
- if args.docker_raw == 'centos6':
- use_proxy = True # python 2.6 is the only version available
-
- if args.docker_raw == 'default':
- if args.python == '2.6':
- use_proxy = True # python 2.6 requested
- elif not args.python and isinstance(args, (SanityConfig, UnitsConfig, ShellConfig)):
- use_proxy = True # multiple versions (including python 2.6) can be used
-
- if args.docker_raw and args.pypi_proxy:
- use_proxy = True # manual override to force proxy usage
-
- if not use_proxy:
- return None, None
-
- proxy_image = 'quay.io/ansible/pypi-test-container:1.0.0'
- port = 3141
-
- options = [
- '--detach',
- ]
-
- docker_pull(args, proxy_image)
-
- container_id = docker_run(args, proxy_image, options=options)
-
- container = docker_inspect(args, container_id)
-
- container_ip = container.get_ip_address()
-
- if not container_ip:
- raise Exception('PyPI container IP not available.')
-
- endpoint = 'http://%s:%d/root/pypi/+simple/' % (container_ip, port)
-
- return container_id, endpoint
-
-
-def configure_pypi_proxy(args): # type: (CommonConfig) -> None
- """Configure the environment to use a PyPI proxy, if present."""
- if not isinstance(args, EnvironmentConfig):
- return
-
- if args.pypi_endpoint:
- configure_pypi_block_access()
- configure_pypi_proxy_pip(args)
- configure_pypi_proxy_easy_install(args)
-
-
-def configure_pypi_block_access(): # type: () -> None
- """Block direct access to PyPI to ensure proxy configurations are always used."""
- if os.getuid() != 0:
- display.warning('Skipping custom hosts block for PyPI for non-root user.')
- return
-
- hosts_path = '/etc/hosts'
- hosts_block = '''
-127.0.0.1 pypi.org pypi.python.org files.pythonhosted.org
-'''
-
- def hosts_cleanup():
- display.info('Removing custom PyPI hosts entries: %s' % hosts_path, verbosity=1)
-
- with open(hosts_path) as hosts_file_read:
- content = hosts_file_read.read()
-
- content = content.replace(hosts_block, '')
-
- with open(hosts_path, 'w') as hosts_file_write:
- hosts_file_write.write(content)
-
- display.info('Injecting custom PyPI hosts entries: %s' % hosts_path, verbosity=1)
- display.info('Config: %s\n%s' % (hosts_path, hosts_block), verbosity=3)
-
- with open(hosts_path, 'a') as hosts_file:
- hosts_file.write(hosts_block)
-
- atexit.register(hosts_cleanup)
-
-
-def configure_pypi_proxy_pip(args): # type: (EnvironmentConfig) -> None
- """Configure a custom index for pip based installs."""
- pypi_hostname = urlparse(args.pypi_endpoint)[1].split(':')[0]
-
- pip_conf_path = os.path.expanduser('~/.pip/pip.conf')
- pip_conf = '''
-[global]
-index-url = {0}
-trusted-host = {1}
-'''.format(args.pypi_endpoint, pypi_hostname).strip()
-
- def pip_conf_cleanup():
- display.info('Removing custom PyPI config: %s' % pip_conf_path, verbosity=1)
- os.remove(pip_conf_path)
-
- if os.path.exists(pip_conf_path):
- raise ApplicationError('Refusing to overwrite existing file: %s' % pip_conf_path)
-
- display.info('Injecting custom PyPI config: %s' % pip_conf_path, verbosity=1)
- display.info('Config: %s\n%s' % (pip_conf_path, pip_conf), verbosity=3)
-
- write_text_file(pip_conf_path, pip_conf, True)
- atexit.register(pip_conf_cleanup)
-
-
-def configure_pypi_proxy_easy_install(args): # type: (EnvironmentConfig) -> None
- """Configure a custom index for easy_install based installs."""
- pydistutils_cfg_path = os.path.expanduser('~/.pydistutils.cfg')
- pydistutils_cfg = '''
-[easy_install]
-index_url = {0}
-'''.format(args.pypi_endpoint).strip()
-
- if os.path.exists(pydistutils_cfg_path):
- raise ApplicationError('Refusing to overwrite existing file: %s' % pydistutils_cfg_path)
-
- def pydistutils_cfg_cleanup():
- display.info('Removing custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
- os.remove(pydistutils_cfg_path)
-
- display.info('Injecting custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
- display.info('Config: %s\n%s' % (pydistutils_cfg_path, pydistutils_cfg), verbosity=3)
-
- write_text_file(pydistutils_cfg_path, pydistutils_cfg, True)
- atexit.register(pydistutils_cfg_cleanup)
-
-
-def get_changes_filter(args):
- """
- :type args: TestConfig
- :rtype: list[str]
- """
+def get_changes_filter(args): # type: (TestConfig) -> t.List[str]
+ """Return a list of targets which should be tested based on the changes made."""
paths = detect_changes(args)
if not args.metadata.change_description:
@@ -597,11 +57,8 @@ def get_changes_filter(args):
return args.metadata.change_description.targets
-def detect_changes(args):
- """
- :type args: TestConfig
- :rtype: list[str] | None
- """
+def detect_changes(args): # type: (TestConfig) -> t.Optional[t.List[str]]
+ """Return a list of changed paths."""
if args.changed:
paths = get_ci_provider().detect_changes(args)
elif args.changed_from or args.changed_path:
@@ -622,93 +79,37 @@ def detect_changes(args):
return paths
-def get_python_version(args, configs, name):
- """
- :type args: EnvironmentConfig
- :type configs: dict[str, dict[str, str]]
- :type name: str
- """
- config = configs.get(name, {})
- config_python = config.get('python')
-
- if not config or not config_python:
- if args.python:
- return args.python
-
- display.warning('No Python version specified. '
- 'Use completion config or the --python option to specify one.', unique=True)
-
- return '' # failure to provide a version may result in failures or reduced functionality later
-
- supported_python_versions = config_python.split(',')
- default_python_version = supported_python_versions[0]
-
- if args.python and args.python not in supported_python_versions:
- raise ApplicationError('Python %s is not supported by %s. Supported Python version(s) are: %s' % (
- args.python, name, ', '.join(sorted(supported_python_versions))))
-
- python_version = args.python or default_python_version
-
- return python_version
-
-
-def get_python_interpreter(args, configs, name):
- """
- :type args: EnvironmentConfig
- :type configs: dict[str, dict[str, str]]
- :type name: str
- """
- if args.python_interpreter:
- return args.python_interpreter
-
- config = configs.get(name, {})
-
- if not config:
- if args.python:
- guess = 'python%s' % args.python
- else:
- guess = 'python'
-
- display.warning('Using "%s" as the Python interpreter. '
- 'Use completion config or the --python-interpreter option to specify the path.' % guess, unique=True)
-
- return guess
-
- python_version = get_python_version(args, configs, name)
-
- python_dir = config.get('python_dir', '/usr/bin')
- python_interpreter = os.path.join(python_dir, 'python%s' % python_version)
- python_interpreter = config.get('python%s' % python_version, python_interpreter)
-
- return python_interpreter
-
-
class NoChangesDetected(ApplicationWarning):
"""Exception when change detection was performed, but no changes were found."""
def __init__(self):
- super(NoChangesDetected, self).__init__('No changes detected.')
+ super().__init__('No changes detected.')
class NoTestsForChanges(ApplicationWarning):
"""Exception when changes detected, but no tests trigger as a result."""
def __init__(self):
- super(NoTestsForChanges, self).__init__('No tests found for detected changes.')
+ super().__init__('No tests found for detected changes.')
class Delegate(Exception):
"""Trigger command delegation."""
- def __init__(self, exclude=None, require=None):
- """
- :type exclude: list[str] | None
- :type require: list[str] | None
- """
- super(Delegate, self).__init__()
+ def __init__(self, host_state, exclude=None, require=None): # type: (HostState, t.List[str], t.List[str]) -> None
+ super().__init__()
+ self.host_state = host_state
self.exclude = exclude or []
self.require = require or []
+class ListTargets(Exception):
+ """List integration test targets instead of executing them."""
+ def __init__(self, target_names): # type: (t.List[str]) -> None
+ super().__init__()
+
+ self.target_names = target_names
+
+
class AllTargetsSkipped(ApplicationWarning):
"""All targets skipped."""
def __init__(self):
- super(AllTargetsSkipped, self).__init__('All targets skipped.')
+ super().__init__('All targets skipped.')
diff --git a/test/lib/ansible_test/_internal/git.py b/test/lib/ansible_test/_internal/git.py
index acc39f3f69..dbee4ad44a 100644
--- a/test/lib/ansible_test/_internal/git.py
+++ b/test/lib/ansible_test/_internal/git.py
@@ -1,10 +1,8 @@
"""Wrapper around git command-line tools."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import re
-
-from . import types as t
+import typing as t
from .util import (
SubprocessError,
diff --git a/test/lib/ansible_test/_internal/host_configs.py b/test/lib/ansible_test/_internal/host_configs.py
new file mode 100644
index 0000000000..a819652e08
--- /dev/null
+++ b/test/lib/ansible_test/_internal/host_configs.py
@@ -0,0 +1,491 @@
+"""Configuration for the test hosts requested by the user."""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import enum
+import os
+import pickle
+import sys
+import typing as t
+
+from .constants import (
+ SUPPORTED_PYTHON_VERSIONS,
+)
+
+from .io import (
+ open_binary_file,
+)
+
+from .completion import (
+ CompletionConfig,
+ DOCKER_COMPLETION,
+ DockerCompletionConfig,
+ InventoryCompletionConfig,
+ NETWORK_COMPLETION,
+ NetworkRemoteCompletionConfig,
+ PosixCompletionConfig,
+ PosixRemoteCompletionConfig,
+ PosixSshCompletionConfig,
+ REMOTE_COMPLETION,
+ RemoteCompletionConfig,
+ WINDOWS_COMPLETION,
+ WindowsRemoteCompletionConfig,
+ filter_completion,
+)
+
+from .util import (
+ find_python,
+ get_available_python_versions,
+ str_to_version,
+ version_to_str,
+)
+
+
+@dataclasses.dataclass(frozen=True)
+class OriginCompletionConfig(PosixCompletionConfig):
+ """Pseudo completion config for the origin."""
+ def __init__(self):
+ super().__init__(name='origin')
+
+ @property
+ def supported_pythons(self): # type: () -> t.List[str]
+ """Return a list of the supported Python versions."""
+ current_version = version_to_str(sys.version_info[:2])
+ versions = [version for version in SUPPORTED_PYTHON_VERSIONS if version == current_version] + \
+ [version for version in SUPPORTED_PYTHON_VERSIONS if version != current_version]
+ return versions
+
+ def get_python_path(self, version): # type: (str) -> str
+ """Return the path of the requested Python version."""
+ version = find_python(version)
+ return version
+
+ @property
+ def is_default(self):
+ """True if the completion entry is only used for defaults, otherwise False."""
+ return False
+
+
+@dataclasses.dataclass(frozen=True)
+class HostContext:
+ """Context used when getting and applying defaults for host configurations."""
+ controller_config: t.Optional['PosixConfig']
+
+ @property
+ def controller(self): # type: () -> bool
+ """True if the context is for the controller, otherwise False."""
+ return not self.controller_config
+
+
+@dataclasses.dataclass
+class HostConfig(metaclass=abc.ABCMeta):
+ """Base class for host configuration."""
+ @abc.abstractmethod
+ def get_defaults(self, context): # type: (HostContext) -> CompletionConfig
+ """Return the default settings."""
+
+ @abc.abstractmethod
+ def apply_defaults(self, context, defaults): # type: (HostContext, CompletionConfig) -> None
+ """Apply default settings."""
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if the host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return False
+
+
+@dataclasses.dataclass
+class PythonConfig(metaclass=abc.ABCMeta):
+ """Configuration for Python."""
+ version: t.Optional[str] = None
+ path: t.Optional[str] = None
+
+ @property
+ def tuple(self): # type: () -> t.Tuple[int, ...]
+ """Return the Python version as a tuple."""
+ return str_to_version(self.version)
+
+ @property
+ def major_version(self): # type: () -> int
+ """Return the Python major version."""
+ return self.tuple[0]
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ if self.version in (None, 'default'):
+ self.version = defaults.get_default_python(context.controller)
+
+ if self.path:
+ if self.path.endswith('/'):
+ self.path = os.path.join(self.path, f'python{self.version}')
+
+ # FUTURE: If the host is origin, the python path could be validated here.
+ else:
+ self.path = defaults.get_python_path(self.version)
+
+ @property
+ @abc.abstractmethod
+ def is_managed(self): # type: () -> bool
+ """
+ True if this Python is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
+ """
+
+
+@dataclasses.dataclass
+class NativePythonConfig(PythonConfig):
+ """Configuration for native Python."""
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this Python is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
+ """
+ return False
+
+
+@dataclasses.dataclass
+class VirtualPythonConfig(PythonConfig):
+ """Configuration for Python in a virtual environment."""
+ system_site_packages: t.Optional[bool] = None
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ if self.system_site_packages is None:
+ self.system_site_packages = False
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this Python is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have requirements installed without explicit permission from the user.
+ """
+ return True
+
+
+@dataclasses.dataclass
+class PosixConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for POSIX host configuration."""
+ python: t.Optional[PythonConfig] = None
+
+ @property
+ @abc.abstractmethod
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+
+ @abc.abstractmethod
+ def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig
+ """Return the default settings."""
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ self.python = self.python or NativePythonConfig()
+ self.python.apply_defaults(context, defaults)
+
+
+@dataclasses.dataclass
+class ControllerHostConfig(PosixConfig, metaclass=abc.ABCMeta):
+ """Base class for host configurations which support the controller."""
+ @abc.abstractmethod
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+
+
+@dataclasses.dataclass
+class RemoteConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for remote host configuration."""
+ name: t.Optional[str] = None
+ provider: t.Optional[str] = None
+
+ @property
+ def platform(self):
+ """The name of the platform."""
+ return self.name.partition('/')[0]
+
+ @property
+ def version(self):
+ """The version of the platform."""
+ return self.name.partition('/')[2]
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, RemoteCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ if self.provider == 'default':
+ self.provider = None
+
+ self.provider = self.provider or defaults.provider or 'aws'
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return True
+
+
+@dataclasses.dataclass
+class PosixSshConfig(PosixConfig):
+ """Configuration for a POSIX SSH host."""
+ user: t.Optional[str] = None
+ host: t.Optional[str] = None
+ port: t.Optional[int] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> PosixSshCompletionConfig
+ """Return the default settings."""
+ return PosixSshCompletionConfig(
+ user=self.user,
+ host=self.host,
+ )
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return self.user == 'root'
+
+
+@dataclasses.dataclass
+class InventoryConfig(HostConfig):
+ """Configuration using inventory."""
+ path: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> InventoryCompletionConfig
+ """Return the default settings."""
+ return InventoryCompletionConfig()
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, InventoryCompletionConfig) -> None
+ """Apply default settings."""
+
+
+@dataclasses.dataclass
+class DockerConfig(ControllerHostConfig, PosixConfig):
+ """Configuration for a docker host."""
+ name: t.Optional[str] = None
+ image: t.Optional[str] = None
+ memory: t.Optional[int] = None
+ privileged: t.Optional[bool] = None
+ seccomp: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> DockerCompletionConfig
+ """Return the default settings."""
+ return filter_completion(DOCKER_COMPLETION).get(self.name) or DockerCompletionConfig(
+ name=self.name,
+ image=self.name,
+ placeholder=True,
+ )
+
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+ if self.name in filter_completion(DOCKER_COMPLETION):
+ defaults = self.get_defaults(context)
+ pythons = {version: defaults.get_python_path(version) for version in defaults.supported_pythons}
+ else:
+ pythons = {context.controller_config.python.version: context.controller_config.python.path}
+
+ return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()]
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, DockerCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ self.name = defaults.name
+ self.image = defaults.image
+
+ if self.seccomp is None:
+ self.seccomp = defaults.seccomp
+
+ if self.privileged is None:
+ self.privileged = False
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if this host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return True
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return True
+
+
+@dataclasses.dataclass
+class PosixRemoteConfig(RemoteConfig, ControllerHostConfig, PosixConfig):
+ """Configuration for a POSIX remote host."""
+ arch: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> PosixRemoteCompletionConfig
+ """Return the default settings."""
+ return filter_completion(REMOTE_COMPLETION).get(self.name) or REMOTE_COMPLETION.get(self.platform) or PosixRemoteCompletionConfig(
+ name=self.name,
+ placeholder=True,
+ )
+
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+ if self.name in filter_completion(REMOTE_COMPLETION):
+ defaults = self.get_defaults(context)
+ pythons = {version: defaults.get_python_path(version) for version in defaults.supported_pythons}
+ else:
+ pythons = {context.controller_config.python.version: context.controller_config.python.path}
+
+ return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in pythons.items()]
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return True
+
+
+@dataclasses.dataclass
+class WindowsConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for Windows host configuration."""
+
+
+@dataclasses.dataclass
+class WindowsRemoteConfig(RemoteConfig, WindowsConfig):
+ """Configuration for a remoe Windows host."""
+ def get_defaults(self, context): # type: (HostContext) -> WindowsRemoteCompletionConfig
+ """Return the default settings."""
+ return filter_completion(WINDOWS_COMPLETION).get(self.name) or WindowsRemoteCompletionConfig(
+ name=self.name,
+ )
+
+
+@dataclasses.dataclass
+class WindowsInventoryConfig(InventoryConfig, WindowsConfig):
+ """Configuration for Windows hosts using inventory."""
+
+
+@dataclasses.dataclass
+class NetworkConfig(HostConfig, metaclass=abc.ABCMeta):
+ """Base class for network host configuration."""
+
+
+@dataclasses.dataclass
+class NetworkRemoteConfig(RemoteConfig, NetworkConfig):
+ """Configuration for a remoe network host."""
+ collection: t.Optional[str] = None
+ connection: t.Optional[str] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> NetworkRemoteCompletionConfig
+ """Return the default settings."""
+ return filter_completion(NETWORK_COMPLETION).get(self.name) or NetworkRemoteCompletionConfig(
+ name=self.name,
+ )
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, NetworkRemoteCompletionConfig) -> None
+ """Apply default settings."""
+ super().apply_defaults(context, defaults)
+
+ self.collection = self.collection or defaults.collection
+ self.connection = self.connection or defaults.connection
+
+
+@dataclasses.dataclass
+class NetworkInventoryConfig(InventoryConfig, NetworkConfig):
+ """Configuration for network hosts using inventory."""
+
+
+@dataclasses.dataclass
+class OriginConfig(ControllerHostConfig, PosixConfig):
+ """Configuration for the origin host."""
+ def get_defaults(self, context): # type: (HostContext) -> OriginCompletionConfig
+ """Return the default settings."""
+ return OriginCompletionConfig()
+
+ def get_default_targets(self, context): # type: (HostContext) -> t.List[ControllerConfig]
+ """Return the default targets for this host config."""
+ return [ControllerConfig(python=NativePythonConfig(version=version, path=path)) for version, path in get_available_python_versions().items()]
+
+    @property
+    def have_root(self):  # type: () -> bool
+        """True if root is available, otherwise False."""
+        # The origin host has root exactly when the current process runs as uid 0.
+        # The previous `!= 0` comparison was inverted relative to this docstring.
+        return os.getuid() == 0
+
+
+@dataclasses.dataclass
+class ControllerConfig(PosixConfig):
+ """Configuration for the controller host."""
+ controller: t.Optional[PosixConfig] = None
+
+ def get_defaults(self, context): # type: (HostContext) -> PosixCompletionConfig
+ """Return the default settings."""
+ return context.controller_config.get_defaults(context)
+
+ def apply_defaults(self, context, defaults): # type: (HostContext, PosixCompletionConfig) -> None
+ """Apply default settings."""
+ self.controller = context.controller_config
+
+ if not self.python and not defaults.supported_pythons:
+ # The user did not specify a target Python and supported Pythons are unknown, so use the controller Python specified by the user instead.
+ self.python = context.controller_config.python
+
+ super().apply_defaults(context, defaults)
+
+ @property
+ def is_managed(self): # type: () -> bool
+ """
+ True if the host is a managed instance, otherwise False.
+ Managed instances are used exclusively by ansible-test and can safely have destructive operations performed without explicit permission from the user.
+ """
+ return self.controller.is_managed
+
+ @property
+ def have_root(self): # type: () -> bool
+ """True if root is available, otherwise False."""
+ return self.controller.have_root
+
+
+class FallbackReason(enum.Enum):
+ """Reason fallback was peformed."""
+ ENVIRONMENT = enum.auto()
+ PYTHON = enum.auto()
+
+
+@dataclasses.dataclass(frozen=True)
+class FallbackDetail:
+ """Details about controller fallback behavior."""
+ reason: FallbackReason
+ message: str
+
+
+@dataclasses.dataclass(frozen=True)
+class HostSettings:
+ """Host settings for the controller and targets."""
+ controller: ControllerHostConfig
+ targets: t.List[HostConfig]
+ skipped_python_versions: t.List[str]
+ filtered_args: t.List[str]
+ controller_fallback: t.Optional[FallbackDetail]
+
+ def serialize(self, path): # type: (str) -> None
+ """Serialize the host settings to the given path."""
+ with open_binary_file(path, 'wb') as settings_file:
+ pickle.dump(self, settings_file)
+
+ @staticmethod
+ def deserialize(path): # type: (str) -> HostSettings
+ """Deserialize host settings from the path."""
+ with open_binary_file(path) as settings_file:
+ return pickle.load(settings_file)
+
+ def apply_defaults(self):
+ """Apply defaults to the host settings."""
+ context = HostContext(controller_config=None)
+ self.controller.apply_defaults(context, self.controller.get_defaults(context))
+
+ for target in self.targets:
+ context = HostContext(controller_config=self.controller)
+ target.apply_defaults(context, target.get_defaults(context))
diff --git a/test/lib/ansible_test/_internal/host_profiles.py b/test/lib/ansible_test/_internal/host_profiles.py
new file mode 100644
index 0000000000..30b4f8570c
--- /dev/null
+++ b/test/lib/ansible_test/_internal/host_profiles.py
@@ -0,0 +1,755 @@
+"""Profiles to represent individual test hosts or a user-provided inventory file."""
+from __future__ import annotations
+
+import abc
+import dataclasses
+import os
+import tempfile
+import time
+import typing as t
+
+from .io import (
+ write_text_file,
+)
+
+from .config import (
+ CommonConfig,
+ EnvironmentConfig,
+ IntegrationConfig,
+ TerminateMode,
+)
+
+from .host_configs import (
+ ControllerConfig,
+ ControllerHostConfig,
+ DockerConfig,
+ HostConfig,
+ NetworkInventoryConfig,
+ NetworkRemoteConfig,
+ OriginConfig,
+ PosixConfig,
+ PosixRemoteConfig,
+ PosixSshConfig,
+ PythonConfig,
+ RemoteConfig,
+ VirtualPythonConfig,
+ WindowsInventoryConfig,
+ WindowsRemoteConfig,
+)
+
+from .core_ci import (
+ AnsibleCoreCI,
+ SshKey,
+)
+
+from .util import (
+ ApplicationError,
+ SubprocessError,
+ cache,
+ display,
+ get_type_map,
+ sanitize_host_name,
+ sorted_versions,
+)
+
+from .util_common import (
+ intercept_python,
+)
+
+from .docker_util import (
+ docker_exec,
+ docker_rm,
+ get_docker_hostname,
+)
+
+from .bootstrap import (
+ BootstrapDocker,
+ BootstrapRemote,
+)
+
+from .venv import (
+ get_virtual_python,
+)
+
+from .ssh import (
+ SshConnectionDetail,
+)
+
+from .ansible_util import (
+ ansible_environment,
+ get_hosts,
+ parse_inventory,
+)
+
+from .containers import (
+ CleanupMode,
+ HostType,
+ get_container_database,
+ run_support_container,
+)
+
+from .connections import (
+ Connection,
+ DockerConnection,
+ LocalConnection,
+ SshConnection,
+)
+
+from .become import (
+ Su,
+ Sudo,
+)
+
+TControllerHostConfig = t.TypeVar('TControllerHostConfig', bound=ControllerHostConfig)
+THostConfig = t.TypeVar('THostConfig', bound=HostConfig)
+TPosixConfig = t.TypeVar('TPosixConfig', bound=PosixConfig)
+TRemoteConfig = t.TypeVar('TRemoteConfig', bound=RemoteConfig)
+
+
+@dataclasses.dataclass(frozen=True)
+class Inventory:
+ """Simple representation of an Ansible inventory."""
+ host_groups: t.Dict[str, t.Dict[str, t.Dict[str, str]]]
+ extra_groups: t.Optional[t.Dict[str, t.List[str]]] = None
+
+ @staticmethod
+ def create_single_host(name, variables): # type: (str, t.Dict[str, str]) -> Inventory
+ """Return an inventory instance created from the given hostname and variables."""
+ return Inventory(host_groups=dict(all={name: variables}))
+
+ def write(self, args, path): # type: (CommonConfig, str) -> None
+ """Write the given inventory to the specified path on disk."""
+
+ # NOTE: Switching the inventory generation to write JSON would be nice, but is currently not possible due to the use of hard-coded inventory filenames.
+ # The name `inventory` works for the POSIX integration tests, but `inventory.winrm` and `inventory.networking` will only parse in INI format.
+ # If tests are updated to use the `INVENTORY_PATH` environment variable, then this could be changed.
+ # Also, some tests detect the test type by inspecting the suffix on the inventory filename, which would break if it were changed.
+
+ inventory_text = ''
+
+ for group, hosts in self.host_groups.items():
+ inventory_text += f'[{group}]\n'
+
+ for host, variables in hosts.items():
+ kvp = ' '.join(f'{key}="{value}"' for key, value in variables.items())
+ inventory_text += f'{host} {kvp}\n'
+
+ inventory_text += '\n'
+
+ for group, children in (self.extra_groups or {}).items():
+ inventory_text += f'[{group}]\n'
+
+ for child in children:
+ inventory_text += f'{child}\n'
+
+ inventory_text += '\n'
+
+ inventory_text = inventory_text.strip()
+
+ if not args.explain:
+ write_text_file(path, inventory_text)
+
+ display.info(f'>>> Inventory\n{inventory_text}', verbosity=3)
+
+
+class HostProfile(t.Generic[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for host profiles."""
+ def __init__(self,
+ *,
+ args, # type: EnvironmentConfig
+ config, # type: THostConfig
+ targets, # type: t.Optional[t.List[HostConfig]]
+ ): # type: (...) -> None
+ self.args = args
+ self.config = config
+ self.controller = bool(targets)
+ self.targets = targets or []
+
+ self.state = {} # type: t.Dict[str, t.Any]
+ """State that must be persisted across delegation."""
+ self.cache = {} # type: t.Dict[str, t.Any]
+ """Cache that must not be persisted across delegation."""
+
+ def provision(self): # type: () -> None
+ """Provision the host before delegation."""
+
+ def setup(self): # type: () -> None
+ """Perform out-of-band setup before delegation."""
+
+ def deprovision(self): # type: () -> None
+ """Deprovision the host after delegation has completed."""
+
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+
+ def configure(self): # type: () -> None
+ """Perform in-band configuration. Executed before delegation for the controller and after delegation for targets."""
+
+ def __getstate__(self):
+ return {key: value for key, value in self.__dict__.items() if key not in ('args', 'cache')}
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+
+ # args will be populated after the instances are restored
+ self.cache = {}
+
+
+class PosixProfile(HostProfile[TPosixConfig], metaclass=abc.ABCMeta):
+ """Base class for POSIX host profiles."""
+ @property
+ def python(self): # type: () -> PythonConfig
+ """
+ The Python to use for this profile.
+ If it is a virtual python, it will be created the first time it is requested.
+ """
+ python = self.state.get('python')
+
+ if not python:
+ python = self.config.python
+
+ if isinstance(python, VirtualPythonConfig):
+ python = VirtualPythonConfig(
+ version=python.version,
+ system_site_packages=python.system_site_packages,
+ path=os.path.join(get_virtual_python(self.args, python), 'bin', 'python'),
+ )
+
+ self.state['python'] = python
+
+ return python
+
+
+class ControllerHostProfile(PosixProfile[TControllerHostConfig], metaclass=abc.ABCMeta):
+ """Base class for profiles usable as a controller."""
+ @abc.abstractmethod
+ def get_origin_controller_connection(self): # type: () -> Connection
+ """Return a connection for accessing the host as a controller from the origin."""
+
+ @abc.abstractmethod
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+
+
+class SshTargetHostProfile(HostProfile[THostConfig], metaclass=abc.ABCMeta):
+ """Base class for profiles offering SSH connectivity."""
+ @abc.abstractmethod
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+
+
+class RemoteProfile(SshTargetHostProfile[TRemoteConfig], metaclass=abc.ABCMeta):
+ """Base class for remote instance profiles."""
+ @property
+ def core_ci_state(self): # type: () -> t.Optional[t.Dict[str, str]]
+ """The saved Ansible Core CI state."""
+ return self.state.get('core_ci')
+
+ @core_ci_state.setter
+ def core_ci_state(self, value): # type: (t.Dict[str, str]) -> None
+ """The saved Ansible Core CI state."""
+ self.state['core_ci'] = value
+
+ def provision(self): # type: () -> None
+ """Provision the host before delegation."""
+ self.core_ci = self.create_core_ci(load=True)
+ self.core_ci.start()
+
+ self.core_ci_state = self.core_ci.save()
+
+ def deprovision(self): # type: () -> None
+ """Deprovision the host after delegation has completed."""
+ if self.args.remote_terminate == TerminateMode.ALWAYS or (self.args.remote_terminate == TerminateMode.SUCCESS and self.args.success):
+ self.delete_instance()
+
+ @property
+ def core_ci(self): # type: () -> t.Optional[AnsibleCoreCI]
+ """Return the cached AnsibleCoreCI instance, if any, otherwise None."""
+ return self.cache.get('core_ci')
+
+ @core_ci.setter
+ def core_ci(self, value): # type: (AnsibleCoreCI) -> None
+ """Cache the given AnsibleCoreCI instance."""
+ self.cache['core_ci'] = value
+
+ def get_instance(self): # type: () -> t.Optional[AnsibleCoreCI]
+ """Return the current AnsibleCoreCI instance, loading it if not already loaded."""
+ if not self.core_ci and self.core_ci_state:
+ self.core_ci = self.create_core_ci(load=False)
+ self.core_ci.load(self.core_ci_state)
+
+ return self.core_ci
+
+ def delete_instance(self):
+ """Delete the AnsibleCoreCI VM instance."""
+ core_ci = self.get_instance()
+
+ if not core_ci:
+ return # instance has not been provisioned
+
+ core_ci.stop()
+
+ def wait_for_instance(self): # type: () -> AnsibleCoreCI
+ """Wait for an AnsibleCoreCI VM instance to become ready."""
+ core_ci = self.get_instance()
+ core_ci.wait()
+
+ return core_ci
+
+ def create_core_ci(self, load): # type: (bool) -> AnsibleCoreCI
+ """Create and return an AnsibleCoreCI instance."""
+ return AnsibleCoreCI(
+ args=self.args,
+ platform=self.config.platform,
+ version=self.config.version,
+ provider=self.config.provider,
+ suffix='controller' if self.controller else 'target',
+ load=load,
+ )
+
+
+class ControllerProfile(SshTargetHostProfile[ControllerConfig], PosixProfile[ControllerConfig]):
+ """Host profile for the controller as a target."""
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ settings = SshConnectionDetail(
+ name='localhost',
+ host='localhost',
+ port=None,
+ user='root',
+ identity_file=SshKey(self.args).key,
+ python_interpreter=self.args.controller_python.path,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+class DockerProfile(ControllerHostProfile[DockerConfig], SshTargetHostProfile[DockerConfig]):
+ """Host profile for a docker instance."""
+ @property
+ def container_name(self): # type: () -> t.Optional[str]
+ """Return the stored container name, if any, otherwise None."""
+ return self.state.get('container_name')
+
+ @container_name.setter
+ def container_name(self, value): # type: (str) -> None
+ """Store the given container name."""
+ self.state['container_name'] = value
+
+ def provision(self): # type: () -> None
+ """Provision the host before delegation."""
+ container = run_support_container(
+ args=self.args,
+ context='__test_hosts__',
+ image=self.config.image,
+ name=f'ansible-test-{"controller" if self.controller else "target"}-{self.args.session_name}',
+ ports=[22],
+ publish_ports=not self.controller, # connections to the controller over SSH are not required
+ options=self.get_docker_run_options(),
+ cleanup=CleanupMode.NO,
+ )
+
+ self.container_name = container.name
+
+ def setup(self): # type: () -> None
+ """Perform out-of-band setup before delegation."""
+ bootstrapper = BootstrapDocker(
+ controller=self.controller,
+ python_versions=[self.python.version],
+ ssh_key=SshKey(self.args),
+ )
+
+ setup_sh = bootstrapper.get_script()
+ shell = setup_sh.splitlines()[0][2:]
+
+ docker_exec(self.args, self.container_name, [shell], data=setup_sh)
+
+ def deprovision(self): # type: () -> None
+ """Deprovision the host after delegation has completed."""
+ if self.args.docker_terminate == TerminateMode.ALWAYS or (self.args.docker_terminate == TerminateMode.SUCCESS and self.args.success):
+ docker_rm(self.args, self.container_name)
+
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ if not self.controller:
+ con = self.get_controller_target_connections()[0]
+
+ for dummy in range(1, 60):
+ try:
+ con.run(['id'], capture=True)
+ except SubprocessError as ex:
+ if 'Permission denied' in ex.message:
+ raise
+
+ time.sleep(1)
+ else:
+ return
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ containers = get_container_database(self.args)
+ access = containers.data[HostType.control]['__test_hosts__'][self.container_name]
+
+ host = access.host_ip
+ port = dict(access.port_map())[22]
+
+ settings = SshConnectionDetail(
+ name=self.config.name,
+ user='root',
+ host=host,
+ port=port,
+ identity_file=SshKey(self.args).key,
+ python_interpreter=self.python.path,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+ def get_origin_controller_connection(self): # type: () -> DockerConnection
+ """Return a connection for accessing the host as a controller from the origin."""
+ return DockerConnection(self.args, self.container_name)
+
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+ return '/root'
+
+ def get_docker_run_options(self): # type: () -> t.List[str]
+ """Return a list of options needed to run the container."""
+ options = [
+ '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
+ f'--privileged={str(self.config.privileged).lower()}',
+ ]
+
+ if self.config.memory:
+ options.extend([
+ f'--memory={self.config.memory}',
+ f'--memory-swap={self.config.memory}',
+ ])
+
+ if self.config.seccomp != 'default':
+ options.extend(['--security-opt', f'seccomp={self.config.seccomp}'])
+
+ docker_socket = '/var/run/docker.sock'
+
+ if get_docker_hostname() != 'localhost' or os.path.exists(docker_socket):
+ options.extend(['--volume', f'{docker_socket}:{docker_socket}'])
+
+ return options
+
+
+class NetworkInventoryProfile(HostProfile[NetworkInventoryConfig]):
+ """Host profile for a network inventory."""
+
+
+class NetworkRemoteProfile(RemoteProfile[NetworkRemoteConfig]):
+ """Host profile for a network remote instance."""
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ self.wait_until_ready()
+
+ def get_inventory_variables(self):
+ """Return inventory variables for accessing this host."""
+ core_ci = self.wait_for_instance()
+ connection = core_ci.connection
+
+ variables = dict(
+ ansible_connection=self.config.connection,
+ ansible_pipelining='yes',
+ ansible_host=connection.hostname,
+ ansible_port=connection.port,
+ ansible_user=connection.username,
+ ansible_ssh_private_key=core_ci.ssh_key.key,
+ ansible_network_os=f'{self.config.collection}.{self.config.platform}' if self.config.collection else self.config.platform,
+ )
+
+ return variables
+
+ def wait_until_ready(self): # type: () -> None
+ """Wait for the host to respond to an Ansible module request."""
+ core_ci = self.wait_for_instance()
+
+ if not isinstance(self.args, IntegrationConfig):
+ return # skip extended checks unless we're running integration tests
+
+ inventory = Inventory.create_single_host(sanitize_host_name(self.config.name), self.get_inventory_variables())
+ env = ansible_environment(self.args)
+ module_name = f'{self.config.collection + "." if self.config.collection else ""}{self.config.platform}_command'
+
+ with tempfile.NamedTemporaryFile() as inventory_file:
+ inventory.write(self.args, inventory_file.name)
+
+ cmd = ['ansible', '-m', module_name, '-a', 'commands=?', '-i', inventory_file.name, 'all']
+
+ for dummy in range(1, 90):
+ try:
+ intercept_python(self.args, self.args.controller_python, cmd, env)
+ except SubprocessError:
+ time.sleep(10)
+ else:
+ return
+
+ raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ core_ci = self.wait_for_instance()
+
+ settings = SshConnectionDetail(
+ name=core_ci.name,
+ host=core_ci.connection.hostname,
+ port=core_ci.connection.port,
+ user=core_ci.connection.username,
+ identity_file=core_ci.ssh_key.key,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+class OriginProfile(ControllerHostProfile[OriginConfig]):
+ """Host profile for origin."""
+ def get_origin_controller_connection(self): # type: () -> LocalConnection
+ """Return a connection for accessing the host as a controller from the origin."""
+ return LocalConnection(self.args)
+
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+ return os.getcwd()
+
+
+class PosixRemoteProfile(ControllerHostProfile[PosixRemoteConfig], RemoteProfile[PosixRemoteConfig]):
+ """Host profile for a POSIX remote instance."""
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ self.wait_until_ready()
+
+ def configure(self): # type: () -> None
+ """Perform in-band configuration. Executed before delegation for the controller and after delegation for targets."""
+ # a target uses a single python version, but a controller may include additional versions for targets running on the controller
+ python_versions = [self.python.version] + [target.python.version for target in self.targets if isinstance(target, ControllerConfig)]
+ python_versions = sorted_versions(list(set(python_versions)))
+
+ core_ci = self.wait_for_instance()
+ pwd = self.wait_until_ready()
+
+ display.info(f'Remote working directory: {pwd}', verbosity=1)
+
+ bootstrapper = BootstrapRemote(
+ controller=self.controller,
+ platform=self.config.platform,
+ platform_version=self.config.version,
+ python_versions=python_versions,
+ ssh_key=core_ci.ssh_key,
+ )
+
+ setup_sh = bootstrapper.get_script()
+ shell = setup_sh.splitlines()[0][2:]
+
+ ssh = self.get_origin_controller_connection()
+ ssh.run([shell], data=setup_sh)
+
+ def get_ssh_connection(self): # type: () -> SshConnection
+ """Return an SSH connection for accessing the host."""
+ core_ci = self.wait_for_instance()
+
+ settings = SshConnectionDetail(
+ name=core_ci.name,
+ user=core_ci.connection.username,
+ host=core_ci.connection.hostname,
+ port=core_ci.connection.port,
+ identity_file=core_ci.ssh_key.key,
+ python_interpreter=self.python.path,
+ )
+
+ if settings.user == 'root':
+ become = None
+ elif self.config.platform == 'freebsd':
+ become = Su()
+ elif self.config.platform == 'macos':
+ become = Sudo()
+ elif self.config.platform == 'rhel':
+ become = Sudo()
+ else:
+ raise NotImplementedError(f'Become support has not been implemented for platform "{self.config.platform}" and user "{settings.user}" is not root.')
+
+ return SshConnection(self.args, settings, become)
+
+ def wait_until_ready(self): # type: () -> str
+ """Wait for instance to respond to SSH, returning the current working directory once connected."""
+ core_ci = self.wait_for_instance()
+
+ for dummy in range(1, 90):
+ try:
+ return self.get_working_directory()
+ except SubprocessError as ex:
+ if 'Permission denied' in ex.message:
+ raise
+
+ time.sleep(10)
+
+ raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ return [self.get_ssh_connection()]
+
+ def get_origin_controller_connection(self): # type: () -> SshConnection
+ """Return a connection for accessing the host as a controller from the origin."""
+ return self.get_ssh_connection()
+
+ def get_working_directory(self): # type: () -> str
+ """Return the working directory for the host."""
+ if not self.pwd:
+ ssh = self.get_origin_controller_connection()
+ stdout = ssh.run(['pwd'], capture=True)[0]
+
+ if self.args.explain:
+ return '/pwd'
+
+ pwd = stdout.strip().splitlines()[-1]
+
+ if not pwd.startswith('/'):
+ raise Exception(f'Unexpected current working directory "{pwd}" from "pwd" command output:\n{stdout.strip()}')
+
+ self.pwd = pwd
+
+ return self.pwd
+
+ @property
+ def pwd(self): # type: () -> t.Optional[str]
+ """Return the cached pwd, if any, otherwise None."""
+ return self.cache.get('pwd')
+
+ @pwd.setter
+ def pwd(self, value): # type: (str) -> None
+ """Cache the given pwd."""
+ self.cache['pwd'] = value
+
+
+class PosixSshProfile(SshTargetHostProfile[PosixSshConfig], PosixProfile[PosixSshConfig]):
+ """Host profile for a POSIX SSH instance."""
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ settings = SshConnectionDetail(
+ name='target',
+ user=self.config.user,
+ host=self.config.host,
+ port=self.config.port,
+ identity_file=SshKey(self.args).key,
+ python_interpreter=self.python.path,
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+class WindowsInventoryProfile(SshTargetHostProfile[WindowsInventoryConfig]):
+ """Host profile for a Windows inventory."""
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ inventory = parse_inventory(self.args, self.config.path)
+ hosts = get_hosts(inventory, 'windows')
+ identity_file = SshKey(self.args).key
+
+ settings = [SshConnectionDetail(
+ name=name,
+ host=config['ansible_host'],
+ port=22,
+ user=config['ansible_user'],
+ identity_file=identity_file,
+ shell_type='powershell',
+ ) for name, config in hosts.items()]
+
+ if settings:
+ details = '\n'.join(f'{ssh.name} {ssh.user}@{ssh.host}:{ssh.port}' for ssh in settings)
+ display.info(f'Generated SSH connection details from inventory:\n{details}', verbosity=1)
+
+ return [SshConnection(self.args, setting) for setting in settings]
+
+
+class WindowsRemoteProfile(RemoteProfile[WindowsRemoteConfig]):
+ """Host profile for a Windows remote instance."""
+ def wait(self): # type: () -> None
+ """Wait for the instance to be ready. Executed before delegation for the controller and after delegation for targets."""
+ self.wait_until_ready()
+
+ def get_inventory_variables(self):
+ """Return inventory variables for accessing this host."""
+ core_ci = self.wait_for_instance()
+ connection = core_ci.connection
+
+ variables = dict(
+ ansible_connection='winrm',
+ ansible_pipelining='yes',
+ ansible_winrm_server_cert_validation='ignore',
+ ansible_host=connection.hostname,
+ ansible_port=connection.port,
+ ansible_user=connection.username,
+ ansible_password=connection.password,
+ ansible_ssh_private_key=core_ci.ssh_key.key,
+ )
+
+ # HACK: force 2016 to use NTLM + HTTP message encryption
+ if self.config.version == '2016':
+ variables.update(
+ ansible_winrm_transport='ntlm',
+ ansible_winrm_scheme='http',
+ ansible_port='5985',
+ )
+
+ return variables
+
+ def wait_until_ready(self): # type: () -> None
+ """Wait for the host to respond to an Ansible module request."""
+ core_ci = self.wait_for_instance()
+
+ if not isinstance(self.args, IntegrationConfig):
+ return # skip extended checks unless we're running integration tests
+
+ inventory = Inventory.create_single_host(sanitize_host_name(self.config.name), self.get_inventory_variables())
+ env = ansible_environment(self.args)
+ module_name = 'ansible.windows.win_ping'
+
+ with tempfile.NamedTemporaryFile() as inventory_file:
+ inventory.write(self.args, inventory_file.name)
+
+ cmd = ['ansible', '-m', module_name, '-i', inventory_file.name, 'all']
+
+ for dummy in range(1, 120):
+ try:
+ intercept_python(self.args, self.args.controller_python, cmd, env)
+ except SubprocessError:
+ time.sleep(10)
+ else:
+ return
+
+ raise ApplicationError(f'Timeout waiting for {self.config.name} instance {core_ci.instance_id}.')
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing the host as a target from the controller."""
+ core_ci = self.wait_for_instance()
+
+ settings = SshConnectionDetail(
+ name=core_ci.name,
+ host=core_ci.connection.hostname,
+ port=22,
+ user=core_ci.connection.username,
+ identity_file=core_ci.ssh_key.key,
+ shell_type='powershell',
+ )
+
+ return [SshConnection(self.args, settings)]
+
+
+@cache
+def get_config_profile_type_map(): # type: () -> t.Dict[t.Type[HostConfig], t.Type[HostProfile]]
+ """Create and return a mapping of HostConfig types to HostProfile types."""
+ return get_type_map(HostProfile, HostConfig)
+
+
+def create_host_profile(
+ args, # type: EnvironmentConfig
+ config, # type: HostConfig
+ controller, # type: bool
+): # type: (...) -> HostProfile
+ """Create and return a host profile from the given host configuration."""
+ profile_type = get_config_profile_type_map()[type(config)]
+ profile = profile_type(args=args, config=config, targets=args.targets if controller else None)
+ return profile
diff --git a/test/lib/ansible_test/_internal/http.py b/test/lib/ansible_test/_internal/http.py
index 6607a10bc0..08aaf41689 100644
--- a/test/lib/ansible_test/_internal/http.py
+++ b/test/lib/ansible_test/_internal/http.py
@@ -2,24 +2,11 @@
Primitive replacement for requests to avoid extra dependency.
Avoids use of urllib2 due to lack of SNI support.
"""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import time
-
-try:
- from urllib import urlencode
-except ImportError:
- # noinspection PyCompatibility, PyUnresolvedReferences
- from urllib.parse import urlencode # pylint: disable=locally-disabled, import-error, no-name-in-module
-
-try:
- # noinspection PyCompatibility
- from urlparse import urlparse, urlunparse, parse_qs
-except ImportError:
- # noinspection PyCompatibility, PyUnresolvedReferences
- from urllib.parse import urlparse, urlunparse, parse_qs # pylint: disable=locally-disabled, ungrouped-imports
+import typing as t
from .util import (
ApplicationError,
@@ -35,12 +22,7 @@ from .util_common import (
class HttpClient:
"""Make HTTP requests via curl."""
- def __init__(self, args, always=False, insecure=False, proxy=None):
- """
- :type args: CommonConfig
- :type always: bool
- :type insecure: bool
- """
+ def __init__(self, args, always=False, insecure=False, proxy=None): # type: (CommonConfig, bool, bool, t.Optional[str]) -> None
self.args = args
self.always = always
self.insecure = insecure
@@ -177,5 +159,5 @@ class HttpError(ApplicationError):
:type status: int
:type message: str
"""
- super(HttpError, self).__init__('%s: %s' % (status, message))
+ super().__init__('%s: %s' % (status, message))
self.status = status
diff --git a/test/lib/ansible_test/_internal/init.py b/test/lib/ansible_test/_internal/init.py
index 682e6b0cf1..863c2589c7 100644
--- a/test/lib/ansible_test/_internal/init.py
+++ b/test/lib/ansible_test/_internal/init.py
@@ -1,6 +1,5 @@
"""Early initialization for ansible-test before most other imports have been performed."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import resource
diff --git a/test/lib/ansible_test/_internal/inventory.py b/test/lib/ansible_test/_internal/inventory.py
new file mode 100644
index 0000000000..cfd7c50a81
--- /dev/null
+++ b/test/lib/ansible_test/_internal/inventory.py
@@ -0,0 +1,170 @@
+"""Inventory creation from host profiles."""
+from __future__ import annotations
+
+import shutil
+import typing as t
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .util import (
+ sanitize_host_name,
+ exclude_none_values,
+)
+
+from .host_profiles import (
+ ControllerHostProfile,
+ ControllerProfile,
+ HostProfile,
+ Inventory,
+ NetworkInventoryProfile,
+ NetworkRemoteProfile,
+ SshTargetHostProfile,
+ WindowsInventoryProfile,
+ WindowsRemoteProfile,
+)
+
+
+def create_controller_inventory(args, path, controller_host): # type: (EnvironmentConfig, str, ControllerHostProfile) -> None
+ """Create and return inventory for use in controller-only integration tests."""
+ inventory = Inventory(
+ host_groups=dict(
+ testgroup=dict(
+ testhost=dict(
+ ansible_connection='local',
+ ansible_pipelining='yes',
+ ansible_python_interpreter=controller_host.python.path,
+ ),
+ ),
+ ),
+ )
+
+ inventory.write(args, path)
+
+
+def create_windows_inventory(args, path, target_hosts): # type: (EnvironmentConfig, str, t.List[HostProfile]) -> None
+ """Create and return inventory for use in target Windows integration tests."""
+ first = target_hosts[0]
+
+ if isinstance(first, WindowsInventoryProfile):
+ if args.explain:
+ return
+
+ try:
+ shutil.copyfile(first.config.path, path)
+ except shutil.SameFileError:
+ pass
+
+ return
+
+ target_hosts = t.cast(t.List[WindowsRemoteProfile], target_hosts)
+ hosts = [(target_host, target_host.wait_for_instance().connection) for target_host in target_hosts]
+ windows_hosts = {sanitize_host_name(host.config.name): host.get_inventory_variables() for host, connection in hosts}
+
+ inventory = Inventory(
+ host_groups=dict(
+ windows=windows_hosts,
+ ),
+ # The `testhost` group is needed to support the `binary_modules_winrm` integration test.
+ # The test should be updated to remove the need for this.
+ extra_groups={
+ 'testhost:children': [
+ 'windows',
+ ],
+ },
+ )
+
+ inventory.write(args, path)
+
+
+def create_network_inventory(args, path, target_hosts): # type: (EnvironmentConfig, str, t.List[HostProfile]) -> None
+ """Create and return inventory for use in target network integration tests."""
+ first = target_hosts[0]
+
+ if isinstance(first, NetworkInventoryProfile):
+ if args.explain:
+ return
+
+ try:
+ shutil.copyfile(first.config.path, path)
+ except shutil.SameFileError:
+ pass
+
+ return
+
+ target_hosts = t.cast(t.List[NetworkRemoteProfile], target_hosts)
+ host_groups = {target_host.config.platform: {} for target_host in target_hosts}
+
+ for target_host in target_hosts:
+ host_groups[target_host.config.platform][sanitize_host_name(target_host.config.name)] = target_host.get_inventory_variables()
+
+ inventory = Inventory(
+ host_groups=host_groups,
+        # The `net` group was added to support platform agnostic testing. It may no longer be needed.
+ # see: https://github.com/ansible/ansible/pull/34661
+ # see: https://github.com/ansible/ansible/pull/34707
+ extra_groups={
+ 'net:children': sorted(host_groups),
+ },
+ )
+
+ inventory.write(args, path)
+
+
+def create_posix_inventory(args, path, target_hosts, needs_ssh=False): # type: (EnvironmentConfig, str, t.List[HostProfile], bool) -> None
+ """Create and return inventory for use in POSIX integration tests."""
+ target_hosts = t.cast(t.List[SshTargetHostProfile], target_hosts)
+
+ if len(target_hosts) != 1:
+ raise Exception()
+
+ target_host = target_hosts[0]
+
+ if isinstance(target_host, ControllerProfile) and not needs_ssh:
+ inventory = Inventory(
+ host_groups=dict(
+ testgroup=dict(
+ testhost=dict(
+ ansible_connection='local',
+ ansible_pipelining='yes',
+ ansible_python_interpreter=target_host.python.path,
+ ),
+ ),
+ ),
+ )
+ else:
+ connections = target_host.get_controller_target_connections()
+
+ if len(connections) != 1:
+ raise Exception()
+
+ ssh = connections[0]
+
+ testhost = dict(
+ ansible_connection='ssh',
+ ansible_pipelining='yes',
+ ansible_python_interpreter=ssh.settings.python_interpreter,
+ ansible_host=ssh.settings.host,
+ ansible_port=ssh.settings.port,
+ ansible_user=ssh.settings.user,
+ ansible_ssh_private_key=ssh.settings.identity_file,
+ )
+
+ if ssh.become:
+ testhost.update(
+ ansible_become='yes',
+ ansible_become_method=ssh.become.method,
+ )
+
+ testhost = exclude_none_values(testhost)
+
+ inventory = Inventory(
+ host_groups=dict(
+ testgroup=dict(
+ testhost=testhost,
+ ),
+ ),
+ )
+
+ inventory.write(args, path)
diff --git a/test/lib/ansible_test/_internal/io.py b/test/lib/ansible_test/_internal/io.py
index 1b4fa25acf..9d3301a147 100644
--- a/test/lib/ansible_test/_internal/io.py
+++ b/test/lib/ansible_test/_internal/io.py
@@ -1,13 +1,11 @@
"""Functions for disk IO."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import errno
import io
import json
import os
-
-from . import types as t
+import typing as t
from .encoding import (
ENCODING,
@@ -42,11 +40,11 @@ def make_dirs(path): # type: (str) -> None
def write_json_file(path, # type: str
- content, # type: t.Union[t.List[t.Any], t.Dict[str, t.Any]]
+ content, # type: t.Any
create_directories=False, # type: bool
formatted=True, # type: bool
encoder=None, # type: t.Optional[t.Callable[[t.Any], t.Any]]
- ): # type: (...) -> None
+ ): # type: (...) -> str
"""Write the given json content to the specified path, optionally creating missing directories."""
text_content = json.dumps(content,
sort_keys=formatted,
@@ -57,6 +55,8 @@ def write_json_file(path, # type: str
write_text_file(path, text_content, create_directories=create_directories)
+ return text_content
+
def write_text_file(path, content, create_directories=False): # type: (str, str, bool) -> None
"""Write the given text content to the specified path, optionally creating missing directories."""
@@ -88,6 +88,7 @@ def open_binary_file(path, mode='rb'): # type: (str, str) -> t.BinaryIO
class SortedSetEncoder(json.JSONEncoder):
"""Encode sets as sorted lists."""
def default(self, o):
+ """Return a serialized version of the `o` object."""
if isinstance(o, set):
return sorted(o)
diff --git a/test/lib/ansible_test/_internal/junit_xml.py b/test/lib/ansible_test/_internal/junit_xml.py
new file mode 120000
index 0000000000..bde5519b91
--- /dev/null
+++ b/test/lib/ansible_test/_internal/junit_xml.py
@@ -0,0 +1 @@
+../../../../lib/ansible/utils/_junit_xml.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_internal/manage_ci.py b/test/lib/ansible_test/_internal/manage_ci.py
deleted file mode 100644
index f8d659d8e0..0000000000
--- a/test/lib/ansible_test/_internal/manage_ci.py
+++ /dev/null
@@ -1,436 +0,0 @@
-"""Access Ansible Core CI remote services."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import functools
-import os
-import tempfile
-import time
-
-from . import types as t
-
-from .io import (
- read_text_file,
-)
-
-from .util import (
- SubprocessError,
- ApplicationError,
- Display,
- cmd_quote,
- display,
- ANSIBLE_TEST_TARGET_ROOT,
-)
-
-from .util_common import (
- intercept_command,
- get_network_completion,
- run_command,
- ShellScriptTemplate,
-)
-
-from .core_ci import (
- AnsibleCoreCI,
- SshKey,
-)
-
-from .ansible_util import (
- ansible_environment,
-)
-
-from .config import (
- NetworkIntegrationConfig,
- ShellConfig,
-)
-
-from .payload import (
- create_payload,
-)
-
-
-class ManageWindowsCI:
- """Manage access to a Windows instance provided by Ansible Core CI."""
- def __init__(self, core_ci):
- """
- :type core_ci: AnsibleCoreCI
- """
- self.core_ci = core_ci
- self.ssh_args = ['-i', self.core_ci.ssh_key.key]
-
- ssh_options = dict(
- BatchMode='yes',
- StrictHostKeyChecking='no',
- UserKnownHostsFile='/dev/null',
- ServerAliveInterval=15,
- ServerAliveCountMax=4,
- )
-
- for ssh_option in sorted(ssh_options):
- self.ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]
-
- def setup(self, python_version):
- """Used in delegate_remote to setup the host, no action is required for Windows.
- :type python_version: str
- """
-
- def wait(self):
- """Wait for instance to respond to ansible ping."""
- extra_vars = [
- 'ansible_connection=winrm',
- 'ansible_host=%s' % self.core_ci.connection.hostname,
- 'ansible_user=%s' % self.core_ci.connection.username,
- 'ansible_password=%s' % self.core_ci.connection.password,
- 'ansible_port=%s' % self.core_ci.connection.port,
- 'ansible_winrm_server_cert_validation=ignore',
- ]
-
- name = 'windows_%s' % self.core_ci.version
-
- env = ansible_environment(self.core_ci.args)
- cmd = ['ansible', '-m', 'ansible.windows.win_ping', '-i', '%s,' % name, name, '-e', ' '.join(extra_vars)]
-
- for dummy in range(1, 120):
- try:
- intercept_command(self.core_ci.args, cmd, 'ping', env=env, disable_coverage=True)
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
- (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
-
- def download(self, remote, local):
- """
- :type remote: str
- :type local: str
- """
- self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)
-
- def upload(self, local, remote):
- """
- :type local: str
- :type remote: str
- """
- self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))
-
- def ssh(self, command, options=None, force_pty=True):
- """
- :type command: str | list[str]
- :type options: list[str] | None
- :type force_pty: bool
- """
- if not options:
- options = []
- if force_pty:
- options.append('-tt')
-
- if isinstance(command, list):
- command = ' '.join(cmd_quote(c) for c in command)
-
- run_command(self.core_ci.args,
- ['ssh', '-q'] + self.ssh_args +
- options +
- ['-p', '22',
- '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
- [command])
-
- def scp(self, src, dst):
- """
- :type src: str
- :type dst: str
- """
- for dummy in range(1, 10):
- try:
- run_command(self.core_ci.args,
- ['scp'] + self.ssh_args +
- ['-P', '22', '-q', '-r', src, dst])
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Failed transfer: %s -> %s' % (src, dst))
-
-
-class ManageNetworkCI:
- """Manage access to a network instance provided by Ansible Core CI."""
- def __init__(self, args, core_ci):
- """
- :type args: NetworkIntegrationConfig
- :type core_ci: AnsibleCoreCI
- """
- self.args = args
- self.core_ci = core_ci
-
- def wait(self):
- """Wait for instance to respond to ansible ping."""
- settings = get_network_settings(self.args, self.core_ci.platform, self.core_ci.version)
-
- extra_vars = [
- 'ansible_host=%s' % self.core_ci.connection.hostname,
- 'ansible_port=%s' % self.core_ci.connection.port,
- 'ansible_ssh_private_key_file=%s' % self.core_ci.ssh_key.key,
- ] + [
- '%s=%s' % (key, value) for key, value in settings.inventory_vars.items()
- ]
-
- name = '%s-%s' % (self.core_ci.platform, self.core_ci.version.replace('.', '-'))
-
- env = ansible_environment(self.core_ci.args)
- cmd = [
- 'ansible',
- '-m', '%s%s_command' % (settings.collection + '.' if settings.collection else '', self.core_ci.platform),
- '-a', 'commands=?',
- '-u', self.core_ci.connection.username,
- '-i', '%s,' % name,
- '-e', ' '.join(extra_vars),
- name,
- ]
-
- for dummy in range(1, 90):
- try:
- intercept_command(self.core_ci.args, cmd, 'ping', env=env, disable_coverage=True)
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
- (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
-
-
-class ManagePosixCI:
- """Manage access to a POSIX instance provided by Ansible Core CI."""
- def __init__(self, core_ci):
- """
- :type core_ci: AnsibleCoreCI
- """
- self.core_ci = core_ci
- self.ssh_args = ['-i', self.core_ci.ssh_key.key]
-
- ssh_options = dict(
- BatchMode='yes',
- StrictHostKeyChecking='no',
- UserKnownHostsFile='/dev/null',
- ServerAliveInterval=15,
- ServerAliveCountMax=4,
- )
-
- for ssh_option in sorted(ssh_options):
- self.ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]
-
- self.become = None
-
- if self.core_ci.platform == 'freebsd':
- self.become = ['su', '-l', 'root', '-c']
- elif self.core_ci.platform == 'macos':
- self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH', 'sh', '-c']
- elif self.core_ci.platform == 'osx':
- self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH']
- elif self.core_ci.platform == 'rhel':
- self.become = ['sudo', '-in', 'bash', '-c']
- elif self.core_ci.platform == 'aix':
- self.become = []
-
- if self.become is None:
- raise NotImplementedError('provider %s has not been implemented' % self.core_ci.provider)
-
- def setup(self, python_version):
- """Start instance and wait for it to become ready and respond to an ansible ping.
- :type python_version: str
- :rtype: str
- """
- pwd = self.wait()
-
- display.info('Remote working directory: %s' % pwd, verbosity=1)
-
- if isinstance(self.core_ci.args, ShellConfig):
- if self.core_ci.args.raw:
- return pwd
-
- self.configure(python_version)
- self.upload_source()
-
- return pwd
-
- def wait(self): # type: () -> str
- """Wait for instance to respond to SSH."""
- for dummy in range(1, 90):
- try:
- stdout = self.ssh('pwd', capture=True)[0]
-
- if self.core_ci.args.explain:
- return '/pwd'
-
- pwd = stdout.strip().splitlines()[-1]
-
- if not pwd.startswith('/'):
- raise Exception('Unexpected current working directory "%s" from "pwd" command output:\n%s' % (pwd, stdout))
-
- return pwd
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
- (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
-
- def configure(self, python_version):
- """Configure remote host for testing.
- :type python_version: str
- """
- template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'remote.sh')))
- setup_sh = template.substitute(
- platform=self.core_ci.platform,
- platform_version=self.core_ci.version,
- python_version=python_version,
- )
-
- ssh_keys_sh = get_ssh_key_setup(self.core_ci.ssh_key)
-
- setup_sh += ssh_keys_sh
- shell = setup_sh.splitlines()[0][2:]
-
- self.ssh(shell, data=setup_sh)
-
- def upload_source(self):
- """Upload and extract source."""
- with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
- remote_source_dir = '/tmp'
- remote_source_path = os.path.join(remote_source_dir, os.path.basename(local_source_fd.name))
-
- create_payload(self.core_ci.args, local_source_fd.name)
-
- self.upload(local_source_fd.name, remote_source_dir)
- # AIX does not provide the GNU tar version, leading to parameters
- # being different and -z not being recognized. This pattern works
- # with both versions of tar.
- self.ssh(
- 'rm -rf ~/ansible ~/ansible_collections && cd ~/ && gunzip --stdout %s | tar oxf - && rm %s' %
- (remote_source_path, remote_source_path)
- )
-
- def download(self, remote, local):
- """
- :type remote: str
- :type local: str
- """
- self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)
-
- def upload(self, local, remote):
- """
- :type local: str
- :type remote: str
- """
- self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))
-
- def ssh(self, command, options=None, capture=False, data=None):
- """
- :type command: str | list[str]
- :type options: list[str] | None
- :type capture: bool
- :type data: str | None
- :rtype: str | None, str | None
- """
- if not options:
- options = []
-
- if isinstance(command, list):
- command = ' '.join(cmd_quote(c) for c in command)
-
- command = cmd_quote(command) if self.become else command
-
- options.append('-q')
-
- if not data:
- options.append('-tt')
-
- # Capture SSH debug logs
- with tempfile.NamedTemporaryFile(prefix='ansible-test-ssh-debug-', suffix='.log') as ssh_logfile:
- options.extend(['-vvv', '-E', ssh_logfile.name])
-
- return run_command(self.core_ci.args,
- ['ssh'] + self.ssh_args +
- options +
- ['-p', str(self.core_ci.connection.port),
- '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
- self.become + [command], capture=capture, data=data,
- error_callback=functools.partial(self.capture_log_details, ssh_logfile.name))
-
- def capture_log_details(self, path, ex): # type: (str, SubprocessError) -> None
- """Reads ssh log file and returns relevant error."""
- if ex.status != 255:
- return
-
- markers = [
- 'debug1: Connection Established',
- 'debug1: Authentication successful',
- 'debug1: Entering interactive session',
- 'debug1: Sending command',
- 'debug2: PTY allocation request accepted',
- 'debug2: exec request accepted',
- ]
-
- file_contents = read_text_file(path)
- messages = []
-
- for line in reversed(file_contents.splitlines()):
- messages.append(line)
-
- if any(line.startswith(marker) for marker in markers):
- break
-
- message = '\n'.join(reversed(messages))
-
- ex.message += '>>> SSH Debug Output\n'
- ex.message += '%s%s\n' % (message.strip(), Display.clear)
-
- def scp(self, src, dst):
- """
- :type src: str
- :type dst: str
- """
- for dummy in range(1, 10):
- try:
- run_command(self.core_ci.args,
- ['scp'] + self.ssh_args +
- ['-P', str(self.core_ci.connection.port), '-q', '-r', src, dst])
- return
- except SubprocessError:
- time.sleep(10)
-
- raise ApplicationError('Failed transfer: %s -> %s' % (src, dst))
-
-
-def get_ssh_key_setup(ssh_key): # type: (SshKey) -> str
- """Generate and return a script to configure SSH keys on a host."""
- template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'ssh-keys.sh')))
-
- ssh_keys_sh = template.substitute(
- ssh_public_key=ssh_key.pub_contents,
- ssh_private_key=ssh_key.key_contents,
- ssh_key_type=ssh_key.KEY_TYPE,
- )
-
- return ssh_keys_sh
-
-
-def get_network_settings(args, platform, version): # type: (NetworkIntegrationConfig, str, str) -> NetworkPlatformSettings
- """Returns settings for the given network platform and version."""
- platform_version = '%s/%s' % (platform, version)
- completion = get_network_completion().get(platform_version, {})
- collection = args.platform_collection.get(platform, completion.get('collection'))
-
- settings = NetworkPlatformSettings(
- collection,
- dict(
- ansible_connection=args.platform_connection.get(platform, completion.get('connection')),
- ansible_network_os='%s.%s' % (collection, platform) if collection else platform,
- )
- )
-
- return settings
-
-
-class NetworkPlatformSettings:
- """Settings required for provisioning a network platform."""
- def __init__(self, collection, inventory_vars): # type: (str, t.Type[str, str]) -> None
- self.collection = collection
- self.inventory_vars = inventory_vars
diff --git a/test/lib/ansible_test/_internal/metadata.py b/test/lib/ansible_test/_internal/metadata.py
index 36575d0c73..3c309c92ef 100644
--- a/test/lib/ansible_test/_internal/metadata.py
+++ b/test/lib/ansible_test/_internal/metadata.py
@@ -1,8 +1,6 @@
"""Test metadata for passing data to delegated tests."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-from . import types as t
+from __future__ import annotations
+import typing as t
from .util import (
display,
@@ -25,7 +23,6 @@ class Metadata:
"""Initialize metadata."""
self.changes = {} # type: t.Dict[str, t.Tuple[t.Tuple[int, int]]]
self.cloud_config = None # type: t.Optional[t.Dict[str, str]]
- self.instance_config = None # type: t.Optional[t.List[t.Dict[str, str]]]
self.change_description = None # type: t.Optional[ChangeDescription]
self.ci_provider = None # type: t.Optional[str]
@@ -57,7 +54,6 @@ class Metadata:
return dict(
changes=self.changes,
cloud_config=self.cloud_config,
- instance_config=self.instance_config,
ci_provider=self.ci_provider,
change_description=self.change_description.to_dict(),
)
@@ -90,7 +86,6 @@ class Metadata:
metadata = Metadata()
metadata.changes = data['changes']
metadata.cloud_config = data['cloud_config']
- metadata.instance_config = data['instance_config']
metadata.ci_provider = data['ci_provider']
metadata.change_description = ChangeDescription.from_dict(data['change_description'])
diff --git a/test/lib/ansible_test/_internal/payload.py b/test/lib/ansible_test/_internal/payload.py
index 1e6971e60b..7168f6de57 100644
--- a/test/lib/ansible_test/_internal/payload.py
+++ b/test/lib/ansible_test/_internal/payload.py
@@ -1,6 +1,5 @@
"""Payload management for sending Ansible files and test content to other systems (VMs, containers)."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import atexit
import os
@@ -8,8 +7,7 @@ import stat
import tarfile
import tempfile
import time
-
-from . import types as t
+import typing as t
from .config import (
IntegrationConfig,
@@ -47,7 +45,7 @@ ANSIBLE_BIN_SYMLINK_MAP = {
'ansible-inventory': 'ansible',
'ansible-playbook': 'ansible',
'ansible-pull': 'ansible',
- 'ansible-test': '../test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py',
+ 'ansible-test': '../test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py',
'ansible-vault': 'ansible',
}
diff --git a/test/lib/ansible_test/_internal/provider/__init__.py b/test/lib/ansible_test/_internal/provider/__init__.py
index a60d2a95ca..ce61e13527 100644
--- a/test/lib/ansible_test/_internal/provider/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/__init__.py
@@ -1,14 +1,11 @@
"""Provider (plugin) infrastructure for ansible-test."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import os
-
-from .. import types as t
+import typing as t
from ..util import (
- ABC,
ApplicationError,
get_subclasses,
)
@@ -59,13 +56,13 @@ def find_path_provider(provider_type, # type: t.Type[TPathProvider],
class ProviderNotFoundForPath(ApplicationError):
"""Exception generated when a path based provider cannot be found for a given path."""
def __init__(self, provider_type, path): # type: (t.Type, str) -> None
- super(ProviderNotFoundForPath, self).__init__('No %s found for path: %s' % (provider_type.__name__, path))
+ super().__init__('No %s found for path: %s' % (provider_type.__name__, path))
self.provider_type = provider_type
self.path = path
-class PathProvider(ABC):
+class PathProvider(metaclass=abc.ABCMeta):
"""Base class for provider plugins that are path based."""
sequence = 500
priority = 500
diff --git a/test/lib/ansible_test/_internal/provider/layout/__init__.py b/test/lib/ansible_test/_internal/provider/layout/__init__.py
index 8065c64e0b..147fcbd56f 100644
--- a/test/lib/ansible_test/_internal/provider/layout/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/layout/__init__.py
@@ -1,12 +1,10 @@
"""Code for finding content."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
import collections
import os
-
-from ... import types as t
+import typing as t
from ...util import (
ANSIBLE_SOURCE_ROOT,
@@ -94,7 +92,7 @@ class ContentLayout(Layout):
unit_module_utils_path, # type: str
unit_messages, # type: t.Optional[LayoutMessages]
): # type: (...) -> None
- super(ContentLayout, self).__init__(root, paths)
+ super().__init__(root, paths)
self.plugin_paths = plugin_paths
self.collection = collection
diff --git a/test/lib/ansible_test/_internal/provider/layout/ansible.py b/test/lib/ansible_test/_internal/provider/layout/ansible.py
index 49ca482b7b..345faa7c50 100644
--- a/test/lib/ansible_test/_internal/provider/layout/ansible.py
+++ b/test/lib/ansible_test/_internal/provider/layout/ansible.py
@@ -1,10 +1,8 @@
"""Layout provider for Ansible source."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
ContentLayout,
diff --git a/test/lib/ansible_test/_internal/provider/layout/collection.py b/test/lib/ansible_test/_internal/provider/layout/collection.py
index 6b393b6197..5dca046f02 100644
--- a/test/lib/ansible_test/_internal/provider/layout/collection.py
+++ b/test/lib/ansible_test/_internal/provider/layout/collection.py
@@ -1,10 +1,8 @@
"""Layout provider for Ansible collections."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
ContentLayout,
diff --git a/test/lib/ansible_test/_internal/provider/source/__init__.py b/test/lib/ansible_test/_internal/provider/source/__init__.py
index fab28b0903..359c5d6a2e 100644
--- a/test/lib/ansible_test/_internal/provider/source/__init__.py
+++ b/test/lib/ansible_test/_internal/provider/source/__init__.py
@@ -1,10 +1,8 @@
"""Common code for source providers."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import abc
-
-from ... import types as t
+import typing as t
from .. import (
PathProvider,
diff --git a/test/lib/ansible_test/_internal/provider/source/git.py b/test/lib/ansible_test/_internal/provider/source/git.py
index 0bf81a1cee..96f85dc73d 100644
--- a/test/lib/ansible_test/_internal/provider/source/git.py
+++ b/test/lib/ansible_test/_internal/provider/source/git.py
@@ -1,10 +1,8 @@
"""Source provider for a content root managed by git version control."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from ...git import (
Git,
diff --git a/test/lib/ansible_test/_internal/provider/source/installed.py b/test/lib/ansible_test/_internal/provider/source/installed.py
index d24a6e3dd8..f4ed6f3cbc 100644
--- a/test/lib/ansible_test/_internal/provider/source/installed.py
+++ b/test/lib/ansible_test/_internal/provider/source/installed.py
@@ -1,10 +1,8 @@
"""Source provider for content which has been installed."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from . import (
SourceProvider,
diff --git a/test/lib/ansible_test/_internal/provider/source/unversioned.py b/test/lib/ansible_test/_internal/provider/source/unversioned.py
index cb35fe3d9a..a78060cf8a 100644
--- a/test/lib/ansible_test/_internal/provider/source/unversioned.py
+++ b/test/lib/ansible_test/_internal/provider/source/unversioned.py
@@ -1,10 +1,8 @@
"""Fallback source provider when no other provider matches the content root."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import os
-
-from ... import types as t
+import typing as t
from ...constants import (
TIMEOUT_PATH,
diff --git a/test/lib/ansible_test/_internal/provisioning.py b/test/lib/ansible_test/_internal/provisioning.py
new file mode 100644
index 0000000000..6f81c40b86
--- /dev/null
+++ b/test/lib/ansible_test/_internal/provisioning.py
@@ -0,0 +1,184 @@
+"""Provision hosts for running tests."""
+from __future__ import annotations
+
+import atexit
+import dataclasses
+import functools
+import itertools
+import os
+import pickle
+import sys
+import time
+import traceback
+import typing as t
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .util import (
+ ApplicationError,
+ display,
+ open_binary_file,
+ verify_sys_executable,
+ version_to_str,
+)
+
+from .thread import (
+ WrappedThread,
+)
+
+from .host_profiles import (
+ ControllerHostProfile,
+ HostProfile,
+ SshConnection,
+ SshTargetHostProfile,
+ create_host_profile,
+)
+
+from .pypi_proxy import (
+ run_pypi_proxy,
+)
+
+THostProfile = t.TypeVar('THostProfile', bound=HostProfile)
+TEnvironmentConfig = t.TypeVar('TEnvironmentConfig', bound=EnvironmentConfig)
+
+
+@dataclasses.dataclass(frozen=True)
+class HostState:
+ """State of hosts and profiles to be passed to ansible-test during delegation."""
+ controller_profile: ControllerHostProfile
+ target_profiles: t.List[HostProfile]
+
+ @property
+ def profiles(self): # type: () -> t.List[HostProfile]
+ """Return all the profiles as a list."""
+ return [t.cast(HostProfile, self.controller_profile)] + self.target_profiles
+
+ def serialize(self, path): # type: (str) -> None
+ """Serialize the host state to the given path."""
+ with open_binary_file(path, 'wb') as state_file:
+ pickle.dump(self, state_file)
+
+ @staticmethod
+ def deserialize(args, path): # type: (EnvironmentConfig, str) -> HostState
+ """Deserialize host state from the given args and path."""
+ with open_binary_file(path) as state_file:
+ host_state = pickle.load(state_file) # type: HostState
+
+ host_state.controller_profile.args = args
+
+ for target in host_state.target_profiles:
+ target.args = args
+
+ return host_state
+
+ def get_controller_target_connections(self): # type: () -> t.List[SshConnection]
+ """Return SSH connection(s) for accessing all target hosts from the controller."""
+ return list(itertools.chain.from_iterable([target.get_controller_target_connections() for
+ target in self.target_profiles if isinstance(target, SshTargetHostProfile)]))
+
+ def targets(self, profile_type): # type: (t.Type[THostProfile]) -> t.List[THostProfile]
+ """The list of target(s), verified to be of the specified type."""
+ if not self.target_profiles:
+ raise Exception('No target profiles found.')
+
+ if not all(isinstance(target, profile_type) for target in self.target_profiles):
+ raise Exception(f'Target profile(s) are not of the required type: {profile_type}')
+
+ return self.target_profiles
+
+
+def prepare_profiles(
+ args, # type: TEnvironmentConfig
+ targets_use_pypi=False, # type: bool
+ skip_setup=False, # type: bool
+ requirements=None, # type: t.Optional[t.Callable[[TEnvironmentConfig, HostState], None]]
+): # type: (...) -> HostState
+ """
+ Create new profiles, or load existing ones, and return them.
+ If a requirements callback was provided, it will be used before configuring hosts if delegation has already been performed.
+ """
+ if args.host_path:
+ host_state = HostState.deserialize(args, os.path.join(args.host_path, 'state.dat'))
+ else:
+ run_pypi_proxy(args, targets_use_pypi)
+
+ host_state = HostState(
+ controller_profile=t.cast(ControllerHostProfile, create_host_profile(args, args.controller, True)),
+ target_profiles=[create_host_profile(args, target, False) for target in args.targets],
+ )
+
+ atexit.register(functools.partial(cleanup_profiles, host_state))
+
+ def provision(profile): # type: (HostProfile) -> None
+ """Provision the given profile."""
+ profile.provision()
+
+ if not skip_setup:
+ profile.setup()
+
+ dispatch_jobs([(profile, WrappedThread(functools.partial(provision, profile))) for profile in host_state.profiles])
+
+ host_state.controller_profile.configure()
+
+ if not args.delegate:
+ check_controller_python(args, host_state)
+
+ if requirements:
+ requirements(args, host_state)
+
+ def configure(profile): # type: (HostProfile) -> None
+ """Configure the given profile."""
+ profile.wait()
+
+ if not skip_setup:
+ profile.configure()
+
+ dispatch_jobs([(profile, WrappedThread(functools.partial(configure, profile))) for profile in host_state.target_profiles])
+
+ return host_state
+
+
+def check_controller_python(args, host_state): # type: (EnvironmentConfig, HostState) -> None
+ """Check the running environment to make sure it is what we expected."""
+ sys_version = version_to_str(sys.version_info[:2])
+ controller_python = host_state.controller_profile.python
+
+ if expected_executable := verify_sys_executable(controller_python.path):
+ raise ApplicationError(f'Running under Python interpreter "{sys.executable}" instead of "{expected_executable}".')
+
+ expected_version = controller_python.version
+
+ if expected_version != sys_version:
+ raise ApplicationError(f'Running under Python version {sys_version} instead of {expected_version}.')
+
+ args.controller_python = controller_python
+
+
+def cleanup_profiles(host_state): # type: (HostState) -> None
+ """Cleanup provisioned hosts when exiting."""
+ for profile in host_state.profiles:
+ profile.deprovision()
+
+
+def dispatch_jobs(jobs): # type: (t.List[t.Tuple[HostProfile, WrappedThread]]) -> None
+ """Run the given profile job threads and wait for them to complete."""
+ for profile, thread in jobs:
+ thread.daemon = True
+ thread.start()
+
+ while any(thread.is_alive() for profile, thread in jobs):
+ time.sleep(1)
+
+ failed = False
+
+ for profile, thread in jobs:
+ try:
+ thread.wait_for_result()
+ except Exception as ex: # pylint: disable=broad-except
+ display.error(f'Host {profile} job failed: {ex}\n{"".join(traceback.format_tb(ex.__traceback__))}')
+ failed = True
+
+ if failed:
+ raise ApplicationError('Host job(s) failed. See previous error(s) for details.')
diff --git a/test/lib/ansible_test/_internal/pypi_proxy.py b/test/lib/ansible_test/_internal/pypi_proxy.py
new file mode 100644
index 0000000000..968794fd20
--- /dev/null
+++ b/test/lib/ansible_test/_internal/pypi_proxy.py
@@ -0,0 +1,178 @@
+"""PyPI proxy management."""
+from __future__ import annotations
+
+import atexit
+import os
+import urllib.parse
+
+from .io import (
+ write_text_file,
+)
+
+from .config import (
+ EnvironmentConfig,
+)
+
+from .host_configs import (
+ PosixConfig,
+)
+
+from .util import (
+ ApplicationError,
+ display,
+)
+
+from .util_common import (
+ process_scoped_temporary_file,
+)
+
+from .docker_util import (
+ docker_available,
+)
+
+from .containers import (
+ HostType,
+ get_container_database,
+ run_support_container,
+)
+
+from .ansible_util import (
+ run_playbook,
+)
+
+from .host_profiles import (
+ HostProfile,
+)
+
+from .inventory import (
+ create_posix_inventory,
+)
+
+
+def run_pypi_proxy(args, targets_use_pypi): # type: (EnvironmentConfig, bool) -> None
+ """Run a PyPI proxy support container."""
+ if args.pypi_endpoint:
+ return # user has overridden the proxy endpoint, there is nothing to provision
+
+ posix_targets = [target for target in args.targets if isinstance(target, PosixConfig)]
+ need_proxy = targets_use_pypi and any(target.python.version == '2.6' for target in posix_targets)
+ use_proxy = args.pypi_proxy or need_proxy
+
+ if not use_proxy:
+ return
+
+ if not docker_available():
+ if args.pypi_proxy:
+ raise ApplicationError('Use of the PyPI proxy was requested, but Docker is not available.')
+
+ display.warning('Unable to use the PyPI proxy because Docker is not available. Installation of packages using `pip` may fail.')
+ return
+
+ image = 'quay.io/ansible/pypi-test-container:1.0.0'
+ port = 3141
+
+ run_support_container(
+ args=args,
+ context='__pypi_proxy__',
+ image=image,
+ name=f'pypi-test-container-{args.session_name}',
+ ports=[port],
+ )
+
+
+def configure_pypi_proxy(args, profile): # type: (EnvironmentConfig, HostProfile) -> None
+ """Configure the environment to use a PyPI proxy, if present."""
+ if args.pypi_endpoint:
+ pypi_endpoint = args.pypi_endpoint
+ else:
+ containers = get_container_database(args)
+ context = containers.data.get(HostType.control if profile.controller else HostType.managed, {}).get('__pypi_proxy__')
+
+ if not context:
+ return # proxy not configured
+
+ access = list(context.values())[0]
+
+ host = access.host_ip
+ port = dict(access.port_map())[3141]
+
+ pypi_endpoint = f'http://{host}:{port}/root/pypi/+simple/'
+
+ pypi_hostname = urllib.parse.urlparse(pypi_endpoint)[1].split(':')[0]
+
+ if profile.controller:
+ configure_controller_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname)
+ else:
+ configure_target_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname)
+
+
+def configure_controller_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None
+ """Configure the controller environment to use a PyPI proxy."""
+ configure_pypi_proxy_pip(args, profile, pypi_endpoint, pypi_hostname)
+ configure_pypi_proxy_easy_install(args, profile, pypi_endpoint)
+
+
+def configure_target_pypi_proxy(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None
+ """Configure the target environment to use a PyPI proxy."""
+ inventory_path = process_scoped_temporary_file(args)
+
+ create_posix_inventory(args, inventory_path, [profile])
+
+ def cleanup_pypi_proxy():
+ """Undo changes made to configure the PyPI proxy."""
+ run_playbook(args, inventory_path, 'pypi_proxy_restore.yml', capture=True)
+
+ force = 'yes' if profile.config.is_managed else 'no'
+
+ run_playbook(args, inventory_path, 'pypi_proxy_prepare.yml', dict(pypi_endpoint=pypi_endpoint, pypi_hostname=pypi_hostname, force=force), capture=True)
+
+ atexit.register(cleanup_pypi_proxy)
+
+
+def configure_pypi_proxy_pip(args, profile, pypi_endpoint, pypi_hostname): # type: (EnvironmentConfig, HostProfile, str, str) -> None
+ """Configure a custom index for pip based installs."""
+ pip_conf_path = os.path.expanduser('~/.pip/pip.conf')
+ pip_conf = '''
+[global]
+index-url = {0}
+trusted-host = {1}
+'''.format(pypi_endpoint, pypi_hostname).strip()
+
+ def pip_conf_cleanup(): # type: () -> None
+ """Remove custom pip PyPI config."""
+ display.info('Removing custom PyPI config: %s' % pip_conf_path, verbosity=1)
+ os.remove(pip_conf_path)
+
+ if os.path.exists(pip_conf_path) and not profile.config.is_managed:
+ raise ApplicationError('Refusing to overwrite existing file: %s' % pip_conf_path)
+
+ display.info('Injecting custom PyPI config: %s' % pip_conf_path, verbosity=1)
+ display.info('Config: %s\n%s' % (pip_conf_path, pip_conf), verbosity=3)
+
+ if not args.explain:
+ write_text_file(pip_conf_path, pip_conf, True)
+ atexit.register(pip_conf_cleanup)
+
+
+def configure_pypi_proxy_easy_install(args, profile, pypi_endpoint): # type: (EnvironmentConfig, HostProfile, str) -> None
+ """Configure a custom index for easy_install based installs."""
+ pydistutils_cfg_path = os.path.expanduser('~/.pydistutils.cfg')
+ pydistutils_cfg = '''
+[easy_install]
+index_url = {0}
+'''.format(pypi_endpoint).strip()
+
+ if os.path.exists(pydistutils_cfg_path) and not profile.config.is_managed:
+ raise ApplicationError('Refusing to overwrite existing file: %s' % pydistutils_cfg_path)
+
+ def pydistutils_cfg_cleanup(): # type: () -> None
+ """Remove custom PyPI config."""
+ display.info('Removing custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
+ os.remove(pydistutils_cfg_path)
+
+ display.info('Injecting custom PyPI config: %s' % pydistutils_cfg_path, verbosity=1)
+ display.info('Config: %s\n%s' % (pydistutils_cfg_path, pydistutils_cfg), verbosity=3)
+
+ if not args.explain:
+ write_text_file(pydistutils_cfg_path, pydistutils_cfg, True)
+ atexit.register(pydistutils_cfg_cleanup)
diff --git a/test/lib/ansible_test/_internal/python_requirements.py b/test/lib/ansible_test/_internal/python_requirements.py
new file mode 100644
index 0000000000..8fca783407
--- /dev/null
+++ b/test/lib/ansible_test/_internal/python_requirements.py
@@ -0,0 +1,482 @@
+"""Python requirements management"""
+from __future__ import annotations
+
+import base64
+import dataclasses
+import json
+import os
+import re
+import typing as t
+
+from .constants import (
+ COVERAGE_REQUIRED_VERSION,
+)
+
+from .encoding import (
+ to_text,
+ to_bytes,
+)
+
+from .io import (
+ read_text_file,
+)
+
+from .util import (
+ ANSIBLE_TEST_DATA_ROOT,
+ ANSIBLE_TEST_TARGET_ROOT,
+ ANSIBLE_TEST_TOOLS_ROOT,
+ SubprocessError,
+ display,
+ find_executable,
+ raw_command,
+ str_to_version,
+ version_to_str,
+)
+
+from .util_common import (
+ check_pyyaml,
+ create_result_directories,
+)
+
+from .config import (
+ EnvironmentConfig,
+ IntegrationConfig,
+ UnitsConfig,
+)
+
+from .data import (
+ data_context,
+)
+
+from .host_configs import (
+ PosixConfig,
+ PythonConfig,
+)
+
+from .connections import (
+ LocalConnection,
+ Connection,
+)
+
+# Helper script whose contents are embedded in the generated requirements script (see prepare_pip_script).
+QUIET_PIP_SCRIPT_PATH = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'quiet_pip.py')
+# Template script executed on the host to carry out the serialized pip commands.
+REQUIREMENTS_SCRIPT_PATH = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'requirements.py')
+
+
+# Pip Abstraction
+
+
+@dataclasses.dataclass(frozen=True)
+class PipCommand:
+ """Base class for pip commands."""""
+
+ def serialize(self): # type: () -> t.Tuple[str, t.Dict[str, t.Any]]
+ """Return a serialized representation of this command."""
+ name = type(self).__name__[3:].lower()
+ return name, self.__dict__
+
+
+@dataclasses.dataclass(frozen=True)
+class PipInstall(PipCommand):
+    """Details required to perform a pip install."""
+    # (relative path, file contents) pairs for requirements and constraints files, read up front so the
+    # command is self-contained and can be serialized for execution on another host.
+    requirements: t.List[t.Tuple[str, str]]
+    constraints: t.List[t.Tuple[str, str]]
+    # Explicit pip requirement specifiers to install, e.g. 'coverage==X.Y.Z'.
+    packages: t.List[str]
+
+    def has_package(self, name): # type: (str) -> bool
+        """Return True if the specified package will be installed, otherwise False."""
+        name = name.lower()
+
+        # Case-insensitive substring match against explicit packages and requirements file contents.
+        return (any(name in package.lower() for package in self.packages) or
+                any(name in contents.lower() for path, contents in self.requirements))
+
+
+@dataclasses.dataclass(frozen=True)
+class PipUninstall(PipCommand):
+    """Details required to perform a pip uninstall."""
+    # Names of the packages to uninstall.
+    packages: t.List[str]
+    # When True, uninstall failures are tolerated.
+    ignore_errors: bool
+
+
+# Entry Points
+
+
+def install_requirements(
+    args, # type: EnvironmentConfig
+    python, # type: PythonConfig
+    ansible=False, # type: bool
+    command=False, # type: bool
+    coverage=False, # type: bool
+    virtualenv=False, # type: bool
+    connection=None, # type: t.Optional[Connection]
+): # type: (...) -> None
+    """Install requirements for the given Python using the specified arguments."""
+    create_result_directories(args)
+
+    # No connection means the install occurs locally on the controller rather than on a target host.
+    controller = not connection
+
+    if not requirements_allowed(args, controller):
+        return
+
+    # Coverage support is implied when running units/integration commands with --coverage enabled.
+    if command and isinstance(args, (UnitsConfig, IntegrationConfig)) and args.coverage:
+        coverage = True
+
+    cryptography = False
+
+    if ansible:
+        # Per-interpreter cache stored as an attribute on this function so the ansible requirements
+        # are installed at most once per Python path for the lifetime of the process.
+        try:
+            ansible_cache = install_requirements.ansible_cache
+        except AttributeError:
+            ansible_cache = install_requirements.ansible_cache = {}
+
+        ansible_installed = ansible_cache.get(python.path)
+
+        if ansible_installed:
+            ansible = False
+        else:
+            ansible_cache[python.path] = True
+
+        # Install the latest cryptography version that the current requirements can support if it is not already available.
+        # This avoids downgrading cryptography when OS packages provide a newer version than we are able to install using pip.
+        # If not installed here, later install commands may try to install a version of cryptography which cannot be installed.
+        cryptography = not is_cryptography_available(python.path)
+
+    commands = collect_requirements(
+        python=python,
+        controller=controller,
+        ansible=ansible,
+        cryptography=cryptography,
+        command=args.command if command else None,
+        coverage=coverage,
+        virtualenv=virtualenv,
+        minimize=False,
+        sanity=None,
+    )
+
+    if not commands:
+        return
+
+    run_pip(args, python, commands, connection)
+
+    # Verify PyYAML imports cleanly whenever one of the installs may have touched it.
+    if any(isinstance(command, PipInstall) and command.has_package('pyyaml') for command in commands):
+        check_pyyaml(python)
+
+
+def collect_requirements(
+    python, # type: PythonConfig
+    controller, # type: bool
+    ansible, # type: bool
+    cryptography, # type: bool
+    coverage, # type: bool
+    virtualenv, # type: bool
+    minimize, # type: bool
+    command, # type: t.Optional[str]
+    sanity, # type: t.Optional[str]
+): # type: (...) -> t.List[PipCommand]
+    """Collect requirements for the given Python using the specified arguments."""
+    # Commands are executed in the order collected below; uninstalls for minimization run last.
+    commands = [] # type: t.List[PipCommand]
+
+    if virtualenv:
+        commands.extend(collect_package_install(packages=['virtualenv']))
+
+    if coverage:
+        # Pin coverage to the exact required version; constraints are skipped to avoid conflicting pins.
+        commands.extend(collect_package_install(packages=[f'coverage=={COVERAGE_REQUIRED_VERSION}'], constraints=False))
+
+    if cryptography:
+        # Versions chosen based on what the interpreter/OpenSSL combination supports.
+        commands.extend(collect_package_install(packages=get_cryptography_requirements(python)))
+
+    if ansible or command:
+        commands.extend(collect_general_install(command, ansible))
+
+    if sanity:
+        commands.extend(collect_sanity_install(sanity))
+
+    if command == 'units':
+        commands.extend(collect_units_install())
+
+    if command in ('integration', 'windows-integration', 'network-integration'):
+        commands.extend(collect_integration_install(command, controller))
+
+    if minimize:
+        # In some environments pkg_resources is installed as a separate pip package which needs to be removed.
+        # For example, using Python 3.8 on Ubuntu 18.04 a virtualenv is created with only pip and setuptools.
+        # However, a venv is created with an additional pkg-resources package which is independent of setuptools.
+        # Making sure pkg-resources is removed preserves the import test consistency between venv and virtualenv.
+        # Additionally, in the above example, the pyparsing package vendored with pkg-resources is out-of-date and generates deprecation warnings.
+        # Thus it is important to remove pkg-resources to prevent system installed packages from generating deprecation warnings.
+        commands.extend(collect_uninstall(packages=['pkg-resources'], ignore_errors=True))
+        commands.extend(collect_uninstall(packages=['setuptools', 'pip']))
+
+    return commands
+
+
+def run_pip(
+    args, # type: EnvironmentConfig
+    python, # type: PythonConfig
+    commands, # type: t.List[PipCommand]
+    connection, # type: t.Optional[Connection]
+): # type: (...) -> None
+    """Run the specified pip commands for the given Python, and optionally the specified host."""
+    # Default to a local connection (controller) when no connection is supplied.
+    connection = connection or LocalConnection(args)
+    script = prepare_pip_script(commands)
+
+    # In explain mode the script is generated (and shown at high verbosity) but never executed.
+    if not args.explain:
+        connection.run([python.path], data=script)
+
+
+# Collect
+
+
+def collect_general_install(
+ command=None, # type: t.Optional[str]
+ ansible=False, # type: bool
+): # type: (...) -> t.List[PipInstall]
+ """Return details necessary for the specified general-purpose pip install(s)."""
+ requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+ constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+ if ansible:
+ path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'ansible.txt')
+ requirements_paths.append((ANSIBLE_TEST_DATA_ROOT, path))
+
+ if command:
+ path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', f'{command}.txt')
+ requirements_paths.append((ANSIBLE_TEST_DATA_ROOT, path))
+
+ return collect_install(requirements_paths, constraints_paths)
+
+
+def collect_package_install(packages, constraints=True): # type: (t.List[str], bool) -> t.List[PipInstall]
+ """Return the details necessary to install the specified packages."""
+ return collect_install([], [], packages, constraints=constraints)
+
+
+def collect_sanity_install(sanity): # type: (str) -> t.List[PipInstall]
+    """Return the details necessary for the specified sanity pip install(s)."""
+    requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+    constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+    # Requirements bundled with ansible-test for this sanity test.
+    path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', f'sanity.{sanity}.txt')
+    requirements_paths.append((ANSIBLE_TEST_DATA_ROOT, path))
+
+    # The ansible-core repository provides additional per-test requirements for its code-smell sanity tests.
+    if data_context().content.is_ansible:
+        path = os.path.join(data_context().content.sanity_path, 'code-smell', f'{sanity}.requirements.txt')
+        requirements_paths.append((data_context().content.root, path))
+
+    # Sanity requirements are installed without the shared ansible-test constraints file.
+    return collect_install(requirements_paths, constraints_paths, constraints=False)
+
+
+def collect_units_install(): # type: () -> t.List[PipInstall]
+ """Return details necessary for the specified units pip install(s)."""
+ requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+ constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+ path = os.path.join(data_context().content.unit_path, 'requirements.txt')
+ requirements_paths.append((data_context().content.root, path))
+
+ path = os.path.join(data_context().content.unit_path, 'constraints.txt')
+ constraints_paths.append((data_context().content.root, path))
+
+ return collect_install(requirements_paths, constraints_paths)
+
+
+def collect_integration_install(command, controller): # type: (str, bool) -> t.List[PipInstall]
+ """Return details necessary for the specified integration pip install(s)."""
+ requirements_paths = [] # type: t.List[t.Tuple[str, str]]
+ constraints_paths = [] # type: t.List[t.Tuple[str, str]]
+
+ # Support for prefixed files was added to ansible-test in ansible-core 2.12 when split controller/target testing was implemented.
+ # Previous versions of ansible-test only recognize non-prefixed files.
+ # If a prefixed file exists (even if empty), it takes precedence over the non-prefixed file.
+ prefixes = ('controller.' if controller else 'target.', '')
+
+ for prefix in prefixes:
+ path = os.path.join(data_context().content.integration_path, f'{prefix}requirements.txt')
+
+ if os.path.exists(path):
+ requirements_paths.append((data_context().content.root, path))
+ break
+
+ for prefix in prefixes:
+ path = os.path.join(data_context().content.integration_path, f'{command}.{prefix}requirements.txt')
+
+ if os.path.exists(path):
+ requirements_paths.append((data_context().content.root, path))
+ break
+
+ for prefix in prefixes:
+ path = os.path.join(data_context().content.integration_path, f'{prefix}constraints.txt')
+
+ if os.path.exists(path):
+ constraints_paths.append((data_context().content.root, path))
+ break
+
+ return collect_install(requirements_paths, constraints_paths)
+
+
+def collect_install(
+ requirements_paths, # type: t.List[t.Tuple[str, str]]
+ constraints_paths, # type: t.List[t.Tuple[str, str]]
+ packages=None, # type: t.Optional[t.List[str]]
+ constraints=True, # type: bool
+) -> t.List[PipInstall]:
+ """Build a pip install list from the given requirements, constraints and packages."""
+ # listing content constraints first gives them priority over constraints provided by ansible-test
+ constraints_paths = list(constraints_paths)
+
+ if constraints:
+ constraints_paths.append((ANSIBLE_TEST_DATA_ROOT, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'constraints.txt')))
+
+ requirements = [(os.path.relpath(path, root), read_text_file(path)) for root, path in requirements_paths if usable_pip_file(path)]
+ constraints = [(os.path.relpath(path, root), read_text_file(path)) for root, path in constraints_paths if usable_pip_file(path)]
+ packages = packages or []
+
+ if requirements or packages:
+ installs = [PipInstall(
+ requirements=requirements,
+ constraints=constraints,
+ packages=packages,
+ )]
+ else:
+ installs = []
+
+ return installs
+
+
+def collect_uninstall(packages, ignore_errors=False): # type: (t.List[str], bool) -> t.List[PipUninstall]
+ """Return the details necessary for the specified pip uninstall."""
+ uninstall = PipUninstall(
+ packages=packages,
+ ignore_errors=ignore_errors,
+ )
+
+ return [uninstall]
+
+
+# Support
+
+
+def requirements_allowed(args, controller): # type: (EnvironmentConfig, bool) -> bool
+    """
+    Return True if requirements can be installed, otherwise return False.
+
+    Requirements are only allowed if one of the following conditions is met:
+
+    The user specified --requirements manually.
+    The install will occur on the controller and the controller or controller Python is managed by ansible-test.
+    The install will occur on the target and the target or target Python is managed by ansible-test.
+    """
+    if args.requirements:
+        return True
+
+    if controller:
+        return args.controller.is_managed or args.controller.python.is_managed
+
+    # NOTE(review): only the first POSIX target is consulted -- presumably all targets share management state; verify against callers.
+    target = args.only_targets(PosixConfig)[0]
+
+    return target.is_managed or target.python.is_managed
+
+
+def prepare_pip_script(commands): # type: (t.List[PipCommand]) -> str
+    """Generate a Python script to perform the requested pip commands."""
+    data = [command.serialize() for command in commands]
+
+    display.info(f'>>> Requirements Commands\n{json.dumps(data, indent=4)}', verbosity=3)
+
+    # Payload consumed by the requirements template: the quiet_pip helper source,
+    # the current verbosity level and the serialized pip commands to run.
+    args = dict(
+        script=read_text_file(QUIET_PIP_SCRIPT_PATH),
+        verbosity=display.verbosity,
+        commands=data,
+    )
+
+    # Base64-encode the JSON payload so it can be embedded in the template without quoting/escaping issues.
+    payload = to_text(base64.b64encode(to_bytes(json.dumps(args))))
+    path = REQUIREMENTS_SCRIPT_PATH
+    template = read_text_file(path)
+    script = template.format(payload=payload)
+
+    display.info(f'>>> Python Script from Template ({path})\n{script.strip()}', verbosity=4)
+
+    return script
+
+
+def usable_pip_file(path): # type: (t.Optional[str]) -> bool
+ """Return True if the specified pip file is usable, otherwise False."""
+ return path and os.path.exists(path) and os.path.getsize(path)
+
+
+# Cryptography
+
+
+def is_cryptography_available(python): # type: (str) -> bool
+    """Return True if cryptography is available for the given python."""
+    # Probe by importing cryptography in a subprocess; any failure to import counts as unavailable.
+    try:
+        raw_command([python, '-c', 'import cryptography'], capture=True)
+    except SubprocessError:
+        return False
+
+    return True
+
+
+def get_cryptography_requirements(python): # type: (PythonConfig) -> t.List[str]
+ """
+ Return the correct cryptography and pyopenssl requirements for the given python version.
+ The version of cryptography installed depends on the python version and openssl version.
+ """
+ openssl_version = get_openssl_version(python)
+
+ if openssl_version and openssl_version < (1, 1, 0):
+ # cryptography 3.2 requires openssl 1.1.x or later
+ # see https://cryptography.io/en/latest/changelog.html#v3-2
+ cryptography = 'cryptography < 3.2'
+ # pyopenssl 20.0.0 requires cryptography 3.2 or later
+ pyopenssl = 'pyopenssl < 20.0.0'
+ else:
+ # cryptography 3.4+ fails to install on many systems
+ # this is a temporary work-around until a more permanent solution is available
+ cryptography = 'cryptography < 3.4'
+ # no specific version of pyopenssl required, don't install it
+ pyopenssl = None
+
+ requirements = [
+ cryptography,
+ pyopenssl,
+ ]
+
+ requirements = [requirement for requirement in requirements if requirement]
+
+ return requirements
+
+
+def get_openssl_version(python): # type: (PythonConfig) -> t.Optional[t.Tuple[int, ...]]
+    """Return the openssl version."""
+    if not python.version.startswith('2.'):
+        # OpenSSL version checking only works on Python 3.x.
+        # This should be the most accurate, since it is the Python we will be using.
+        version = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'sslcheck.py')], capture=True)[0])['version']
+
+        if version:
+            display.info(f'Detected OpenSSL version {version_to_str(version)} under Python {python.version}.', verbosity=1)
+
+            return tuple(version)
+
+    # Fall back to detecting the OpenSSL version from the CLI.
+    # This should provide an adequate solution on Python 2.x.
+    openssl_path = find_executable('openssl', required=False)
+
+    if openssl_path:
+        try:
+            result = raw_command([openssl_path, 'version'], capture=True)[0]
+        except SubprocessError:
+            result = ''
+
+        # Parse the leading 'OpenSSL X.Y.Z' portion of the CLI output; letter suffixes (e.g. 1.1.1k) are ignored.
+        match = re.search(r'^OpenSSL (?P<version>[0-9]+\.[0-9]+\.[0-9]+)', result)
+
+        if match:
+            version = str_to_version(match.group('version'))
+
+            display.info(f'Detected OpenSSL version {version_to_str(version)} using the openssl CLI.', verbosity=1)
+
+            return version
+
+    # Neither detection method worked; callers treat None as "unknown version".
+    display.info('Unable to detect OpenSSL version.', verbosity=1)
+
+    return None
diff --git a/test/lib/ansible_test/_internal/ssh.py b/test/lib/ansible_test/_internal/ssh.py
index ed246ea639..21212dc1aa 100644
--- a/test/lib/ansible_test/_internal/ssh.py
+++ b/test/lib/ansible_test/_internal/ssh.py
@@ -1,14 +1,14 @@
"""High level functions for working with SSH."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
+import dataclasses
import json
import os
import random
import re
import subprocess
-
-from . import types as t
+import shlex
+import typing as t
from .encoding import (
to_bytes,
@@ -17,9 +17,7 @@ from .encoding import (
from .util import (
ApplicationError,
- cmd_quote,
common_environment,
- devnull,
display,
exclude_none_values,
sanitize_host_name,
@@ -30,24 +28,19 @@ from .config import (
)
+@dataclasses.dataclass
class SshConnectionDetail:
"""Information needed to establish an SSH connection to a host."""
- def __init__(self,
- name, # type: str
- host, # type: str
- port, # type: t.Optional[int]
- user, # type: str
- identity_file, # type: str
- python_interpreter=None, # type: t.Optional[str]
- shell_type=None, # type: t.Optional[str]
- ): # type: (...) -> None
- self.name = sanitize_host_name(name)
- self.host = host
- self.port = port
- self.user = user
- self.identity_file = identity_file
- self.python_interpreter = python_interpreter
- self.shell_type = shell_type
+ name: str
+ host: str
+ port: t.Optional[int]
+ user: str
+ identity_file: str
+ python_interpreter: t.Optional[str] = None
+ shell_type: t.Optional[str] = None
+
+ def __post_init__(self):
+ self.name = sanitize_host_name(self.name)
class SshProcess:
@@ -183,7 +176,7 @@ def run_ssh_command(
cmd = create_ssh_command(ssh, options, cli_args, command)
env = common_environment()
- cmd_show = ' '.join([cmd_quote(c) for c in cmd])
+ cmd_show = ' '.join([shlex.quote(c) for c in cmd])
display.info('Run background command: %s' % cmd_show, verbosity=1, truncate=True)
cmd_bytes = [to_bytes(c) for c in cmd]
@@ -193,7 +186,7 @@ def run_ssh_command(
process = SshProcess(None)
else:
process = SshProcess(subprocess.Popen(cmd_bytes, env=env_bytes, bufsize=-1, # pylint: disable=consider-using-with
- stdin=devnull(), stdout=subprocess.PIPE, stderr=subprocess.PIPE))
+ stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE))
return process
diff --git a/test/lib/ansible_test/_internal/target.py b/test/lib/ansible_test/_internal/target.py
index 829b489323..09efd1e1ac 100644
--- a/test/lib/ansible_test/_internal/target.py
+++ b/test/lib/ansible_test/_internal/target.py
@@ -1,14 +1,13 @@
"""Test target identification, iteration and inclusion/exclusion."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import collections
+import enum
import os
import re
import itertools
import abc
-
-from . import types as t
+import typing as t
from .encoding import (
to_bytes,
@@ -28,6 +27,7 @@ from .util import (
from .data import (
data_context,
+ content_plugins,
)
MODULE_EXTENSIONS = '.py', '.ps1'
@@ -45,16 +45,16 @@ except AttributeError:
TIntegrationTarget = None # pylint: disable=invalid-name
-def find_target_completion(target_func, prefix):
+def find_target_completion(target_func, prefix, short):
"""
:type target_func: () -> collections.Iterable[CompletionTarget]
:type prefix: unicode
+ :type short: bool
:rtype: list[str]
"""
try:
targets = target_func()
- short = os.environ.get('COMP_TYPE') == '63' # double tab completion from bash
- matches = walk_completion_targets(targets, prefix, short)
+ matches = list(walk_completion_targets(targets, prefix, short))
return matches
except Exception as ex: # pylint: disable=locally-disabled, broad-except
return [u'%s' % ex]
@@ -95,14 +95,14 @@ def walk_internal_targets(targets, includes=None, excludes=None, requires=None):
"""
targets = tuple(targets)
- include_targets = sorted(filter_targets(targets, includes, errors=True, directories=False), key=lambda include_target: include_target.name)
+ include_targets = sorted(filter_targets(targets, includes, directories=False), key=lambda include_target: include_target.name)
if requires:
- require_targets = set(filter_targets(targets, requires, errors=True, directories=False))
+ require_targets = set(filter_targets(targets, requires, directories=False))
include_targets = [require_target for require_target in include_targets if require_target in require_targets]
if excludes:
- list(filter_targets(targets, excludes, errors=True, include=False, directories=False))
+ list(filter_targets(targets, excludes, include=False, directories=False))
internal_targets = set(filter_targets(include_targets, excludes, errors=False, include=False, directories=False))
return tuple(sorted(internal_targets, key=lambda sort_target: sort_target.name))
@@ -453,10 +453,8 @@ def analyze_integration_target_dependencies(integration_targets):
return dependencies
-class CompletionTarget:
+class CompletionTarget(metaclass=abc.ABCMeta):
"""Command-line argument completion target base class."""
- __metaclass__ = abc.ABCMeta
-
def __init__(self):
self.name = None
self.path = None
@@ -496,7 +494,7 @@ class DirectoryTarget(CompletionTarget):
:type path: str
:type modules: tuple[str]
"""
- super(DirectoryTarget, self).__init__()
+ super().__init__()
self.name = path
self.path = path
@@ -513,7 +511,7 @@ class TestTarget(CompletionTarget):
:type base_path: str
:type symlink: bool | None
"""
- super(TestTarget, self).__init__()
+ super().__init__()
if symlink is None:
symlink = os.path.islink(to_bytes(path.rstrip(os.path.sep)))
@@ -544,6 +542,67 @@ class TestTarget(CompletionTarget):
self.aliases = tuple(sorted(aliases))
+class IntegrationTargetType(enum.Enum):
+    """Type of integration test target."""
+    CONTROLLER = enum.auto() # test is categorized as running on the controller
+    TARGET = enum.auto() # test is categorized as running on the target
+    UNKNOWN = enum.auto() # the type could not be determined from aliases or plugin references
+    CONFLICT = enum.auto() # aliases/plugin references imply both controller and target
+
+
+def extract_plugin_references(name, aliases): # type: (str, t.List[str]) -> t.List[t.Tuple[str, str]]
+    """Return a list of plugin references found in the given integration test target name and aliases."""
+    plugins = content_plugins()
+    found = [] # type: t.List[t.Tuple[str, str]]
+
+    for alias in [name] + aliases:
+        # First check whether the alias names a module directly.
+        plugin_type = 'modules'
+        plugin_name = alias
+
+        if plugin_name in plugins.get(plugin_type, {}):
+            found.append((plugin_type, plugin_name))
+
+        # Then try '{type}_{name}' style aliases, splitting on underscores.
+        parts = alias.split('_')
+
+        # Type prefixes of one or two parts are tried -- presumably to cover plugin types
+        # that themselves contain an underscore (e.g. 'module_utils'); verify against content_plugins().
+        for type_length in (1, 2):
+            if len(parts) > type_length:
+                plugin_type = '_'.join(parts[:type_length])
+                plugin_name = '_'.join(parts[type_length:])
+
+                if plugin_name in plugins.get(plugin_type, {}):
+                    found.append((plugin_type, plugin_name))
+
+    return found
+
+
+def categorize_integration_test(name, aliases, force_target): # type: (str, t.List[str], bool) -> t.Tuple[IntegrationTargetType, IntegrationTargetType]
+    """Return the integration test target types (used and actual) based on the given target name and aliases."""
+    # Explicit 'context/controller' and 'context/target' aliases take precedence over plugin-based inference.
+    context_controller = f'context/{IntegrationTargetType.CONTROLLER.name.lower()}' in aliases
+    context_target = f'context/{IntegrationTargetType.TARGET.name.lower()}' in aliases or force_target
+    actual_type = None
+    # In the ansible-core repository an undetermined type remains UNKNOWN instead of defaulting to TARGET.
+    strict_mode = data_context().content.is_ansible
+
+    if context_controller and context_target:
+        target_type = IntegrationTargetType.CONFLICT
+    elif context_controller and not context_target:
+        target_type = IntegrationTargetType.CONTROLLER
+    elif context_target and not context_controller:
+        target_type = IntegrationTargetType.TARGET
+    else:
+        # No explicit context: infer from referenced plugins; modules/module_utils imply TARGET, anything else CONTROLLER.
+        target_types = {IntegrationTargetType.TARGET if plugin_type in ('modules', 'module_utils') else IntegrationTargetType.CONTROLLER
+                        for plugin_type, plugin_name in extract_plugin_references(name, aliases)}
+
+        if len(target_types) == 1:
+            target_type = target_types.pop()
+        elif not target_types:
+            actual_type = IntegrationTargetType.UNKNOWN
+            target_type = actual_type if strict_mode else IntegrationTargetType.TARGET
+        else:
+            target_type = IntegrationTargetType.CONFLICT
+
+    # The two values differ only when the type was UNKNOWN and a default (TARGET) was substituted.
+    return target_type, actual_type or target_type
+
+
class IntegrationTarget(CompletionTarget):
"""Integration test target."""
non_posix = frozenset((
@@ -564,7 +623,7 @@ class IntegrationTarget(CompletionTarget):
:type modules: frozenset[str]
:type prefixes: dict[str, str]
"""
- super(IntegrationTarget, self).__init__()
+ super().__init__()
self.relative_path = os.path.relpath(path, data_context().content.integration_targets_path)
self.name = self.relative_path.replace(os.path.sep, '.')
@@ -665,6 +724,24 @@ class IntegrationTarget(CompletionTarget):
if not any(g in self.non_posix for g in groups):
groups.append('posix')
+ # target type
+
+ # targets which are non-posix test against the target, even if they also support posix
+ force_target = any(group in self.non_posix for group in groups)
+
+ target_type, actual_type = categorize_integration_test(self.name, list(static_aliases), force_target)
+
+ self._remove_group(groups, 'context')
+
+ groups.extend(['context/', f'context/{target_type.name.lower()}'])
+
+ if target_type != actual_type:
+ # allow users to query for the actual type
+ groups.extend(['context/', f'context/{actual_type.name.lower()}'])
+
+ self.target_type = target_type
+ self.actual_type = actual_type
+
# aliases
aliases = [self.name] + \
@@ -682,6 +759,10 @@ class IntegrationTarget(CompletionTarget):
self.setup_always = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('setup/always/'))))
self.needs_target = tuple(sorted(set(g.split('/')[2] for g in groups if g.startswith('needs/target/'))))
+    @staticmethod
+    def _remove_group(groups, group):
+        """Return a copy of the given groups with the named group and any of its 'group/...' entries removed."""
+        # NOTE(review): this does not mutate `groups` -- callers must use the return value,
+        # yet the visible call `self._remove_group(groups, 'context')` discards it; verify intent.
+        return [g for g in groups if g != group and not g.startswith('%s/' % group)]
+
class TargetPatternsNotMatched(ApplicationError):
"""One or more targets were not matched when a match was required."""
@@ -696,4 +777,4 @@ class TargetPatternsNotMatched(ApplicationError):
else:
message = 'Target pattern not matched: %s' % self.patterns[0]
- super(TargetPatternsNotMatched, self).__init__(message)
+ super().__init__(message)
diff --git a/test/lib/ansible_test/_internal/test.py b/test/lib/ansible_test/_internal/test.py
index 952bc28df0..e2a18b655b 100644
--- a/test/lib/ansible_test/_internal/test.py
+++ b/test/lib/ansible_test/_internal/test.py
@@ -1,11 +1,9 @@
"""Classes for storing and processing test results."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import datetime
import re
-
-from . import types as t
+import typing as t
from .util import (
display,
@@ -22,6 +20,8 @@ from .config import (
TestConfig,
)
+from . import junit_xml
+
def calculate_best_confidence(choices, metadata):
"""
@@ -79,17 +79,8 @@ class TestResult:
if self.python_version:
self.name += '-python-%s' % self.python_version
- try:
- import junit_xml
- except ImportError:
- junit_xml = None
-
- self.junit = junit_xml
-
- def write(self, args):
- """
- :type args: TestConfig
- """
+ def write(self, args): # type: (TestConfig) -> None
+ """Write the test results to various locations."""
self.write_console()
self.write_bot(args)
@@ -97,10 +88,7 @@ class TestResult:
self.write_lint()
if args.junit:
- if self.junit:
- self.write_junit(args)
- else:
- display.warning('Skipping junit xml output because the `junit-xml` python package was not found.', unique=True)
+ self.write_junit(args)
def write_console(self):
"""Write results to console."""
@@ -135,32 +123,19 @@ class TestResult:
return name
- def save_junit(self, args, test_case, properties=None):
- """
- :type args: TestConfig
- :type test_case: junit_xml.TestCase
- :type properties: dict[str, str] | None
- :rtype: str | None
- """
- test_suites = [
- self.junit.TestSuite(
- name='ansible-test',
- test_cases=[test_case],
- timestamp=datetime.datetime.utcnow().replace(microsecond=0).isoformat(),
- properties=properties,
- ),
- ]
-
- # the junit_xml API is changing in version 2.0.0
- # TestSuite.to_xml_string is being replaced with to_xml_report_string
- # see: https://github.com/kyrus/python-junit-xml/blob/63db26da353790500642fd02cae1543eb41aab8b/junit_xml/__init__.py#L249-L261
- try:
- to_xml_string = self.junit.to_xml_report_string
- except AttributeError:
- # noinspection PyDeprecation
- to_xml_string = self.junit.TestSuite.to_xml_string
-
- report = to_xml_string(test_suites=test_suites, prettyprint=True, encoding='utf-8')
+ def save_junit(self, args, test_case): # type: (TestConfig, junit_xml.TestCase) -> None
+ """Save the given test case results to disk as JUnit XML."""
+ suites = junit_xml.TestSuites(
+ suites=[
+ junit_xml.TestSuite(
+ name='ansible-test',
+ cases=[test_case],
+ timestamp=datetime.datetime.utcnow(),
+ ),
+ ],
+ )
+
+ report = suites.to_pretty_xml()
if args.explain:
return
@@ -174,7 +149,7 @@ class TestTimeout(TestResult):
"""
:type timeout_duration: int
"""
- super(TestTimeout, self).__init__(command='timeout', test='')
+ super().__init__(command='timeout', test='')
self.timeout_duration = timeout_duration
@@ -198,21 +173,31 @@ One or more of the following situations may be responsible:
output += '\n\nConsult the console log for additional details on where the timeout occurred.'
- timestamp = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
+ timestamp = datetime.datetime.utcnow()
+
+ suites = junit_xml.TestSuites(
+ suites=[
+ junit_xml.TestSuite(
+ name='ansible-test',
+ timestamp=timestamp,
+ cases=[
+ junit_xml.TestCase(
+ name='timeout',
+ classname='timeout',
+ errors=[
+ junit_xml.TestError(
+ message=message,
+ ),
+ ],
+ ),
+ ],
+ )
+ ],
+ )
- # hack to avoid requiring junit-xml, which may not be pre-installed outside our test containers
- xml = '''
-<?xml version="1.0" encoding="utf-8"?>
-<testsuites disabled="0" errors="1" failures="0" tests="1" time="0.0">
-\t<testsuite disabled="0" errors="1" failures="0" file="None" log="None" name="ansible-test" skipped="0" tests="1" time="0" timestamp="%s" url="None">
-\t\t<testcase classname="timeout" name="timeout">
-\t\t\t<error message="%s" type="error">%s</error>
-\t\t</testcase>
-\t</testsuite>
-</testsuites>
-''' % (timestamp, message, output)
+ report = suites.to_pretty_xml()
- write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), xml.lstrip())
+ write_text_test_results(ResultType.JUNIT, self.create_result_name('.xml'), report)
class TestSuccess(TestResult):
@@ -221,7 +206,7 @@ class TestSuccess(TestResult):
"""
:type args: TestConfig
"""
- test_case = self.junit.TestCase(classname=self.command, name=self.name)
+ test_case = junit_xml.TestCase(classname=self.command, name=self.name)
self.save_junit(args, test_case)
@@ -234,7 +219,7 @@ class TestSkipped(TestResult):
:type test: str
:type python_version: str
"""
- super(TestSkipped, self).__init__(command, test, python_version)
+ super().__init__(command, test, python_version)
self.reason = None # type: t.Optional[str]
@@ -249,8 +234,11 @@ class TestSkipped(TestResult):
"""
:type args: TestConfig
"""
- test_case = self.junit.TestCase(classname=self.command, name=self.name)
- test_case.add_skipped_info(self.reason or 'No tests applicable.')
+ test_case = junit_xml.TestCase(
+ classname=self.command,
+ name=self.name,
+ skipped=self.reason or 'No tests applicable.',
+ )
self.save_junit(args, test_case)
@@ -265,7 +253,7 @@ class TestFailure(TestResult):
:type messages: list[TestMessage] | None
:type summary: unicode | None
"""
- super(TestFailure, self).__init__(command, test, python_version)
+ super().__init__(command, test, python_version)
if messages:
messages = sorted(messages)
@@ -282,7 +270,7 @@ class TestFailure(TestResult):
if args.metadata.changes:
self.populate_confidence(args.metadata)
- super(TestFailure, self).write(args)
+ super().write(args)
def write_console(self):
"""Write results to console."""
@@ -322,11 +310,16 @@ class TestFailure(TestResult):
title = self.format_title()
output = self.format_block()
- test_case = self.junit.TestCase(classname=self.command, name=self.name)
-
- # Include a leading newline to improve readability on Shippable "Tests" tab.
- # Without this, the first line becomes indented.
- test_case.add_failure_info(message=title, output='\n%s' % output)
+ test_case = junit_xml.TestCase(
+ classname=self.command,
+ name=self.name,
+ failures=[
+ junit_xml.TestFailure(
+ message=title,
+ output=output,
+ ),
+ ],
+ )
self.save_junit(args, test_case)
diff --git a/test/lib/ansible_test/_internal/thread.py b/test/lib/ansible_test/_internal/thread.py
index 49fbc1baff..3dfc16b8ce 100644
--- a/test/lib/ansible_test/_internal/thread.py
+++ b/test/lib/ansible_test/_internal/thread.py
@@ -1,16 +1,14 @@
"""Python threading tools."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import threading
+import functools
import sys
+import threading
+import queue
+import typing as t
-try:
- # noinspection PyPep8Naming
- import Queue as queue
-except ImportError:
- # noinspection PyUnresolvedReferences
- import queue # pylint: disable=locally-disabled, import-error
+
+TCallable = t.TypeVar('TCallable', bound=t.Callable)
class WrappedThread(threading.Thread):
@@ -19,8 +17,7 @@ class WrappedThread(threading.Thread):
"""
:type action: () -> any
"""
- # noinspection PyOldStyleClasses
- super(WrappedThread, self).__init__()
+ super().__init__()
self._result = queue.Queue()
self.action = action
self.result = None
@@ -55,3 +52,16 @@ class WrappedThread(threading.Thread):
self.result = result
return result
+
+
+def mutex(func): # type: (TCallable) -> TCallable
+ """Enforce exclusive access on a decorated function."""
+ lock = threading.Lock()
+
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ """Wrapper around `func` which uses a lock to provide exclusive access to the function."""
+ with lock:
+ return func(*args, **kwargs)
+
+ return wrapper
diff --git a/test/lib/ansible_test/_internal/timeout.py b/test/lib/ansible_test/_internal/timeout.py
new file mode 100644
index 0000000000..c255f5ce9f
--- /dev/null
+++ b/test/lib/ansible_test/_internal/timeout.py
@@ -0,0 +1,93 @@
+"""Timeout management for tests."""
+from __future__ import annotations
+
+import datetime
+import functools
+import os
+import signal
+import time
+import typing as t
+
+from .io import (
+ read_json_file,
+)
+
+from .config import (
+ CommonConfig,
+ TestConfig,
+)
+
+from .util import (
+ display,
+ ApplicationError,
+)
+
+from .thread import (
+ WrappedThread,
+)
+
+from .constants import (
+ TIMEOUT_PATH,
+)
+
+from .test import (
+ TestTimeout,
+)
+
+
+def get_timeout(): # type: () -> t.Optional[t.Dict[str, t.Any]]
+ """Return details about the currently set timeout, if any, otherwise return None."""
+ if not os.path.exists(TIMEOUT_PATH):
+ return None
+
+ data = read_json_file(TIMEOUT_PATH)
+ data['deadline'] = datetime.datetime.strptime(data['deadline'], '%Y-%m-%dT%H:%M:%SZ')
+
+ return data
+
+
+def configure_timeout(args): # type: (CommonConfig) -> None
+ """Configure the timeout."""
+ if isinstance(args, TestConfig):
+ configure_test_timeout(args) # only tests are subject to the timeout
+
+
+def configure_test_timeout(args): # type: (TestConfig) -> None
+ """Configure the test timeout."""
+ timeout = get_timeout()
+
+ if not timeout:
+ return
+
+ timeout_start = datetime.datetime.utcnow()
+ timeout_duration = timeout['duration']
+ timeout_deadline = timeout['deadline']
+ timeout_remaining = timeout_deadline - timeout_start
+
+ test_timeout = TestTimeout(timeout_duration)
+
+ if timeout_remaining <= datetime.timedelta():
+ test_timeout.write(args)
+
+ raise ApplicationError('The %d minute test timeout expired %s ago at %s.' % (
+ timeout_duration, timeout_remaining * -1, timeout_deadline))
+
+ display.info('The %d minute test timeout expires in %s at %s.' % (
+ timeout_duration, timeout_remaining, timeout_deadline), verbosity=1)
+
+ def timeout_handler(_dummy1, _dummy2):
+ """Runs when SIGUSR1 is received."""
+ test_timeout.write(args)
+
+ raise ApplicationError('Tests aborted after exceeding the %d minute time limit.' % timeout_duration)
+
+ def timeout_waiter(timeout_seconds): # type: (int) -> None
+ """Background thread which will kill the current process if the timeout elapses."""
+ time.sleep(timeout_seconds)
+ os.kill(os.getpid(), signal.SIGUSR1)
+
+ signal.signal(signal.SIGUSR1, timeout_handler)
+
+ instance = WrappedThread(functools.partial(timeout_waiter, timeout_remaining.seconds))
+ instance.daemon = True
+ instance.start()
diff --git a/test/lib/ansible_test/_internal/types.py b/test/lib/ansible_test/_internal/types.py
deleted file mode 100644
index 46ef70668e..0000000000
--- a/test/lib/ansible_test/_internal/types.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""Import wrapper for type hints when available."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-TYPE_CHECKING = False
-
-try:
- from typing import (
- Any,
- AnyStr,
- BinaryIO,
- Callable,
- Dict,
- FrozenSet,
- Generator,
- IO,
- Iterable,
- Iterator,
- List,
- Optional,
- Pattern,
- Set,
- Text,
- TextIO,
- Tuple,
- Type,
- TYPE_CHECKING,
- TypeVar,
- Union,
- )
-except ImportError:
- pass
diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py
index ebb9a68f31..9771c89b9f 100644
--- a/test/lib/ansible_test/_internal/util.py
+++ b/test/lib/ansible_test/_internal/util.py
@@ -1,8 +1,6 @@
"""Miscellaneous utility functions and classes."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
-import contextlib
import errno
import fcntl
import hashlib
@@ -17,35 +15,14 @@ import stat
import string
import subprocess
import sys
-import tempfile
import time
-import zipfile
+import functools
+import shlex
+import typing as t
from struct import unpack, pack
from termios import TIOCGWINSZ
-try:
- from abc import ABC
-except ImportError:
- from abc import ABCMeta
- ABC = ABCMeta('ABC', (), {})
-
-try:
- # noinspection PyCompatibility
- from configparser import ConfigParser
-except ImportError:
- # noinspection PyCompatibility,PyUnresolvedReferences
- from ConfigParser import SafeConfigParser as ConfigParser
-
-try:
- # noinspection PyProtectedMember
- from shlex import quote as cmd_quote
-except ImportError:
- # noinspection PyProtectedMember
- from pipes import quote as cmd_quote
-
-from . import types as t
-
from .encoding import (
to_bytes,
to_optional_bytes,
@@ -58,11 +35,18 @@ from .io import (
read_text_file,
)
-try:
- C = t.TypeVar('C')
-except AttributeError:
- C = None
+from .thread import (
+ mutex,
+)
+
+from .constants import (
+ SUPPORTED_PYTHON_VERSIONS,
+)
+C = t.TypeVar('C')
+TType = t.TypeVar('TType')
+TKey = t.TypeVar('TKey')
+TValue = t.TypeVar('TValue')
PYTHON_PATHS = {} # type: t.Dict[str, str]
@@ -72,13 +56,6 @@ try:
except AttributeError:
MAXFD = -1
-try:
- TKey = t.TypeVar('TKey')
- TValue = t.TypeVar('TValue')
-except AttributeError:
- TKey = None # pylint: disable=invalid-name
- TValue = None # pylint: disable=invalid-name
-
COVERAGE_CONFIG_NAME = 'coveragerc'
ANSIBLE_TEST_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -119,26 +96,47 @@ MODE_FILE_WRITE = MODE_FILE | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
MODE_DIRECTORY = MODE_READ | stat.S_IWUSR | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
MODE_DIRECTORY_WRITE = MODE_DIRECTORY | stat.S_IWGRP | stat.S_IWOTH
-CONTROLLER_MIN_PYTHON_VERSION = '3.8'
-
-SUPPORTED_PYTHON_VERSIONS = (
- '2.6',
- '2.7',
- '3.5',
- '3.6',
- '3.7',
- '3.8',
- '3.9',
- '3.10',
-)
+def cache(func): # type: (t.Callable[[], TValue]) -> t.Callable[[], TValue]
+ """Enforce exclusive access on a decorated function and cache the result."""
+ storage = {} # type: t.Dict[None, TValue]
+ sentinel = object()
-def remove_file(path):
- """
- :type path: str
- """
- if os.path.isfile(path):
- os.remove(path)
+ @functools.wraps(func)
+ def cache_func():
+ """Cache the return value from func."""
+ if (value := storage.get(None, sentinel)) is sentinel:
+ value = storage[None] = func()
+
+ return value
+
+ wrapper = mutex(cache_func)
+
+ return wrapper
+
+
+def filter_args(args, filters): # type: (t.List[str], t.Dict[str, int]) -> t.List[str]
+ """Return a filtered version of the given command line arguments."""
+ remaining = 0
+ result = []
+
+ for arg in args:
+ if not arg.startswith('-') and remaining:
+ remaining -= 1
+ continue
+
+ remaining = 0
+
+ parts = arg.split('=', 1)
+ key = parts[0]
+
+ if key in filters:
+ remaining = filters[key] - len(parts) + 1
+ continue
+
+ result.append(arg)
+
+ return result
def read_lines_without_comments(path, remove_blank_lines=False, optional=False): # type: (str, bool, bool) -> t.List[str]
@@ -224,7 +222,7 @@ def find_python(version, path=None, required=True):
:type required: bool
:rtype: str
"""
- version_info = tuple(int(n) for n in version.split('.'))
+ version_info = str_to_version(version)
if not path and version_info == sys.version_info[:len(version_info)]:
python_bin = sys.executable
@@ -234,13 +232,9 @@ def find_python(version, path=None, required=True):
return python_bin
+@cache
def get_ansible_version(): # type: () -> str
"""Return the Ansible version."""
- try:
- return get_ansible_version.version
- except AttributeError:
- pass
-
# ansible may not be in our sys.path
# avoids a symlink to release.py since ansible placement relative to ansible-test may change during delegation
load_module(os.path.join(ANSIBLE_LIB_ROOT, 'release.py'), 'ansible_release')
@@ -248,30 +242,13 @@ def get_ansible_version(): # type: () -> str
# noinspection PyUnresolvedReferences
from ansible_release import __version__ as ansible_version # pylint: disable=import-error
- get_ansible_version.version = ansible_version
-
return ansible_version
+@cache
def get_available_python_versions(): # type: () -> t.Dict[str, str]
"""Return a dictionary indicating which supported Python versions are available."""
- try:
- return get_available_python_versions.result
- except AttributeError:
- pass
-
- get_available_python_versions.result = dict((version, path) for version, path in
- ((version, find_python(version, required=False)) for version in SUPPORTED_PYTHON_VERSIONS) if path)
-
- return get_available_python_versions.result
-
-
-def generate_pip_command(python):
- """
- :type python: str
- :rtype: list[str]
- """
- return [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'quiet_pip.py')]
+ return dict((version, path) for version, path in ((version, find_python(version, required=False)) for version in SUPPORTED_PYTHON_VERSIONS) if path)
def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None,
@@ -298,7 +275,7 @@ def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False
cmd = list(cmd)
- escaped_cmd = ' '.join(cmd_quote(c) for c in cmd)
+ escaped_cmd = ' '.join(shlex.quote(c) for c in cmd)
display.info('Run command: %s' % escaped_cmd, verbosity=cmd_verbosity, truncate=True)
display.info('Working directory: %s' % cwd, verbosity=2)
@@ -438,27 +415,6 @@ def pass_vars(required, optional):
return env
-def deepest_path(path_a, path_b):
- """Return the deepest of two paths, or None if the paths are unrelated.
- :type path_a: str
- :type path_b: str
- :rtype: str | None
- """
- if path_a == '.':
- path_a = ''
-
- if path_b == '.':
- path_b = ''
-
- if path_a.startswith(path_b):
- return path_a or '.'
-
- if path_b.startswith(path_a):
- return path_b or '.'
-
- return None
-
-
def remove_tree(path):
"""
:type path: str
@@ -475,7 +431,7 @@ def is_binary_file(path):
:type path: str
:rtype: bool
"""
- assume_text = set([
+ assume_text = {
'.cfg',
'.conf',
'.crt',
@@ -497,9 +453,9 @@ def is_binary_file(path):
'.xml',
'.yaml',
'.yml',
- ])
+ }
- assume_binary = set([
+ assume_binary = {
'.bin',
'.eot',
'.gz',
@@ -515,7 +471,7 @@ def is_binary_file(path):
'.woff',
'.woff2',
'.zip',
- ])
+ }
ext = os.path.splitext(path)[1]
@@ -530,6 +486,11 @@ def is_binary_file(path):
return b'\0' in path_fd.read(4096)
+def generate_name(length=8): # type: (int) -> str
+ """Generate and return a random name."""
+ return ''.join(random.choice(string.ascii_letters + string.digits) for _idx in range(length))
+
+
def generate_password():
"""Generate a random password.
:rtype: str
@@ -686,7 +647,7 @@ class SubprocessError(ApplicationError):
:type runtime: float | None
:type error_callback: t.Optional[t.Callable[[SubprocessError], None]]
"""
- message = 'Command "%s" returned exit status %s.\n' % (' '.join(cmd_quote(c) for c in cmd), status)
+ message = 'Command "%s" returned exit status %s.\n' % (' '.join(shlex.quote(c) for c in cmd), status)
if stderr:
message += '>>> Standard Error\n'
@@ -708,7 +669,7 @@ class SubprocessError(ApplicationError):
self.message = self.message.strip()
- super(SubprocessError, self).__init__(self.message)
+ super().__init__(self.message)
class MissingEnvironmentVariable(ApplicationError):
@@ -717,11 +678,22 @@ class MissingEnvironmentVariable(ApplicationError):
"""
:type name: str
"""
- super(MissingEnvironmentVariable, self).__init__('Missing environment variable: %s' % name)
+ super().__init__('Missing environment variable: %s' % name)
self.name = name
+def retry(func, ex_type=SubprocessError, sleep=10, attempts=10):
+ """Retry the specified function on failure."""
+ for dummy in range(1, attempts):
+ try:
+ return func()
+ except ex_type:
+ time.sleep(sleep)
+
+ return func()
+
+
def parse_to_list_of_dict(pattern, value):
"""
:type pattern: str
@@ -745,8 +717,8 @@ def parse_to_list_of_dict(pattern, value):
return matched
-def get_subclasses(class_type): # type: (t.Type[C]) -> t.Set[t.Type[C]]
- """Returns the set of types that are concrete subclasses of the given type."""
+def get_subclasses(class_type): # type: (t.Type[C]) -> t.List[t.Type[C]]
+ """Returns a list of types that are concrete subclasses of the given type."""
subclasses = set() # type: t.Set[t.Type[C]]
queue = [class_type] # type: t.List[t.Type[C]]
@@ -759,7 +731,7 @@ def get_subclasses(class_type): # type: (t.Type[C]) -> t.Set[t.Type[C]]
subclasses.add(child)
queue.append(child)
- return subclasses
+ return sorted(subclasses, key=lambda sc: sc.__name__)
def is_subdir(candidate_path, path): # type: (str, str) -> bool
@@ -799,6 +771,11 @@ def version_to_str(version): # type: (t.Tuple[int, ...]) -> str
return '.'.join(str(n) for n in version)
+def sorted_versions(versions): # type: (t.List[str]) -> t.List[str]
+ """Return a sorted copy of the given list of versions."""
+ return [version_to_str(version) for version in sorted(str_to_version(version) for version in versions)]
+
+
def import_plugins(directory, root=None): # type: (str, t.Optional[str]) -> None
"""
Import plugins from the given directory relative to the given root.
@@ -851,37 +828,11 @@ def load_module(path, name): # type: (str, str) -> None
imp.load_module(name, module_file, path, ('.py', 'r', imp.PY_SOURCE))
-@contextlib.contextmanager
-def tempdir(): # type: () -> str
- """Creates a temporary directory that is deleted outside the context scope."""
- temp_path = tempfile.mkdtemp()
- yield temp_path
- shutil.rmtree(temp_path)
-
-
-@contextlib.contextmanager
-def open_zipfile(path, mode='r'):
- """Opens a zip file and closes the file automatically."""
- zib_obj = zipfile.ZipFile(path, mode=mode)
- yield zib_obj
- zib_obj.close()
-
-
def sanitize_host_name(name):
"""Return a sanitized version of the given name, suitable for use as a hostname."""
return re.sub('[^A-Za-z0-9]+', '-', name)[:63].strip('-')
-def devnull():
- """Return a file descriptor for /dev/null, using a previously cached version if available."""
- try:
- return devnull.fd
- except AttributeError:
- devnull.fd = os.open('/dev/null', os.O_RDONLY)
-
- return devnull.fd
-
-
def get_hash(path):
"""
:type path: str
@@ -897,13 +848,9 @@ def get_hash(path):
return file_hash.hexdigest()
+@cache
def get_host_ip():
"""Return the host's IP address."""
- try:
- return get_host_ip.ip
- except AttributeError:
- pass
-
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
sock.connect(('10.255.255.255', 22))
host_ip = get_host_ip.ip = sock.getsockname()[0]
@@ -913,7 +860,37 @@ def get_host_ip():
return host_ip
-display = Display() # pylint: disable=locally-disabled, invalid-name
+def get_generic_type(base_type, generic_base_type): # type: (t.Type, t.Type[TType]) -> t.Optional[t.Type[TType]]
+ """Return the generic type arg derived from the generic_base_type type that is associated with the base_type type, if any, otherwise return None."""
+ # noinspection PyUnresolvedReferences
+ type_arg = t.get_args(base_type.__orig_bases__[0])[0]
+ return None if isinstance(type_arg, generic_base_type) else type_arg
+
+
+def get_type_associations(base_type, generic_base_type): # type: (t.Type[TType], t.Type[TValue]) -> t.List[t.Tuple[t.Type[TValue], t.Type[TType]]]
+ """Create and return a list of tuples associating generic_base_type derived types with a corresponding base_type derived type."""
+ return [item for item in [(get_generic_type(sc_type, generic_base_type), sc_type) for sc_type in get_subclasses(base_type)] if item[1]]
+
+
+def get_type_map(base_type, generic_base_type): # type: (t.Type[TType], t.Type[TValue]) -> t.Dict[t.Type[TValue], t.Type[TType]]
+ """Create and return a mapping of generic_base_type derived types to base_type derived types."""
+ return {item[0]: item[1] for item in get_type_associations(base_type, generic_base_type)}
+
-CONTROLLER_PYTHON_VERSIONS = tuple(version for version in SUPPORTED_PYTHON_VERSIONS if str_to_version(version) >= str_to_version(CONTROLLER_MIN_PYTHON_VERSION))
-REMOTE_ONLY_PYTHON_VERSIONS = tuple(version for version in SUPPORTED_PYTHON_VERSIONS if str_to_version(version) < str_to_version(CONTROLLER_MIN_PYTHON_VERSION))
+def verify_sys_executable(path): # type: (str) -> t.Optional[str]
+ """Verify that the given path references the current Python interpreter. If not, return the expected path, otherwise return None."""
+ if path == sys.executable:
+ return None
+
+ if os.path.realpath(path) == os.path.realpath(sys.executable):
+ return None
+
+ expected_executable = raw_command([path, '-c', 'import sys; print(sys.executable)'], capture=True)[0]
+
+ if expected_executable == sys.executable:
+ return None
+
+ return expected_executable
+
+
+display = Display() # pylint: disable=locally-disabled, invalid-name
diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py
index 50f804ce0f..1850d8049f 100644
--- a/test/lib/ansible_test/_internal/util_common.py
+++ b/test/lib/ansible_test/_internal/util_common.py
@@ -1,41 +1,37 @@
"""Common utility code that depends on CommonConfig."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import atexit
import contextlib
+import json
import os
import re
-import shutil
+import shlex
import sys
import tempfile
import textwrap
-
-from . import types as t
+import typing as t
from .encoding import (
to_bytes,
)
from .util import (
- common_environment,
- COVERAGE_CONFIG_NAME,
display,
- find_python,
remove_tree,
MODE_DIRECTORY,
MODE_FILE_EXECUTE,
PYTHON_PATHS,
raw_command,
- read_lines_without_comments,
ANSIBLE_TEST_DATA_ROOT,
ANSIBLE_TEST_TARGET_ROOT,
+ ANSIBLE_TEST_TOOLS_ROOT,
ApplicationError,
- cmd_quote,
- SubprocessError,
+ generate_name,
)
from .io import (
+ make_dirs,
write_text_file,
write_json_file,
)
@@ -48,9 +44,12 @@ from .provider.layout import (
LayoutMessages,
)
-DOCKER_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
-REMOTE_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
-NETWORK_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
+from .host_configs import (
+ PythonConfig,
+ VirtualPythonConfig,
+)
+
+CHECK_YAML_VERSIONS = {}
class ShellScriptTemplate:
@@ -58,13 +57,21 @@ class ShellScriptTemplate:
def __init__(self, template): # type: (t.Text) -> None
self.template = template
- def substitute(self, **kwargs):
+ def substitute(self, **kwargs): # type: (t.Dict[str, t.Union[str, t.List[str]]]) -> str
"""Return a string templated with the given arguments."""
- kvp = dict((k, cmd_quote(v)) for k, v in kwargs.items())
+ kvp = dict((k, self.quote(v)) for k, v in kwargs.items())
pattern = re.compile(r'#{(?P<name>[^}]+)}')
value = pattern.sub(lambda match: kvp[match.group('name')], self.template)
return value
+ @staticmethod
+ def quote(value): # type: (t.Union[str, t.List[str]]) -> str
+ """Return a shell quoted version of the given value."""
+ if isinstance(value, list):
+ return shlex.quote(' '.join(value))
+
+ return shlex.quote(value)
+
class ResultType:
"""Test result type."""
@@ -115,6 +122,7 @@ class CommonConfig:
:type command: str
"""
self.command = command
+ self.success = None # type: t.Optional[bool]
self.color = args.color # type: bool
self.explain = args.explain # type: bool
@@ -125,6 +133,8 @@ class CommonConfig:
self.info_stderr = False # type: bool
+ self.session_name = generate_name()
+
self.cache = {}
def get_ansible_config(self): # type: () -> str
@@ -132,67 +142,13 @@ class CommonConfig:
return os.path.join(ANSIBLE_TEST_DATA_ROOT, 'ansible.cfg')
-def get_docker_completion():
- """
- :rtype: dict[str, dict[str, str]]
- """
- return get_parameterized_completion(DOCKER_COMPLETION, 'docker')
-
-
-def get_remote_completion():
- """
- :rtype: dict[str, dict[str, str]]
- """
- return get_parameterized_completion(REMOTE_COMPLETION, 'remote')
-
-
-def get_network_completion():
- """
- :rtype: dict[str, dict[str, str]]
- """
- return get_parameterized_completion(NETWORK_COMPLETION, 'network')
-
-
-def get_parameterized_completion(cache, name):
- """
- :type cache: dict[str, dict[str, str]]
- :type name: str
- :rtype: dict[str, dict[str, str]]
- """
- if not cache:
- if data_context().content.collection:
- context = 'collection'
- else:
- context = 'ansible-core'
-
- images = read_lines_without_comments(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name), remove_blank_lines=True)
-
- cache.update(dict(kvp for kvp in [parse_parameterized_completion(i) for i in images] if kvp and kvp[1].get('context', context) == context))
-
- return cache
-
-
-def parse_parameterized_completion(value): # type: (str) -> t.Optional[t.Tuple[str, t.Dict[str, str]]]
- """Parse the given completion entry, returning the entry name and a dictionary of key/value settings."""
- values = value.split()
-
- if not values:
- return None
-
- name = values[0]
- data = dict((kvp[0], kvp[1] if len(kvp) > 1 else '') for kvp in [item.split('=', 1) for item in values[1:]])
-
- return name, data
-
-
-def docker_qualify_image(name):
- """
- :type name: str
- :rtype: str
- """
- config = get_docker_completion().get(name, {})
+def create_result_directories(args): # type: (CommonConfig) -> None
+ """Create result directories."""
+ if args.explain:
+ return
- return config.get('name', name)
+ make_dirs(ResultType.COVERAGE.path)
+ make_dirs(ResultType.DATA.path)
def handle_layout_messages(messages): # type: (t.Optional[LayoutMessages]) -> None
@@ -210,6 +166,29 @@ def handle_layout_messages(messages): # type: (t.Optional[LayoutMessages]) -> N
raise ApplicationError('\n'.join(messages.error))
+def process_scoped_temporary_file(args, prefix='ansible-test-', suffix=None): # type: (CommonConfig, t.Optional[str], t.Optional[str]) -> str
+ """Return the path to a temporary file that will be automatically removed when the process exits."""
+ if args.explain:
+ path = os.path.join(tempfile.gettempdir(), f'{prefix or tempfile.gettempprefix()}{generate_name()}{suffix or ""}')
+ else:
+ temp_fd, path = tempfile.mkstemp(prefix=prefix, suffix=suffix)
+ os.close(temp_fd)
+ atexit.register(lambda: os.remove(path))
+
+ return path
+
+
+def process_scoped_temporary_directory(args, prefix='ansible-test-', suffix=None): # type: (CommonConfig, t.Optional[str], t.Optional[str]) -> str
+ """Return the path to a temporary directory that will be automatically removed when the process exits."""
+ if args.explain:
+ path = os.path.join(tempfile.gettempdir(), f'{prefix or tempfile.gettempprefix()}{generate_name()}{suffix or ""}')
+ else:
+ path = tempfile.mkdtemp(prefix=prefix, suffix=suffix)
+ atexit.register(lambda: remove_tree(path))
+
+ return path
+
+
@contextlib.contextmanager
def named_temporary_file(args, prefix, suffix, directory, content):
"""
@@ -247,12 +226,8 @@ def write_text_test_results(category, name, content): # type: (ResultType, str,
write_text_file(path, content, create_directories=True)
-def get_python_path(args, interpreter):
- """
- :type args: TestConfig
- :type interpreter: str
- :rtype: str
- """
+def get_python_path(interpreter): # type: (str) -> str
+ """Return the path to a directory which contains a `python` executable that runs the specified interpreter."""
python_path = PYTHON_PATHS.get(interpreter)
if python_path:
@@ -263,9 +238,6 @@ def get_python_path(args, interpreter):
root_temp_dir = '/tmp'
- if args.explain:
- return os.path.join(root_temp_dir, ''.join((prefix, 'temp', suffix)))
-
python_path = tempfile.mkdtemp(prefix=prefix, suffix=suffix, dir=root_temp_dir)
injected_interpreter = os.path.join(python_path, 'python')
@@ -326,136 +298,39 @@ def cleanup_python_paths():
"""Clean up all temporary python directories."""
for path in sorted(PYTHON_PATHS.values()):
display.info('Cleaning up temporary python directory: %s' % path, verbosity=2)
- shutil.rmtree(path)
-
-
-def get_coverage_environment(args, target_name, version, temp_path, module_coverage, remote_temp_path=None):
- """
- :type args: TestConfig
- :type target_name: str
- :type version: str
- :type temp_path: str
- :type module_coverage: bool
- :type remote_temp_path: str | None
- :rtype: dict[str, str]
- """
- if temp_path:
- # integration tests (both localhost and the optional testhost)
- # config and results are in a temporary directory
- coverage_config_base_path = temp_path
- coverage_output_base_path = temp_path
- elif args.coverage_config_base_path:
- # unit tests, sanity tests and other special cases (localhost only)
- # config is in a temporary directory
- # results are in the source tree
- coverage_config_base_path = args.coverage_config_base_path
- coverage_output_base_path = os.path.join(data_context().content.root, data_context().content.results_path)
- else:
- raise Exception('No temp path and no coverage config base path. Check for missing coverage_context usage.')
-
- config_file = os.path.join(coverage_config_base_path, COVERAGE_CONFIG_NAME)
- coverage_file = os.path.join(coverage_output_base_path, ResultType.COVERAGE.name, '%s=%s=%s=%s=coverage' % (
- args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version))
-
- if not args.explain and not os.path.exists(config_file):
- raise Exception('Missing coverage config file: %s' % config_file)
-
- if args.coverage_check:
- # cause the 'coverage' module to be found, but not imported or enabled
- coverage_file = ''
-
- # Enable code coverage collection on local Python programs (this does not include Ansible modules).
- # Used by the injectors to support code coverage.
- # Used by the pytest unit test plugin to support code coverage.
- # The COVERAGE_FILE variable is also used directly by the 'coverage' module.
- env = dict(
- COVERAGE_CONF=config_file,
- COVERAGE_FILE=coverage_file,
- )
-
- if module_coverage:
- # Enable code coverage collection on Ansible modules (both local and remote).
- # Used by the AnsiballZ wrapper generator in lib/ansible/executor/module_common.py to support code coverage.
- env.update(dict(
- _ANSIBLE_COVERAGE_CONFIG=config_file,
- _ANSIBLE_COVERAGE_OUTPUT=coverage_file,
- ))
-
- if remote_temp_path:
- # Include the command, target and label so the remote host can create a filename with that info. The remote
- # is responsible for adding '={language version}=coverage.{hostname}.{pid}.{id}'
- env['_ANSIBLE_COVERAGE_REMOTE_OUTPUT'] = os.path.join(remote_temp_path, '%s=%s=%s' % (
- args.command, target_name, args.coverage_label or 'remote'))
- env['_ANSIBLE_COVERAGE_REMOTE_PATH_FILTER'] = os.path.join(data_context().content.root, '*')
-
- return env
-
-
-def intercept_command(args, cmd, target_name, env, capture=False, data=None, cwd=None, python_version=None, temp_path=None, module_coverage=True,
- virtualenv=None, disable_coverage=False, remote_temp_path=None):
+ remove_tree(path)
+
+
+def intercept_python(
+ args, # type: CommonConfig
+ python, # type: PythonConfig
+ cmd, # type: t.List[str]
+ env, # type: t.Dict[str, str]
+ capture=False, # type: bool
+ data=None, # type: t.Optional[str]
+ cwd=None, # type: t.Optional[str]
+ always=False, # type: bool
+): # type: (...) -> t.Tuple[t.Optional[str], t.Optional[str]]
"""
- :type args: TestConfig
- :type cmd: collections.Iterable[str]
- :type target_name: str
- :type env: dict[str, str]
- :type capture: bool
- :type data: str | None
- :type cwd: str | None
- :type python_version: str | None
- :type temp_path: str | None
- :type module_coverage: bool
- :type virtualenv: str | None
- :type disable_coverage: bool
- :type remote_temp_path: str | None
- :rtype: str | None, str | None
+ Run a command while intercepting invocations of Python to control the version used.
+ If the specified Python is an ansible-test managed virtual environment, it will be added to PATH to activate it.
+ Otherwise a temporary directory will be created to ensure the correct Python can be found in PATH.
"""
- if not env:
- env = common_environment()
- else:
- env = env.copy()
-
+ env = env.copy()
cmd = list(cmd)
- version = python_version or args.python_version
- interpreter = virtualenv or find_python(version)
inject_path = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'injector')
- if not virtualenv:
- # injection of python into the path is required when not activating a virtualenv
- # otherwise scripts may find the wrong interpreter or possibly no interpreter
- python_path = get_python_path(args, interpreter)
- inject_path = python_path + os.path.pathsep + inject_path
-
- env['PATH'] = inject_path + os.path.pathsep + env['PATH']
- env['ANSIBLE_TEST_PYTHON_VERSION'] = version
- env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter
-
- if not disable_coverage and args.coverage:
- # add the necessary environment variables to enable code coverage collection
- env.update(get_coverage_environment(args, target_name, version, temp_path, module_coverage,
- remote_temp_path=remote_temp_path))
-
- return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
-
-
-def resolve_csharp_ps_util(import_name, path):
- """
- :type import_name: str
- :type path: str
- """
- if data_context().content.is_ansible or not import_name.startswith('.'):
- # We don't support relative paths for builtin utils, there's no point.
- return import_name
-
- packages = import_name.split('.')
- module_packages = path.split(os.path.sep)
+ # make sure scripts (including injector.py) find the correct Python interpreter
+ if isinstance(python, VirtualPythonConfig):
+ python_path = os.path.dirname(python.path)
+ else:
+ python_path = get_python_path(python.path)
- for package in packages:
- if not module_packages or package:
- break
- del module_packages[-1]
+ env['PATH'] = os.path.pathsep.join([inject_path, python_path, env['PATH']])
+ env['ANSIBLE_TEST_PYTHON_VERSION'] = python.version
+ env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = python.path
- return 'ansible_collections.%s%s' % (data_context().content.prefix,
- '.'.join(module_packages + [p for p in packages if p]))
+ return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd, always=always)
def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=False, stdin=None, stdout=None,
@@ -478,3 +353,40 @@ def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=
explain = args.explain and not always
return raw_command(cmd, capture=capture, env=env, data=data, cwd=cwd, explain=explain, stdin=stdin, stdout=stdout,
cmd_verbosity=cmd_verbosity, str_errors=str_errors, error_callback=error_callback)
+
+
+def yamlcheck(python):
+ """Return True if PyYAML has libyaml support, False if it does not and None if it was not found."""
+ result = json.loads(raw_command([python.path, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'yamlcheck.py')], capture=True)[0])
+
+ if not result['yaml']:
+ return None
+
+ return result['cloader']
+
+
+def check_pyyaml(python, required=True, quiet=False): # type: (PythonConfig, bool, bool) -> t.Optional[bool]
+ """
+ Return True if PyYAML has libyaml support, False if it does not and None if it was not found.
+ The result is cached if True or required.
+ """
+ try:
+ return CHECK_YAML_VERSIONS[python.path]
+ except KeyError:
+ pass
+
+ state = yamlcheck(python)
+
+ if state is not None or required:
+ # results are cached only if pyyaml is required or present
+ # it is assumed that tests will not uninstall/re-install pyyaml -- if they do, those changes will go undetected
+ CHECK_YAML_VERSIONS[python.path] = state
+
+ if not quiet:
+ if state is None:
+ if required:
+ display.warning('PyYAML is not installed for interpreter: %s' % python.path)
+ elif not state:
+ display.warning('PyYAML will be slow due to installation without libyaml support for interpreter: %s' % python.path)
+
+ return state
diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py
index 181c01ba95..2cfd978dd4 100644
--- a/test/lib/ansible_test/_internal/venv.py
+++ b/test/lib/ansible_test/_internal/venv.py
@@ -1,12 +1,10 @@
"""Virtual environment management."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
+from __future__ import annotations
import json
import os
import sys
-
-from . import types as t
+import typing as t
from .config import (
EnvironmentConfig,
@@ -19,65 +17,93 @@ from .util import (
ANSIBLE_TEST_TOOLS_ROOT,
display,
remove_tree,
+ ApplicationError,
+ str_to_version,
)
from .util_common import (
run_command,
+ ResultType,
+)
+
+from .host_configs import (
+ VirtualPythonConfig,
+ PythonConfig,
)
+def get_virtual_python(
+ args, # type: EnvironmentConfig
+ python, # type: VirtualPythonConfig
+):
+ """Create a virtual environment for the given Python and return the path to its root."""
+ if python.system_site_packages:
+ suffix = '-ssp'
+ else:
+ suffix = ''
+
+ virtual_environment_path = os.path.join(ResultType.TMP.path, 'delegation', f'python{python.version}{suffix}')
+
+ if not create_virtual_environment(args, python, virtual_environment_path, python.system_site_packages):
+ raise ApplicationError(f'Python {python.version} does not provide virtual environment support.')
+
+ return virtual_environment_path
+
+
def create_virtual_environment(args, # type: EnvironmentConfig
- version, # type: str
+ python, # type: PythonConfig
path, # type: str
system_site_packages=False, # type: bool
pip=True, # type: bool
): # type: (...) -> bool
"""Create a virtual environment using venv or virtualenv for the requested Python version."""
if os.path.isdir(path):
- display.info('Using existing Python %s virtual environment: %s' % (version, path), verbosity=1)
+ display.info('Using existing Python %s virtual environment: %s' % (python.version, path), verbosity=1)
return True
- python = find_python(version, required=False)
- python_version = tuple(int(v) for v in version.split('.'))
-
- if not python:
+ if not os.path.exists(python.path):
# the requested python version could not be found
return False
- if python_version >= (3, 0):
+ if str_to_version(python.version) >= (3, 0):
# use the built-in 'venv' module on Python 3.x
# creating a virtual environment using 'venv' when running in a virtual environment created by 'virtualenv' results
# in a copy of the original virtual environment instead of creation of a new one
# avoid this issue by only using "real" python interpreters to invoke 'venv'
- for real_python in iterate_real_pythons(args, version):
+ for real_python in iterate_real_pythons(args, python.version):
if run_venv(args, real_python, system_site_packages, pip, path):
- display.info('Created Python %s virtual environment using "venv": %s' % (version, path), verbosity=1)
+ display.info('Created Python %s virtual environment using "venv": %s' % (python.version, path), verbosity=1)
return True
# something went wrong, most likely the package maintainer for the Python installation removed ensurepip
# which will prevent creation of a virtual environment without installation of other OS packages
# use the installed 'virtualenv' module on the Python requested version
- if run_virtualenv(args, python, python, system_site_packages, pip, path):
- display.info('Created Python %s virtual environment using "virtualenv": %s' % (version, path), verbosity=1)
+ if run_virtualenv(args, python.path, python.path, system_site_packages, pip, path):
+ display.info('Created Python %s virtual environment using "virtualenv": %s' % (python.version, path), verbosity=1)
return True
available_pythons = get_available_python_versions()
for available_python_version, available_python_interpreter in sorted(available_pythons.items()):
+ if available_python_interpreter == python.path:
+ # already attempted to use this interpreter
+ continue
+
virtualenv_version = get_virtualenv_version(args, available_python_interpreter)
if not virtualenv_version:
# virtualenv not available for this Python or we were unable to detect the version
continue
- if python_version == (2, 6) and virtualenv_version >= (16, 0, 0):
+ if python.version == '2.6' and virtualenv_version >= (16, 0, 0):
# virtualenv 16.0.0 dropped python 2.6 support: https://virtualenv.pypa.io/en/latest/changes/#v16-0-0-2018-05-16
continue
# try using 'virtualenv' from another Python to setup the desired version
- if run_virtualenv(args, available_python_interpreter, python, system_site_packages, pip, path):
- display.info('Created Python %s virtual environment using "virtualenv" on Python %s: %s' % (version, available_python_version, path), verbosity=1)
+ if run_virtualenv(args, available_python_interpreter, python.path, system_site_packages, pip, path):
+ display.info('Created Python %s virtual environment using "virtualenv" on Python %s: %s' % (python.version, available_python_version, path),
+ verbosity=1)
return True
# no suitable 'virtualenv' available
@@ -89,7 +115,7 @@ def iterate_real_pythons(args, version): # type: (EnvironmentConfig, str) -> t.
Iterate through available real python interpreters of the requested version.
The current interpreter will be checked and then the path will be searched.
"""
- version_info = tuple(int(n) for n in version.split('.'))
+ version_info = str_to_version(version)
current_python = None
if version_info == sys.version_info[:len(version_info)]:
@@ -124,11 +150,11 @@ def iterate_real_pythons(args, version): # type: (EnvironmentConfig, str) -> t.
yield found_python
-def get_python_real_prefix(args, path): # type: (EnvironmentConfig, str) -> t.Optional[str]
+def get_python_real_prefix(args, python_path): # type: (EnvironmentConfig, str) -> t.Optional[str]
"""
Return the real prefix of the specified interpreter or None if the interpreter is not a virtual environment created by 'virtualenv'.
"""
- cmd = [path, os.path.join(os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'virtualenvcheck.py'))]
+ cmd = [python_path, os.path.join(os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'virtualenvcheck.py'))]
check_result = json.loads(run_command(args, cmd, capture=True, always=True)[0])
real_prefix = check_result['real_prefix']
return real_prefix
@@ -172,7 +198,7 @@ def run_virtualenv(args, # type: EnvironmentConfig
path, # type: str
): # type: (...) -> bool
"""Create a virtual environment using the 'virtualenv' module."""
- # always specify --python to guarantee the desired interpreter is provided
+ # always specify which interpreter to use to guarantee the desired interpreter is provided
# otherwise virtualenv may select a different interpreter than the one running virtualenv
cmd = [run_python, '-m', 'virtualenv', '--python', env_python]
@@ -198,29 +224,32 @@ def run_virtualenv(args, # type: EnvironmentConfig
def get_virtualenv_version(args, python): # type: (EnvironmentConfig, str) -> t.Optional[t.Tuple[int, ...]]
- """Get the virtualenv version for the given python intepreter, if available."""
+ """Get the virtualenv version for the given python interpreter, if available, otherwise return None."""
try:
- return get_virtualenv_version.result
+ cache = get_virtualenv_version.cache
except AttributeError:
- pass
+ cache = get_virtualenv_version.cache = {}
- get_virtualenv_version.result = None
+ if python not in cache:
+ try:
+ stdout = run_command(args, [python, '-m', 'virtualenv', '--version'], capture=True)[0]
+ except SubprocessError as ex:
+ stdout = ''
- cmd = [python, '-m', 'virtualenv', '--version']
+ if args.verbosity > 1:
+ display.error(ex)
- try:
- stdout = run_command(args, cmd, capture=True)[0]
- except SubprocessError as ex:
- if args.verbosity > 1:
- display.error(ex)
+ version = None
- stdout = ''
+ if stdout:
+ # noinspection PyBroadException
+ try:
+ version = str_to_version(stdout.strip())
+ except Exception: # pylint: disable=broad-except
+ pass
- if stdout:
- # noinspection PyBroadException
- try:
- get_virtualenv_version.result = tuple(int(v) for v in stdout.strip().split('.'))
- except Exception: # pylint: disable=broad-except
- pass
+ cache[python] = version
+
+ version = cache[python]
- return get_virtualenv_version.result
+ return version
diff --git a/test/lib/ansible_test/_util/__init__.py b/test/lib/ansible_test/_util/__init__.py
new file mode 100644
index 0000000000..d6fc0a8614
--- /dev/null
+++ b/test/lib/ansible_test/_util/__init__.py
@@ -0,0 +1,3 @@
+"""Nearly empty __init__.py to allow importing under Python 2.x."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py b/test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py
deleted file mode 100755
index d12b6334ef..0000000000
--- a/test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-# PYTHON_ARGCOMPLETE_OK
-"""Command line entry point for ansible-test."""
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import sys
-
-
-def main():
- """Main program entry point."""
- ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
- source_root = os.path.join(ansible_root, 'test', 'lib')
-
- if os.path.exists(os.path.join(source_root, 'ansible_test', '_internal', 'cli.py')):
- # running from source, use that version of ansible-test instead of any version that may already be installed
- sys.path.insert(0, source_root)
-
- # noinspection PyProtectedMember
- from ansible_test._internal.cli import main as cli_main
-
- cli_main()
-
-
-if __name__ == '__main__':
- main()
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
index 65142e0033..e19b4d98a4 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Test to verify action plugins have an associated module to provide documentation."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
index 2ccfb24f23..1875ab3aa4 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
index 8bcd7f9ed9..806c0e6ed1 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
index 81081eed7b..cdad96551e 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
index 1e4212d1b8..660b0fce85 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
index 28d06f363b..e3fba1f5dd 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
index 78561d966e..d6d710aeff 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -10,8 +9,8 @@ ASSERT_RE = re.compile(r'^\s*assert[^a-z0-9_:]')
def main():
for path in sys.argv[1:] or sys.stdin.read().splitlines():
- with open(path, 'r') as f:
- for i, line in enumerate(f.readlines()):
+ with open(path, 'r') as file:
+ for i, line in enumerate(file.readlines()):
matches = ASSERT_RE.findall(line)
if matches:
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
index a35650efad..18a3f6d1d1 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
index e28b24f4a9..7dfd5b2601 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
index 237ee5b1c1..8925e831d2 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
index 4bf92ea990..1813415438 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
index c925f5b729..5a267ba0df 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
index 99432ea133..421bbd6229 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
@@ -1,5 +1,3 @@
-#!/usr/bin/env python
-
# a script to check for illegal filenames on various Operating Systems. The
# main rules are derived from restrictions on Windows
# https://msdn.microsoft.com/en-us/library/aa365247#naming_conventions
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
index 74a36ecc58..e5abd64db8 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -9,8 +8,8 @@ MAIN_DISPLAY_IMPORT = 'from __main__ import display'
def main():
for path in sys.argv[1:] or sys.stdin.read().splitlines():
- with open(path, 'r') as f:
- for i, line in enumerate(f.readlines()):
+ with open(path, 'r') as file:
+ for i, line in enumerate(file.readlines()):
if MAIN_DISPLAY_IMPORT in line:
lineno = i + 1
colno = line.index(MAIN_DISPLAY_IMPORT) + 1
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
index e44005a55f..8399a36e0b 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
index e2201ab106..bb8c8f01d8 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
index b2de1ba85d..87575f5189 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
index 7db04ced7b..cad82a5575 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Schema validation of ansible-core's ansible_builtin_runtime.yml and collection's meta/runtime.yml"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -7,7 +6,6 @@ import datetime
import os
import re
import sys
-import warnings
from functools import partial
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
index b945734cf7..6f210651c6 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -69,9 +68,7 @@ def main():
is_module = True
elif re.search('^test/support/[^/]+/collections/ansible_collections/[^/]+/[^/]+/plugins/modules/', path):
is_module = True
- elif path.startswith('test/lib/ansible_test/_data/'):
- pass
- elif path.startswith('test/lib/ansible_test/_util/'):
+ elif path.startswith('test/lib/ansible_test/_util/target/'):
pass
elif path.startswith('lib/') or path.startswith('test/lib/'):
if executable:
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
index 0585c6b1e5..5603051ac5 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
index 687136dcdb..68f380b0a9 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
index 49cb76c5e2..a8f0b87950 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
+++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1 b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
index 1ef2743acd..9138a29904 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
+++ b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
@@ -1,4 +1,3 @@
-#!/usr/bin/env pwsh
#Requires -Version 6
#Requires -Modules PSScriptAnalyzer, PSSA-PSCustomUseLiteralPath
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/sanity.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
index bcf9549fd7..30e40ba1f4 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/sanity.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test-target.cfg
@@ -7,8 +7,8 @@ disable=
duplicate-code, # consistent results require running with --jobs 1 and testing all files
import-error, # inconsistent results which depend on the availability of imports
import-outside-toplevel, # common pattern in ansible related code
- missing-docstring,
no-name-in-module, # inconsistent results which depend on the availability of imports
+ no-self-use,
raise-missing-from, # Python 2.x does not support raise from
super-with-arguments, # Python 2.x does not support super without arguments
too-few-public-methods,
@@ -21,7 +21,7 @@ disable=
too-many-nested-blocks,
too-many-return-statements,
too-many-statements,
- unused-import, # pylint does not understand PEP 484 type hints
+ useless-return, # complains about returning None when the return type is optional
[BASIC]
@@ -37,17 +37,16 @@ bad-names=
good-names=
__metaclass__,
C,
- e,
ex,
- f,
i,
j,
k,
Run,
-module-rgx=[a-z_][a-z0-9_-]{2,40}$
-method-rgx=[a-z_][a-z0-9_]{2,40}$
-function-rgx=[a-z_][a-z0-9_]{2,40}$
+class-attribute-rgx=[A-Za-z_][A-Za-z0-9_]{1,40}$
+attr-rgx=[a-z_][a-z0-9_]{1,40}$
+method-rgx=[a-z_][a-z0-9_]{1,40}$
+function-rgx=[a-z_][a-z0-9_]{1,40}$
[IMPORTS]
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
index 187758f409..3c60aa77fe 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
@@ -1,8 +1,6 @@
[MESSAGES CONTROL]
disable=
- consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
- consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support
cyclic-import, # consistent results require running with --jobs 1 and testing all files
duplicate-code, # consistent results require running with --jobs 1 and testing all files
import-error, # inconsistent results which depend on the availability of imports
@@ -10,9 +8,7 @@ disable=
no-name-in-module, # inconsistent results which depend on the availability of imports
no-self-use,
raise-missing-from, # Python 2.x does not support raise from
- super-with-arguments, # Python 2.x does not support super without arguments
too-few-public-methods,
- too-many-ancestors, # inconsistent results between python 3.6 and 3.7+
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
@@ -21,7 +17,7 @@ disable=
too-many-nested-blocks,
too-many-return-statements,
too-many-statements,
- unused-import, # pylint does not understand PEP 484 type hints
+ useless-return, # complains about returning None when the return type is optional
[BASIC]
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg
new file mode 100644
index 0000000000..739d37576d
--- /dev/null
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/code-smell.cfg
@@ -0,0 +1,55 @@
+[MESSAGES CONTROL]
+
+disable=
+ cyclic-import, # consistent results require running with --jobs 1 and testing all files
+ duplicate-code, # consistent results require running with --jobs 1 and testing all files
+ import-error, # inconsistent results which depend on the availability of imports
+ import-outside-toplevel, # common pattern in ansible related code
+ no-name-in-module, # inconsistent results which depend on the availability of imports
+ no-self-use,
+ raise-missing-from, # Python 2.x does not support raise from
+ too-few-public-methods,
+ too-many-arguments,
+ too-many-branches,
+ too-many-instance-attributes,
+ too-many-lines,
+ too-many-locals,
+ too-many-nested-blocks,
+ too-many-return-statements,
+ too-many-statements,
+ useless-return, # complains about returning None when the return type is optional
+ # code-smell tests should be updated so the following rules can be enabled
+ # once that happens the pylint sanity test can be updated to no longer special-case the code-smell tests (use standard ansible-test config instead)
+ missing-module-docstring,
+ missing-function-docstring,
+
+[BASIC]
+
+bad-names=
+ _,
+ bar,
+ baz,
+ foo,
+ tata,
+ toto,
+ tutu,
+
+good-names=
+ __metaclass__,
+ C,
+ ex,
+ i,
+ j,
+ k,
+ Run,
+
+class-attribute-rgx=[A-Za-z_][A-Za-z0-9_]{1,40}$
+attr-rgx=[a-z_][a-z0-9_]{1,40}$
+method-rgx=[a-z_][a-z0-9_]{1,40}$
+function-rgx=[a-z_][a-z0-9_]{1,40}$
+module-rgx=[a-z_][a-z0-9_-]{2,40}$
+
+[IMPORTS]
+
+preferred-modules =
+ distutils.version:ansible.module_utils.compat.version,
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
index e39e5214bf..234ec217cd 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
@@ -1,3 +1,4 @@
+"""Ansible specific pylint plugin for checking deprecations."""
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
@@ -106,6 +107,7 @@ def _get_expr_name(node):
def parse_isodate(value):
+ """Parse an ISO 8601 date string."""
msg = 'Expected ISO 8601 date string (YYYY-MM-DD)'
if not isinstance(value, string_types):
raise ValueError(msg)
@@ -146,10 +148,10 @@ class AnsibleDeprecatedChecker(BaseChecker):
def __init__(self, *args, **kwargs):
self.collection_version = None
self.collection_name = None
- super(AnsibleDeprecatedChecker, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def set_option(self, optname, value, action=None, optdict=None):
- super(AnsibleDeprecatedChecker, self).set_option(optname, value, action, optdict)
+ super().set_option(optname, value, action, optdict)
if optname == 'collection-version' and value is not None:
self.collection_version = SemanticVersion(self.config.collection_version)
if optname == 'collection-name' and value is not None:
@@ -202,6 +204,7 @@ class AnsibleDeprecatedChecker(BaseChecker):
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
+ """Visit a call node."""
version = None
date = None
collection_name = None
diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
index 1c22a08b97..3b9a37e549 100644
--- a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
+++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
@@ -1,3 +1,4 @@
+"""Ansible specific pylint plugin for checking format string usage."""
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
@@ -41,6 +42,7 @@ class AnsibleStringFormatChecker(BaseChecker):
@check_messages(*(MSGS.keys()))
def visit_call(self, node):
+ """Visit a call node."""
func = utils.safe_infer(node.func)
if (isinstance(func, astroid.BoundMethod)
and isinstance(func.bound, astroid.Instance)
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py
index c1e2bdaaeb..e6749cdc61 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
index 8cd0e5e560..1f925bef28 100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
@@ -123,7 +123,7 @@ def get_ps_argument_spec(filename, collection):
})
script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
- proc = subprocess.Popen([script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ proc = subprocess.Popen(['pwsh', script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=False)
stdout, stderr = proc.communicate()
diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1 b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
index 5ceb9d50b7..fb4a61740a 100755..100644
--- a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
+++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
@@ -1,4 +1,3 @@
-#!/usr/bin/env pwsh
#Requires -Version 6
Set-StrictMode -Version 2.0
diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
index b9fc73e59d..34d2fde99f 100644
--- a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
+++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -29,9 +28,9 @@ def main():
class TestConstructor(SafeConstructor):
- """Yaml Safe Constructor that knows about Ansible tags"""
-
+ """Yaml Safe Constructor that knows about Ansible tags."""
def construct_yaml_unsafe(self, node):
+ """Construct an unsafe tag."""
try:
constructor = getattr(node, 'id', 'object')
if constructor is not None:
@@ -60,6 +59,7 @@ TestConstructor.add_constructor(
class TestLoader(CParser, TestConstructor, Resolver):
+ """Custom YAML loader that recognizes custom Ansible tags."""
def __init__(self, stream):
CParser.__init__(self, stream)
TestConstructor.__init__(self)
@@ -92,8 +92,8 @@ class YamlChecker:
for path in paths:
extension = os.path.splitext(path)[1]
- with open(path) as f:
- contents = f.read()
+ with open(path) as file:
+ contents = file.read()
if extension in ('.yml', '.yaml'):
self.check_yaml(yaml_conf, path, contents)
@@ -150,12 +150,12 @@ class YamlChecker:
"""
try:
yaml.load(contents, Loader=TestLoader)
- except MarkedYAMLError as e:
+ except MarkedYAMLError as ex:
self.messages += [{'code': 'unparsable-with-libyaml',
- 'message': '%s - %s' % (e.args[0], e.args[2]),
+ 'message': '%s - %s' % (ex.args[0], ex.args[2]),
'path': path,
- 'line': e.problem_mark.line + lineno,
- 'column': e.problem_mark.column + 1,
+ 'line': ex.problem_mark.line + lineno,
+ 'column': ex.problem_mark.column + 1,
'level': 'error',
}]
diff --git a/test/lib/ansible_test/_util/controller/tools/sslcheck.py b/test/lib/ansible_test/_util/controller/tools/sslcheck.py
index 37b8227936..115c5ed25a 100755..100644
--- a/test/lib/ansible_test/_util/controller/tools/sslcheck.py
+++ b/test/lib/ansible_test/_util/controller/tools/sslcheck.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Show openssl version."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/tools/versions.py b/test/lib/ansible_test/_util/controller/tools/versions.py
deleted file mode 100755
index 4babef0162..0000000000
--- a/test/lib/ansible_test/_util/controller/tools/versions.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-"""Show python and pip versions."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import sys
-import warnings
-
-warnings.simplefilter('ignore') # avoid python version deprecation warnings when using newer pip dependencies
-
-try:
- import pip
-except ImportError:
- pip = None
-
-print(sys.version)
-
-if pip:
- print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__)))
diff --git a/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py
index 0c8f768034..90dfa39410 100755..100644
--- a/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py
+++ b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Detect the real python interpreter when running in a virtual environment created by the 'virtualenv' module."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/controller/tools/yamlcheck.py b/test/lib/ansible_test/_util/controller/tools/yamlcheck.py
index 591842f4ad..dfd08e581c 100755..100644
--- a/test/lib/ansible_test/_util/controller/tools/yamlcheck.py
+++ b/test/lib/ansible_test/_util/controller/tools/yamlcheck.py
@@ -1,5 +1,4 @@
-#!/usr/bin/env python
-"""Show python and pip versions."""
+"""Show availability of PyYAML and libyaml support."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/target/__init__.py b/test/lib/ansible_test/_util/target/__init__.py
new file mode 100644
index 0000000000..d6fc0a8614
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/__init__.py
@@ -0,0 +1,3 @@
+"""Nearly empty __init__.py to allow importing under Python 2.x."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py b/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py
new file mode 100755
index 0000000000..dc31095a81
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/cli/ansible_test_cli_stub.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# PYTHON_ARGCOMPLETE_OK
+"""Command line entry point for ansible-test."""
+
+# NOTE: This file resides in the _util/target directory to ensure compatibility with all supported Python versions.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os
+import sys
+
+
+def main():
+ """Main program entry point."""
+ ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+ source_root = os.path.join(ansible_root, 'test', 'lib')
+
+ if os.path.exists(os.path.join(source_root, 'ansible_test', '_internal', '__init__.py')):
+ # running from source, use that version of ansible-test instead of any version that may already be installed
+ sys.path.insert(0, source_root)
+
+ # noinspection PyProtectedMember
+ from ansible_test._util.target.common.constants import CONTROLLER_PYTHON_VERSIONS
+
+ if version_to_str(sys.version_info[:2]) not in CONTROLLER_PYTHON_VERSIONS:
+ raise SystemExit('This version of ansible-test cannot be executed with Python version %s. Supported Python versions are: %s' % (
+ version_to_str(sys.version_info[:3]), ', '.join(CONTROLLER_PYTHON_VERSIONS)))
+
+ # noinspection PyProtectedMember
+ from ansible_test._internal import main as cli_main
+
+ cli_main()
+
+
+def version_to_str(version):
+ """Return a version string from a version tuple."""
+ return '.'.join(str(n) for n in version)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/lib/ansible_test/_util/target/common/__init__.py b/test/lib/ansible_test/_util/target/common/__init__.py
new file mode 100644
index 0000000000..d6fc0a8614
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/common/__init__.py
@@ -0,0 +1,3 @@
+"""Nearly empty __init__.py to allow importing under Python 2.x."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
diff --git a/test/lib/ansible_test/_util/target/common/constants.py b/test/lib/ansible_test/_util/target/common/constants.py
new file mode 100644
index 0000000000..9902b046a6
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/common/constants.py
@@ -0,0 +1,45 @@
+"""Constants used by ansible-test. Imports should not be used in this file."""
+
+# NOTE: This file resides in the _util/target directory to ensure compatibility with all supported Python versions.
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True.
+# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time.
+SOFT_RLIMIT_NOFILE = 1024
+
+# File used to track the ansible-test test execution timeout.
+TIMEOUT_PATH = '.ansible-test-timeout.json'
+
+REMOTE_ONLY_PYTHON_VERSIONS = (
+ '2.6',
+ '2.7',
+ '3.5',
+ '3.6',
+ '3.7',
+)
+
+CONTROLLER_PYTHON_VERSIONS = (
+ '3.8',
+ '3.9',
+ '3.10',
+)
+
+CONTROLLER_MIN_PYTHON_VERSION = CONTROLLER_PYTHON_VERSIONS[0]
+SUPPORTED_PYTHON_VERSIONS = REMOTE_ONLY_PYTHON_VERSIONS + CONTROLLER_PYTHON_VERSIONS
+
+COVERAGE_REQUIRED_VERSION = '4.5.4'
+
+REMOTE_PROVIDERS = [
+ 'default',
+ 'aws',
+ 'azure',
+ 'ibmps',
+ 'parallels',
+]
+
+SECCOMP_CHOICES = [
+ 'default',
+ 'unconfined',
+]
diff --git a/test/lib/ansible_test/_util/target/sanity/compile/compile.py b/test/lib/ansible_test/_util/target/sanity/compile/compile.py
index 3f6fc96260..e2302fc0e7 100755..100644
--- a/test/lib/ansible_test/_util/target/sanity/compile/compile.py
+++ b/test/lib/ansible_test/_util/target/sanity/compile/compile.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Python syntax checker with lint friendly output."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -11,6 +10,7 @@ Text = type(u'')
def main():
+ """Main program entry point."""
for path in sys.argv[1:] or sys.stdin.read().splitlines():
with open(path, 'rb') as source_fd:
source = source_fd.read()
diff --git a/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py b/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py
index 09be9576d9..1164168e3e 100644
--- a/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py
+++ b/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py
@@ -18,6 +18,7 @@ ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:'
def default(value):
+ """Custom default serializer which supports datetime.date types."""
if isinstance(value, datetime.date):
return '%s%s' % (ISO_DATE_MARKER, value.isoformat())
diff --git a/test/lib/ansible_test/_util/target/setup/bootstrap.sh b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
new file mode 100644
index 0000000000..36ca68f494
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/setup/bootstrap.sh
@@ -0,0 +1,323 @@
+#!/bin/sh
+
+set -eu
+
+install_ssh_keys()
+{
+ if [ ! -f "${ssh_private_key_path}" ]; then
+ # write public/private ssh key pair
+ public_key_path="${ssh_private_key_path}.pub"
+
+ # shellcheck disable=SC2174
+ mkdir -m 0700 -p "${ssh_path}"
+ touch "${public_key_path}" "${ssh_private_key_path}"
+ chmod 0600 "${public_key_path}" "${ssh_private_key_path}"
+ echo "${ssh_public_key}" > "${public_key_path}"
+ echo "${ssh_private_key}" > "${ssh_private_key_path}"
+
+ # add public key to authorized_keys
+ authoried_keys_path="${HOME}/.ssh/authorized_keys"
+
+ # the existing file is overwritten to avoid conflicts (ex: RHEL on EC2 blocks root login)
+ cat "${public_key_path}" > "${authoried_keys_path}"
+ chmod 0600 "${authoried_keys_path}"
+
+ # add localhost's server keys to known_hosts
+ known_hosts_path="${HOME}/.ssh/known_hosts"
+
+ for key in /etc/ssh/ssh_host_*_key.pub; do
+ echo "localhost $(cat "${key}")" >> "${known_hosts_path}"
+ done
+ fi
+}
+
+customize_bashrc()
+{
+ true > ~/.bashrc
+
+ # Show color `ls` results when available.
+ if ls --color > /dev/null 2>&1; then
+ echo "alias ls='ls --color'" >> ~/.bashrc
+ elif ls -G > /dev/null 2>&1; then
+ echo "alias ls='ls -G'" >> ~/.bashrc
+ fi
+
+ # Improve shell prompts for interactive use.
+ echo "export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '" >> ~/.bashrc
+}
+
+install_pip() {
+ if ! "${python_interpreter}" -m pip.__main__ --version --disable-pip-version-check 2>/dev/null; then
+ case "${python_version}" in
+ *)
+ pip_bootstrap_url="https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-20.3.4.py"
+ ;;
+ esac
+
+ while true; do
+ curl --silent --show-error "${pip_bootstrap_url}" -o /tmp/get-pip.py && \
+ "${python_interpreter}" /tmp/get-pip.py --disable-pip-version-check --quiet && \
+ rm /tmp/get-pip.py \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+ fi
+}
+
+pip_install() {
+ pip_packages="$1"
+
+ while true; do
+ # shellcheck disable=SC2086
+ "${python_interpreter}" -m pip install --disable-pip-version-check ${pip_packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+}
+
+bootstrap_remote_aix()
+{
+ chfs -a size=1G /
+ chfs -a size=4G /usr
+ chfs -a size=1G /var
+ chfs -a size=1G /tmp
+ chfs -a size=2G /opt
+
+ if [ "${python_version}" = "2.7" ]; then
+ python_package_version=""
+ else
+ python_package_version="3"
+ fi
+
+ packages="
+ gcc
+ python${python_package_version}
+ python${python_package_version}-devel
+ python${python_package_version}-pip
+ "
+
+ while true; do
+ # shellcheck disable=SC2086
+ yum install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+}
+
+bootstrap_remote_freebsd()
+{
+ if [ "${python_version}" = "2.7" ]; then
+ # on Python 2.7 our only option is to use virtualenv
+ virtualenv_pkg="py27-virtualenv"
+ else
+ # on Python 3.x we'll use the built-in venv instead
+ virtualenv_pkg=""
+ fi
+
+ packages="
+ python${python_package_version}
+ ${virtualenv_pkg}
+ bash
+ curl
+ gtar
+ sudo
+ "
+
+ if [ "${controller}" ]; then
+ # Declare platform/python version combinations which do not have supporting OS packages available.
+ # For these combinations ansible-test will use pip to install the requirements instead.
+ case "${platform_version}/${python_version}" in
+ "11.4/3.8")
+ have_os_packages=""
+ ;;
+ "12.2/3.8")
+ have_os_packages=""
+ ;;
+ "13.0/3.8")
+ have_os_packages=""
+ ;;
+ "13.0/3.9")
+ have_os_packages=""
+ ;;
+ *)
+ have_os_packages="yes"
+ ;;
+ esac
+
+ # PyYAML is never installed with an OS package since it does not include libyaml support.
+ # Instead, ansible-test will install it using pip.
+ if [ "${have_os_packages}" ]; then
+ jinja2_pkg="py${python_package_version}-Jinja2"
+ cryptography_pkg="py${python_package_version}-cryptography"
+ else
+ jinja2_pkg=""
+ cryptography_pkg=""
+ fi
+
+ packages="
+ ${packages}
+ libyaml
+ ${jinja2_pkg}
+ ${cryptography_pkg}
+ "
+ fi
+
+ while true; do
+ # shellcheck disable=SC2086
+ env ASSUME_ALWAYS_YES=YES pkg bootstrap && \
+ pkg install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+
+ install_pip
+
+ if ! grep '^PermitRootLogin yes$' /etc/ssh/sshd_config > /dev/null; then
+ sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config
+ service sshd restart
+ fi
+}
+
+bootstrap_remote_macos()
+{
+ # Silence macOS deprecation warning for bash.
+ echo "export BASH_SILENCE_DEPRECATION_WARNING=1" >> ~/.bashrc
+
+ # Make sure ~/ansible/ is the starting directory for interactive shells on the control node.
+ # The root home directory is under a symlink. Without this the real path will be displayed instead.
+ if [ "${controller}" ]; then
+ echo "cd ~/ansible/" >> ~/.bashrc
+ fi
+
+ # Make sure commands like 'brew' can be found.
+ # This affects users with the 'zsh' shell, as well as 'root' accessed using 'sudo' from a user with 'zsh' for a shell.
+ # shellcheck disable=SC2016
+ echo 'PATH="/usr/local/bin:$PATH"' > /etc/zshenv
+}
+
+bootstrap_remote_rhel_7()
+{
+ packages="
+ gcc
+ python-devel
+ python-virtualenv
+ "
+
+ if [ "${controller}" ]; then
+ packages="
+ ${packages}
+ python2-cryptography
+ "
+ fi
+
+ while true; do
+ # shellcheck disable=SC2086
+ yum install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+
+ install_pip
+}
+
+bootstrap_remote_rhel_8()
+{
+ if [ "${python_version}" = "3.6" ]; then
+ py_pkg_prefix="python3"
+ else
+ py_pkg_prefix="python${python_package_version}"
+ fi
+
+ packages="
+ gcc
+ ${py_pkg_prefix}-devel
+ "
+
+ if [ "${controller}" ]; then
+ packages="
+ ${packages}
+ ${py_pkg_prefix}-jinja2
+ ${py_pkg_prefix}-cryptography
+ "
+ fi
+
+ while true; do
+ # shellcheck disable=SC2086
+ yum module install -q -y "python${python_package_version}" && \
+ yum install -q -y ${packages} \
+ && break
+ echo "Failed to install packages. Sleeping before trying again..."
+ sleep 10
+ done
+}
+
+bootstrap_remote_rhel()
+{
+ case "${platform_version}" in
+ 7.*) bootstrap_remote_rhel_7 ;;
+ 8.*) bootstrap_remote_rhel_8 ;;
+ esac
+
+ # pin packaging and pyparsing to match the downstream vendored versions
+ pip_packages="
+ packaging==20.4
+ pyparsing==2.4.7
+ "
+
+ pip_install "${pip_packages}"
+}
+
+bootstrap_docker()
+{
+ # Required for newer mysql-server packages to install/upgrade on Ubuntu 16.04.
+ rm -f /usr/sbin/policy-rc.d
+}
+
+bootstrap_remote()
+{
+ for python_version in ${python_versions}; do
+ echo "Bootstrapping Python ${python_version}"
+
+ python_interpreter="python${python_version}"
+ python_package_version="$(echo "${python_version}" | tr -d '.')"
+
+ case "${platform}" in
+ "aix") bootstrap_remote_aix ;;
+ "freebsd") bootstrap_remote_freebsd ;;
+ "macos") bootstrap_remote_macos ;;
+ "rhel") bootstrap_remote_rhel ;;
+ esac
+ done
+}
+
+bootstrap()
+{
+ ssh_path="${HOME}/.ssh"
+ ssh_private_key_path="${ssh_path}/id_${ssh_key_type}"
+
+ install_ssh_keys
+ customize_bashrc
+
+ case "${bootstrap_type}" in
+ "docker") bootstrap_docker ;;
+ "remote") bootstrap_remote ;;
+ esac
+}
+
+# These variables will be templated before sending the script to the host.
+# They are at the end of the script to maintain line numbers for debugging purposes.
+bootstrap_type=#{bootstrap_type}
+controller=#{controller}
+platform=#{platform}
+platform_version=#{platform_version}
+python_versions=#{python_versions}
+ssh_key_type=#{ssh_key_type}
+ssh_private_key=#{ssh_private_key}
+ssh_public_key=#{ssh_public_key}
+
+bootstrap
diff --git a/test/lib/ansible_test/_util/target/setup/docker.sh b/test/lib/ansible_test/_util/target/setup/docker.sh
deleted file mode 100644
index ea60e1a6f3..0000000000
--- a/test/lib/ansible_test/_util/target/setup/docker.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/sh
-
-set -eu
-
-# Required for newer mysql-server packages to install/upgrade on Ubuntu 16.04.
-rm -f /usr/sbin/policy-rc.d
-
-# Improve prompts on remote host for interactive use.
-# `cat << EOF > ~/.bashrc` flakes sometimes since /tmp may not be ready yet in
-# the container. So don't do that
-echo "alias ls='ls --color=auto'" > ~/.bashrc
-echo "export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '" >> ~/.bashrc
-echo "cd ~/ansible/" >> ~/.bashrc
diff --git a/test/lib/ansible_test/_util/controller/tools/quiet_pip.py b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
index e1bb824646..83d4576b08 100644
--- a/test/lib/ansible_test/_util/controller/tools/quiet_pip.py
+++ b/test/lib/ansible_test/_util/target/setup/quiet_pip.py
@@ -11,6 +11,7 @@ BUILTIN_FILTERER_FILTER = logging.Filterer.filter
LOGGING_MESSAGE_FILTER = re.compile("^("
".*Running pip install with root privileges is generally not a good idea.*|" # custom Fedora patch [1]
+ ".*Running pip as the 'root' user can result in broken permissions .*|" # pip 21.1
"DEPRECATION: Python 2.7 will reach the end of its life .*|" # pip 19.2.3
"Ignoring .*: markers .* don't match your environment|"
"Looking in indexes: .*|" # pypi-test-container
diff --git a/test/lib/ansible_test/_util/target/setup/remote.sh b/test/lib/ansible_test/_util/target/setup/remote.sh
deleted file mode 100644
index 9348ac6f9f..0000000000
--- a/test/lib/ansible_test/_util/target/setup/remote.sh
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/bin/sh
-
-set -eu
-
-platform=#{platform}
-platform_version=#{platform_version}
-python_version=#{python_version}
-
-python_interpreter="python${python_version}"
-
-cd ~/
-
-install_pip () {
- if ! "${python_interpreter}" -m pip.__main__ --version --disable-pip-version-check 2>/dev/null; then
- case "${python_version}" in
- *)
- pip_bootstrap_url="https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-20.3.4.py"
- ;;
- esac
- curl --silent --show-error "${pip_bootstrap_url}" -o /tmp/get-pip.py
- "${python_interpreter}" /tmp/get-pip.py --disable-pip-version-check --quiet
- rm /tmp/get-pip.py
- fi
-}
-
-if [ "${platform}" = "freebsd" ]; then
- py_version="$(echo "${python_version}" | tr -d '.')"
-
- if [ "${py_version}" = "27" ]; then
- # on Python 2.7 our only option is to use virtualenv
- virtualenv_pkg="py27-virtualenv"
- else
- # on Python 3.x we'll use the built-in venv instead
- virtualenv_pkg=""
- fi
-
- # Declare platform/python version combinations which do not have supporting OS packages available.
- # For these combinations ansible-test will use pip to install the requirements instead.
- case "${platform_version}/${python_version}" in
- "11.4/3.8")
- have_os_packages=""
- ;;
- "12.2/3.8")
- have_os_packages=""
- ;;
- *)
- have_os_packages="yes"
- ;;
- esac
-
- # PyYAML is never installed with an OS package since it does not include libyaml support.
- # Instead, ansible-test will always install it using pip.
- if [ "${have_os_packages}" ]; then
- jinja2_pkg="py${py_version}-Jinja2"
- cryptography_pkg="py${py_version}-cryptography"
- else
- jinja2_pkg=""
- cryptography_pkg=""
- fi
-
- while true; do
- # shellcheck disable=SC2086
- env ASSUME_ALWAYS_YES=YES pkg bootstrap && \
- pkg install -q -y \
- bash \
- curl \
- gtar \
- libyaml \
- "python${py_version}" \
- ${jinja2_pkg} \
- ${cryptography_pkg} \
- ${virtualenv_pkg} \
- sudo \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-
- install_pip
-
- if ! grep '^PermitRootLogin yes$' /etc/ssh/sshd_config > /dev/null; then
- sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config
- service sshd restart
- fi
-elif [ "${platform}" = "rhel" ]; then
- if grep '8\.' /etc/redhat-release; then
- py_version="$(echo "${python_version}" | tr -d '.')"
-
- if [ "${py_version}" = "36" ]; then
- py_pkg_prefix="python3"
- else
- py_pkg_prefix="python${py_version}"
- fi
-
- while true; do
- yum module install -q -y "python${py_version}" && \
- yum install -q -y \
- gcc \
- "${py_pkg_prefix}-devel" \
- "${py_pkg_prefix}-jinja2" \
- "${py_pkg_prefix}-cryptography" \
- iptables \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
- else
- while true; do
- yum install -q -y \
- gcc \
- python-devel \
- python-virtualenv \
- python2-cryptography \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-
- install_pip
- fi
-
- # pin packaging and pyparsing to match the downstream vendored versions
- "${python_interpreter}" -m pip install packaging==20.4 pyparsing==2.4.7 --disable-pip-version-check
-elif [ "${platform}" = "centos" ]; then
- while true; do
- yum install -q -y \
- gcc \
- python-devel \
- python-virtualenv \
- python2-cryptography \
- libffi-devel \
- openssl-devel \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-
- install_pip
-elif [ "${platform}" = "osx" ]; then
- while true; do
- pip install --disable-pip-version-check --quiet \
- 'virtualenv==16.7.10' \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-elif [ "${platform}" = "aix" ]; then
- chfs -a size=1G /
- chfs -a size=4G /usr
- chfs -a size=1G /var
- chfs -a size=1G /tmp
- chfs -a size=2G /opt
- while true; do
- yum install -q -y \
- gcc \
- libffi-devel \
- python-jinja2 \
- python-cryptography \
- python-pip && \
- pip install --disable-pip-version-check --quiet \
- 'virtualenv==16.7.10' \
- && break
- echo "Failed to install packages. Sleeping before trying again..."
- sleep 10
- done
-fi
-
-# Improve prompts on remote host for interactive use.
-# shellcheck disable=SC1117
-cat << EOF > ~/.bashrc
-if ls --color > /dev/null 2>&1; then
- alias ls='ls --color'
-elif ls -G > /dev/null 2>&1; then
- alias ls='ls -G'
-fi
-export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
-EOF
-
-# Make sure ~/ansible/ is the starting directory for interactive shells.
-if [ "${platform}" = "osx" ]; then
- echo "cd ~/ansible/" >> ~/.bashrc
-elif [ "${platform}" = "macos" ] ; then
- echo "export BASH_SILENCE_DEPRECATION_WARNING=1" >> ~/.bashrc
- echo "cd ~/ansible/" >> ~/.bashrc
-fi
diff --git a/test/lib/ansible_test/_util/target/setup/requirements.py b/test/lib/ansible_test/_util/target/setup/requirements.py
new file mode 100644
index 0000000000..0e3b1e634a
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/setup/requirements.py
@@ -0,0 +1,252 @@
+"""A tool for installing test requirements on the controller and target host."""
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# pylint: disable=wrong-import-position
+
+import resource
+
+# Setting a low soft RLIMIT_NOFILE value will improve the performance of subprocess.Popen on Python 2.x when close_fds=True.
+# This will affect all Python subprocesses. It will also affect the current Python process if set before subprocess is imported for the first time.
+SOFT_RLIMIT_NOFILE = 1024
+
+CURRENT_RLIMIT_NOFILE = resource.getrlimit(resource.RLIMIT_NOFILE)
+DESIRED_RLIMIT_NOFILE = (SOFT_RLIMIT_NOFILE, CURRENT_RLIMIT_NOFILE[1])
+
+if DESIRED_RLIMIT_NOFILE < CURRENT_RLIMIT_NOFILE:
+ resource.setrlimit(resource.RLIMIT_NOFILE, DESIRED_RLIMIT_NOFILE)
+ CURRENT_RLIMIT_NOFILE = DESIRED_RLIMIT_NOFILE
+
+import base64
+import errno
+import io
+import json
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+try:
+ import typing as t
+except ImportError:
+ t = None
+
+try:
+ from shlex import quote as cmd_quote
+except ImportError:
+ # noinspection PyProtectedMember
+ from pipes import quote as cmd_quote
+
+ENCODING = 'utf-8'
+PAYLOAD = b'{payload}' # base-64 encoded JSON payload which will be populated before this script is executed
+
+Text = type(u'')
+
+VERBOSITY = 0
+CONSOLE = sys.stderr
+
+
+def main(): # type: () -> None
+ """Main program entry point."""
+ global VERBOSITY # pylint: disable=global-statement
+
+ payload = json.loads(to_text(base64.b64decode(PAYLOAD)))
+
+ VERBOSITY = payload['verbosity']
+
+ script = payload['script']
+ commands = payload['commands']
+
+ with tempfile.NamedTemporaryFile(prefix='ansible-test-', suffix='-pip.py') as pip:
+ pip.write(to_bytes(script))
+ pip.flush()
+
+ for name, options in commands:
+ try:
+ globals()[name](pip.name, options)
+ except ApplicationError as ex:
+ print(ex)
+ sys.exit(1)
+
+
+def install(pip, options): # type: (str, t.Dict[str, t.Any]) -> None
+ """Perform a pip install."""
+ requirements = options['requirements']
+ constraints = options['constraints']
+ packages = options['packages']
+
+ tempdir = tempfile.mkdtemp(prefix='ansible-test-', suffix='-requirements')
+
+ try:
+ options = common_pip_options()
+ options.extend(packages)
+
+ for path, content in requirements:
+ write_text_file(os.path.join(tempdir, path), content, True)
+ options.extend(['-r', path])
+
+ for path, content in constraints:
+ write_text_file(os.path.join(tempdir, path), content, True)
+ options.extend(['-c', path])
+
+ command = [sys.executable, pip, 'install'] + options
+
+ execute_command(command, tempdir)
+ finally:
+ remove_tree(tempdir)
+
+
+def uninstall(pip, options): # type: (str, t.Dict[str, t.Any]) -> None
+ """Perform a pip uninstall."""
+ packages = options['packages']
+ ignore_errors = options['ignore_errors']
+
+ options = common_pip_options()
+ options.extend(packages)
+
+ command = [sys.executable, pip, 'uninstall', '-y'] + options
+
+ try:
+ execute_command(command, capture=True)
+ except SubprocessError:
+ if not ignore_errors:
+ raise
+
+
+def common_pip_options(): # type: () -> t.List[str]
+ """Return a list of common pip options."""
+ return [
+ '--disable-pip-version-check',
+ ]
+
+
+def devnull(): # type: () -> t.IO[bytes]
+ """Return a file object that references devnull."""
+ try:
+ return devnull.file
+ except AttributeError:
+ devnull.file = open(os.devnull, 'w+b') # pylint: disable=consider-using-with
+
+ return devnull.file
+
+
+class ApplicationError(Exception):
+ """Base class for application exceptions."""
+
+
+class SubprocessError(ApplicationError):
+ """A command returned a non-zero status."""
+ def __init__(self, cmd, status, stdout, stderr): # type: (t.List[str], int, str, str) -> None
+ message = 'A command failed with status %d: %s' % (status, ' '.join(cmd_quote(c) for c in cmd))
+
+ if stderr:
+ message += '\n>>> Standard Error\n%s' % stderr.strip()
+
+ if stdout:
+ message += '\n>>> Standard Output\n%s' % stdout.strip()
+
+ super(SubprocessError, self).__init__(message)
+
+
+def log(message, verbosity=0): # type: (str, int) -> None
+ """Log a message to the console if the verbosity is high enough."""
+ if verbosity > VERBOSITY:
+ return
+
+ print(message, file=CONSOLE)
+ CONSOLE.flush()
+
+
+def execute_command(cmd, cwd=None, capture=False): # type: (t.List[str], t.Optional[str], bool) -> None
+ """Execute the specified command."""
+ log('Execute command: %s' % ' '.join(cmd_quote(c) for c in cmd), verbosity=1)
+
+ cmd_bytes = [to_bytes(c) for c in cmd]
+
+ if capture:
+ stdout = subprocess.PIPE
+ stderr = subprocess.PIPE
+ else:
+ stdout = None
+ stderr = None
+
+ process = subprocess.Popen(cmd_bytes, cwd=to_optional_bytes(cwd), stdin=devnull(), stdout=stdout, stderr=stderr) # pylint: disable=consider-using-with
+ stdout_bytes, stderr_bytes = process.communicate()
+ stdout_text = to_optional_text(stdout_bytes) or u''
+ stderr_text = to_optional_text(stderr_bytes) or u''
+
+ if process.returncode != 0:
+ raise SubprocessError(cmd, process.returncode, stdout_text, stderr_text)
+
+
+def write_text_file(path, content, create_directories=False): # type: (str, str, bool) -> None
+ """Write the given text content to the specified path, optionally creating missing directories."""
+ if create_directories:
+ make_dirs(os.path.dirname(path))
+
+ with open_binary_file(path, 'wb') as file_obj:
+ file_obj.write(to_bytes(content))
+
+
+def remove_tree(path): # type: (str) -> None
+ """Remove the specified directory tree."""
+ try:
+ shutil.rmtree(to_bytes(path))
+ except OSError as ex:
+ if ex.errno != errno.ENOENT:
+ raise
+
+
+def make_dirs(path): # type: (str) -> None
+ """Create a directory at path, including any necessary parent directories."""
+ try:
+ os.makedirs(to_bytes(path))
+ except OSError as ex:
+ if ex.errno != errno.EEXIST:
+ raise
+
+
+def open_binary_file(path, mode='rb'): # type: (str, str) -> t.BinaryIO
+ """Open the given path for binary access."""
+ if 'b' not in mode:
+ raise Exception('mode must include "b" for binary files: %s' % mode)
+
+ # noinspection PyTypeChecker
+ return io.open(to_bytes(path), mode) # pylint: disable=consider-using-with
+
+
+def to_optional_bytes(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[bytes]
+ """Return the given value as bytes encoded using UTF-8 if not already bytes, or None if the value is None."""
+ return None if value is None else to_bytes(value, errors)
+
+
+def to_optional_text(value, errors='strict'): # type: (t.Optional[t.AnyStr], str) -> t.Optional[t.Text]
+ """Return the given value as text decoded using UTF-8 if not already text, or None if the value is None."""
+ return None if value is None else to_text(value, errors)
+
+
+def to_bytes(value, errors='strict'): # type: (t.AnyStr, str) -> bytes
+ """Return the given value as bytes encoded using UTF-8 if not already bytes."""
+ if isinstance(value, bytes):
+ return value
+
+ if isinstance(value, Text):
+ return value.encode(ENCODING, errors)
+
+ raise Exception('value is not bytes or text: %s' % type(value))
+
+
+def to_text(value, errors='strict'): # type: (t.AnyStr, str) -> t.Text
+ """Return the given value as text decoded using UTF-8 if not already text."""
+ if isinstance(value, bytes):
+ return value.decode(ENCODING, errors)
+
+ if isinstance(value, Text):
+ return value
+
+ raise Exception('value is not bytes or text: %s' % type(value))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/lib/ansible_test/_util/target/setup/ssh-keys.sh b/test/lib/ansible_test/_util/target/setup/ssh-keys.sh
deleted file mode 100644
index 7846f3fef0..0000000000
--- a/test/lib/ansible_test/_util/target/setup/ssh-keys.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-# Configure SSH keys.
-
-ssh_public_key=#{ssh_public_key}
-ssh_private_key=#{ssh_private_key}
-ssh_key_type=#{ssh_key_type}
-
-ssh_path="${HOME}/.ssh"
-private_key_path="${ssh_path}/id_${ssh_key_type}"
-
-if [ ! -f "${private_key_path}" ]; then
- # write public/private ssh key pair
- public_key_path="${private_key_path}.pub"
-
- # shellcheck disable=SC2174
- mkdir -m 0700 -p "${ssh_path}"
- touch "${public_key_path}" "${private_key_path}"
- chmod 0600 "${public_key_path}" "${private_key_path}"
- echo "${ssh_public_key}" > "${public_key_path}"
- echo "${ssh_private_key}" > "${private_key_path}"
-
- # add public key to authorized_keys
- authoried_keys_path="${HOME}/.ssh/authorized_keys"
-
- # the existing file is overwritten to avoid conflicts (ex: RHEL on EC2 blocks root login)
- cat "${public_key_path}" > "${authoried_keys_path}"
- chmod 0600 "${authoried_keys_path}"
-
- # add localhost's server keys to known_hosts
- known_hosts_path="${HOME}/.ssh/known_hosts"
-
- for key in /etc/ssh/ssh_host_*_key.pub; do
- echo "localhost $(cat "${key}")" >> "${known_hosts_path}"
- done
-fi
diff --git a/test/sanity/code-smell/ansible-requirements.json b/test/sanity/code-smell/ansible-requirements.json
index 4bc356be18..b4b7f2b164 100644
--- a/test/sanity/code-smell/ansible-requirements.json
+++ b/test/sanity/code-smell/ansible-requirements.json
@@ -1,7 +1,7 @@
{
"prefixes": [
"requirements.txt",
- "test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt"
+ "test/lib/ansible_test/_data/requirements/ansible.txt"
],
"output": "path-line-column-message"
}
diff --git a/test/sanity/code-smell/ansible-requirements.py b/test/sanity/code-smell/ansible-requirements.py
index c270b32d52..48ecbaafdb 100755..100644
--- a/test/sanity/code-smell/ansible-requirements.py
+++ b/test/sanity/code-smell/ansible-requirements.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -17,7 +16,7 @@ def read_file(path):
def main():
ORIGINAL_FILE = 'requirements.txt'
- VENDORED_COPY = 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt'
+ VENDORED_COPY = 'test/lib/ansible_test/_data/requirements/ansible.txt'
original_requirements = read_file(ORIGINAL_FILE)
vendored_requirements = read_file(VENDORED_COPY)
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.json b/test/sanity/code-smell/ansible-test-future-boilerplate.json
new file mode 100644
index 0000000000..e689ba5da1
--- /dev/null
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.json
@@ -0,0 +1,9 @@
+{
+ "extensions": [
+ ".py"
+ ],
+ "prefixes": [
+ "test/lib/ansible_test/_internal/"
+ ],
+ "output": "path-message"
+}
diff --git a/test/sanity/code-smell/ansible-test-future-boilerplate.py b/test/sanity/code-smell/ansible-test-future-boilerplate.py
new file mode 100644
index 0000000000..55092a73a3
--- /dev/null
+++ b/test/sanity/code-smell/ansible-test-future-boilerplate.py
@@ -0,0 +1,44 @@
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import ast
+import sys
+
+
+def main():
+ for path in sys.argv[1:] or sys.stdin.read().splitlines():
+ with open(path, 'rb') as path_fd:
+ lines = path_fd.read().splitlines()
+
+ missing = True
+ if not lines:
+ # Files are allowed to be empty of everything including boilerplate
+ missing = False
+
+ for text in lines:
+ if text == b'from __future__ import annotations':
+ missing = False
+ break
+
+ if missing:
+ with open(path) as file:
+ contents = file.read()
+
+ # noinspection PyBroadException
+ try:
+ node = ast.parse(contents)
+
+ # files consisting of only assignments have no need for future import boilerplate
+ # the only exception would be division during assignment, but we'll overlook that for simplicity
+ # the most likely case is that of a documentation only python file
+ if all(isinstance(statement, ast.Assign) for statement in node.body):
+ missing = False
+ except Exception: # pylint: disable=broad-except
+ pass # the compile sanity test will report this error
+
+ if missing:
+ print('%s: missing: from __future__ import annotations' % path)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test/sanity/code-smell/botmeta.py b/test/sanity/code-smell/botmeta.py
index a1ab926e8f..0ab5bf97ed 100755..100644
--- a/test/sanity/code-smell/botmeta.py
+++ b/test/sanity/code-smell/botmeta.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Make sure the data in BOTMETA.yml is valid"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/botmeta.requirements.txt b/test/sanity/code-smell/botmeta.requirements.txt
index d0ed4f2d5b..1281a04528 100644
--- a/test/sanity/code-smell/botmeta.requirements.txt
+++ b/test/sanity/code-smell/botmeta.requirements.txt
@@ -1,2 +1,2 @@
-pyyaml
+pyyaml == 5.4.1
voluptuous == 0.12.1
diff --git a/test/sanity/code-smell/configure-remoting-ps1.py b/test/sanity/code-smell/configure-remoting-ps1.py
index 159004c06b..bd2161067f 100755..100644
--- a/test/sanity/code-smell/configure-remoting-ps1.py
+++ b/test/sanity/code-smell/configure-remoting-ps1.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/deprecated-config.py b/test/sanity/code-smell/deprecated-config.py
index e8a2d8d418..53cb2b93f2 100755..100644
--- a/test/sanity/code-smell/deprecated-config.py
+++ b/test/sanity/code-smell/deprecated-config.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2018, Matt Martz <matt@sivel.net>
#
diff --git a/test/sanity/code-smell/deprecated-config.requirements.txt b/test/sanity/code-smell/deprecated-config.requirements.txt
index 859c4ee7e6..a3a33e6c20 100644
--- a/test/sanity/code-smell/deprecated-config.requirements.txt
+++ b/test/sanity/code-smell/deprecated-config.requirements.txt
@@ -1,2 +1,5 @@
-jinja2 # ansible-core requirement
-pyyaml
+jinja2 == 3.0.1 # ansible-core requirement
+pyyaml == 5.4.1
+
+# dependencies
+MarkupSafe == 2.0.1
diff --git a/test/sanity/code-smell/docs-build.json b/test/sanity/code-smell/docs-build.json
index 0218bfc5cf..a43fa923b2 100644
--- a/test/sanity/code-smell/docs-build.json
+++ b/test/sanity/code-smell/docs-build.json
@@ -1,5 +1,4 @@
{
- "intercept": true,
"disabled": true,
"no_targets": true,
"output": "path-line-column-message"
diff --git a/test/sanity/code-smell/docs-build.py b/test/sanity/code-smell/docs-build.py
index 80eca15f7e..ff7d427a05 100755..100644
--- a/test/sanity/code-smell/docs-build.py
+++ b/test/sanity/code-smell/docs-build.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/docs-build.requirements.txt b/test/sanity/code-smell/docs-build.requirements.txt
index 2e143b037b..36fc363a0e 100644
--- a/test/sanity/code-smell/docs-build.requirements.txt
+++ b/test/sanity/code-smell/docs-build.requirements.txt
@@ -1,8 +1,50 @@
-jinja2
-pyyaml
-resolvelib
-sphinx
-sphinx-notfound-page
-sphinx_ansible_theme
-straight.plugin
+jinja2 == 3.0.1
+pyyaml == 5.4.1
+resolvelib == 0.5.4
+sphinx == 2.1.2
+sphinx-notfound-page == 0.7.1
+sphinx-ansible-theme == 0.8.0
+straight.plugin == 1.5.0
antsibull == 0.26.0
+
+# dependencies
+MarkupSafe == 2.0.1
+aiofiles == 0.7.0
+aiohttp == 3.7.4.post0
+alabaster == 0.7.12
+ansible-pygments == 0.1.0
+antsibull-changelog == 0.9.0
+async-timeout == 3.0.1
+asyncio-pool == 0.5.2
+attrs == 21.2.0
+babel == 2.9.1
+certifi == 2021.5.30
+chardet == 4.0.0
+charset-normalizer == 2.0.5
+docutils == 0.17.1
+idna == 2.5
+imagesize == 1.2.0
+multidict == 5.1.0
+packaging == 21.0
+perky == 0.5.5
+pydantic == 1.8.2
+pygments == 2.10.0
+pyparsing == 2.4.7
+pytz == 2021.1
+requests == 2.26.0
+rstcheck == 3.3.1
+semantic-version == 2.8.5
+sh == 1.14.2
+six == 1.16.0
+snowballstemmer == 2.1.0
+sphinx-rtd-theme == 1.0.0
+sphinxcontrib-applehelp == 1.0.2
+sphinxcontrib-devhelp == 1.0.2
+sphinxcontrib-htmlhelp == 2.0.0
+sphinxcontrib-jsmath == 1.0.1
+sphinxcontrib-qthelp == 1.0.3
+sphinxcontrib-serializinghtml == 1.1.5
+twiggy == 0.5.1
+typing-extensions == 3.10.0.2
+urllib3 == 1.26.6
+yarl == 1.6.3
diff --git a/test/sanity/code-smell/no-unwanted-files.py b/test/sanity/code-smell/no-unwanted-files.py
index bff0915277..1b55c23e6e 100755..100644
--- a/test/sanity/code-smell/no-unwanted-files.py
+++ b/test/sanity/code-smell/no-unwanted-files.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Prevent unwanted files from being added to the source tree."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/obsolete-files.py b/test/sanity/code-smell/obsolete-files.py
index e9ddc8a5de..1fd980271c 100755..100644
--- a/test/sanity/code-smell/obsolete-files.py
+++ b/test/sanity/code-smell/obsolete-files.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Prevent files from being added to directories that are now obsolete."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/package-data.json b/test/sanity/code-smell/package-data.json
index 2b8a5326a8..0aa70a3c9b 100644
--- a/test/sanity/code-smell/package-data.json
+++ b/test/sanity/code-smell/package-data.json
@@ -1,5 +1,4 @@
{
- "intercept": true,
"disabled": true,
"all_targets": true,
"output": "path-message"
diff --git a/test/sanity/code-smell/package-data.py b/test/sanity/code-smell/package-data.py
index d655b923d3..921cb1971f 100755..100644
--- a/test/sanity/code-smell/package-data.py
+++ b/test/sanity/code-smell/package-data.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -33,6 +32,11 @@ def assemble_files_to_ship(complete_file_list):
'test/utils/*',
'test/utils/*/*',
'test/utils/*/*/*',
+ 'test/results/.tmp/*',
+ 'test/results/.tmp/*/*',
+ 'test/results/.tmp/*/*/*',
+ 'test/results/.tmp/*/*/*/*',
+ 'test/results/.tmp/*/*/*/*/*',
'.git*',
)
ignore_files = frozenset((
diff --git a/test/sanity/code-smell/package-data.requirements.txt b/test/sanity/code-smell/package-data.requirements.txt
index 41b3b5772a..8055b3c5c7 100644
--- a/test/sanity/code-smell/package-data.requirements.txt
+++ b/test/sanity/code-smell/package-data.requirements.txt
@@ -1,9 +1,13 @@
-docutils
-jinja2
-packaging
-pyyaml # ansible-core requirement
-resolvelib # ansible-core requirement
-rstcheck
-setuptools
-straight.plugin
+docutils == 0.17.1
+jinja2 == 3.0.1
+packaging == 21.0
+pyyaml == 5.4.1 # ansible-core requirement
+resolvelib == 0.5.4 # ansible-core requirement
+rstcheck == 3.3.1
+straight.plugin == 1.5.0
antsibull-changelog == 0.9.0
+
+# dependencies
+MarkupSafe == 2.0.1
+pyparsing == 2.4.7
+semantic-version == 2.8.5
diff --git a/test/sanity/code-smell/release-names.py b/test/sanity/code-smell/release-names.py
index f8003320a5..4e14506202 100755..100644
--- a/test/sanity/code-smell/release-names.py
+++ b/test/sanity/code-smell/release-names.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2019, Ansible Project
#
diff --git a/test/sanity/code-smell/release-names.requirements.txt b/test/sanity/code-smell/release-names.requirements.txt
index c3726e8bfe..cc530e42c2 100644
--- a/test/sanity/code-smell/release-names.requirements.txt
+++ b/test/sanity/code-smell/release-names.requirements.txt
@@ -1 +1 @@
-pyyaml
+pyyaml == 5.4.1
diff --git a/test/sanity/code-smell/required-and-default-attributes.py b/test/sanity/code-smell/required-and-default-attributes.py
index 5ef410bd04..d71ddeeb29 100755..100644
--- a/test/sanity/code-smell/required-and-default-attributes.py
+++ b/test/sanity/code-smell/required-and-default-attributes.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/rstcheck.py b/test/sanity/code-smell/rstcheck.py
index 885659c3e4..7f7028469f 100755..100644
--- a/test/sanity/code-smell/rstcheck.py
+++ b/test/sanity/code-smell/rstcheck.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
"""Sanity test using rstcheck and sphinx."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
diff --git a/test/sanity/code-smell/rstcheck.requirements.txt b/test/sanity/code-smell/rstcheck.requirements.txt
index 4674b8dffe..071bc5a1f1 100644
--- a/test/sanity/code-smell/rstcheck.requirements.txt
+++ b/test/sanity/code-smell/rstcheck.requirements.txt
@@ -1,2 +1,27 @@
-rstcheck
-sphinx # required for full functionality
+rstcheck == 3.3.1
+sphinx == 2.1.2 # required for full functionality
+
+# dependencies
+Jinja2 == 3.0.1
+MarkupSafe == 2.0.1
+Pygments == 2.10.0
+alabaster == 0.7.12
+babel == 2.9.1
+certifi == 2021.5.30
+charset-normalizer == 2.0.5
+docutils == 0.17.1
+idna == 2.5
+imagesize == 1.2.0
+packaging == 21.0
+pyparsing == 2.4.7
+pytz == 2021.1
+requests == 2.26.0
+rstcheck == 3.3.1
+snowballstemmer == 2.1.0
+sphinxcontrib-applehelp == 1.0.2
+sphinxcontrib-devhelp == 1.0.2
+sphinxcontrib-htmlhelp == 2.0.0
+sphinxcontrib-jsmath == 1.0.1
+sphinxcontrib-qthelp == 1.0.3
+sphinxcontrib-serializinghtml == 1.1.5
+urllib3 == 1.26.6
diff --git a/test/sanity/code-smell/test-constraints.py b/test/sanity/code-smell/test-constraints.py
index 21dea5fab2..8383235e15 100755..100644
--- a/test/sanity/code-smell/test-constraints.py
+++ b/test/sanity/code-smell/test-constraints.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -12,11 +11,8 @@ def main():
requirements = {}
for path in sys.argv[1:] or sys.stdin.read().splitlines():
- if path == 'test/lib/ansible_test/_data/requirements/sanity.import-plugins.txt':
- # This file is an exact copy of requirements.txt that is used in the import
- # sanity test. There is a code-smell test which ensures that the two files
- # are identical, and it is only used inside an empty venv, so we can ignore
- # it here.
+ if path == 'test/lib/ansible_test/_data/requirements/ansible.txt':
+ # This file is an exact copy of the ansible requirements.txt and should not conflict with other constraints.
continue
with open(path, 'r') as path_fd:
requirements[path] = parse_requirements(path_fd.read().splitlines())
@@ -38,7 +34,7 @@ def main():
comment = requirement.group('comment')
is_sanity = path.startswith('test/lib/ansible_test/_data/requirements/sanity.') or path.startswith('test/sanity/code-smell/')
- is_pinned = re.search('^ *== *[0-9.]+$', constraints)
+ is_pinned = re.search('^ *== *[0-9.]+(\\.post[0-9]+)?$', constraints)
is_constraints = path == constraints_path
if is_sanity:
@@ -63,11 +59,6 @@ def main():
print('%s:%d:%d: put the constraint (%s%s) in `%s`' % (path, lineno, 1, name, raw_constraints, constraints_path))
for name, requirements in frozen_sanity.items():
- for req in requirements:
- if name in non_sanity_requirements and req[3].group('constraints').strip():
- print('%s:%d:%d: sanity constraint (%s) for package `%s` is not allowed because `%s` is used outside sanity tests' % (
- req[0], req[1], req[3].start('constraints') + 1, req[3].group('constraints'), name, name))
-
if len(set(req[3].group('constraints').strip() for req in requirements)) != 1:
for req in requirements:
print('%s:%d:%d: sanity constraint (%s) does not match others for package `%s`' % (
diff --git a/test/sanity/code-smell/update-bundled.py b/test/sanity/code-smell/update-bundled.py
index 85ba8532f1..009f801bfd 100755..100644
--- a/test/sanity/code-smell/update-bundled.py
+++ b/test/sanity/code-smell/update-bundled.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2018, Ansible Project
#
diff --git a/test/sanity/code-smell/update-bundled.requirements.txt b/test/sanity/code-smell/update-bundled.requirements.txt
index 748809f75c..101e3fdb55 100644
--- a/test/sanity/code-smell/update-bundled.requirements.txt
+++ b/test/sanity/code-smell/update-bundled.requirements.txt
@@ -1 +1,4 @@
-packaging
+packaging == 21.0
+
+# dependencies
+pyparsing == 2.4.7
diff --git a/test/sanity/ignore.txt b/test/sanity/ignore.txt
index 1b3fe15ff4..76baca0745 100644
--- a/test/sanity/ignore.txt
+++ b/test/sanity/ignore.txt
@@ -182,8 +182,7 @@ test/integration/targets/win_script/files/test_script_removes_file.ps1 pslint:PS
test/integration/targets/win_script/files/test_script_with_args.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/win_script/files/test_script_with_splatting.ps1 pslint:PSAvoidUsingWriteHost # Keep
test/integration/targets/windows-minimal/library/win_ping_syntax_error.ps1 pslint!skip
-test/lib/ansible_test/_data/requirements/integration.cloud.azure.txt test-constraints
-test/lib/ansible_test/_data/requirements/sanity.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
+test/lib/ansible_test/_data/requirements/sanity.pslint.ps1 pslint:PSCustomUseLiteralPath # Uses wildcards on purpose
test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 pslint:PSCustomUseLiteralPath
test/support/integration/plugins/inventory/aws_ec2.py pylint:use-a-generator
test/support/integration/plugins/module_utils/network/common/utils.py pylint:use-a-generator
diff --git a/test/units/requirements.txt b/test/units/requirements.txt
index 226ebee066..9ed7268fab 100644
--- a/test/units/requirements.txt
+++ b/test/units/requirements.txt
@@ -1,5 +1,6 @@
-passlib
-pywinrm
+bcrypt ; python_version >= '3.8' # controller only
+passlib ; python_version >= '3.8' # controller only
+pexpect ; python_version >= '3.8' # controller only
pytz
+pywinrm ; python_version >= '3.8' # controller only
unittest2 ; python_version < '2.7'
-pexpect
diff --git a/test/utils/shippable/incidental/network.sh b/test/utils/shippable/incidental/network.sh
index 4575476a92..1c489f9e31 100755
--- a/test/utils/shippable/incidental/network.sh
+++ b/test/utils/shippable/incidental/network.sh
@@ -17,7 +17,7 @@ provider="${P:-default}"
# python versions to test in order
# all versions run full tests
IFS=' ' read -r -a python_versions <<< \
- "$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import util; print(" ".join(util.CONTROLLER_PYTHON_VERSIONS))')"
+ "$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import constants; print(" ".join(constants.CONTROLLER_PYTHON_VERSIONS))')"
if [ "${python_version}" ]; then
# limit tests to a single python version
diff --git a/test/utils/shippable/incidental/windows.sh b/test/utils/shippable/incidental/windows.sh
index ed9645e1cf..0ca11bfac6 100755
--- a/test/utils/shippable/incidental/windows.sh
+++ b/test/utils/shippable/incidental/windows.sh
@@ -14,10 +14,10 @@ provider="${P:-default}"
# python versions to test in order
IFS=' ' read -r -a python_versions <<< \
- "$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import util; print(" ".join(util.CONTROLLER_PYTHON_VERSIONS))')"
+ "$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import constants; print(" ".join(constants.CONTROLLER_PYTHON_VERSIONS))')"
# python version to run full tests on while other versions run minimal tests
-python_default="$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import util; print(util.CONTROLLER_MIN_PYTHON_VERSION)')"
+python_default="$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import constants; print(constants.CONTROLLER_MIN_PYTHON_VERSION)')"
# version to test when only testing a single version
single_version=2012-R2
diff --git a/test/utils/shippable/sanity.sh b/test/utils/shippable/sanity.sh
index d58924b4aa..6dc4d1da9a 100755
--- a/test/utils/shippable/sanity.sh
+++ b/test/utils/shippable/sanity.sh
@@ -23,5 +23,5 @@ esac
# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
- --docker --docker-keep-git --base-branch "${base_branch}" \
+ --docker --keep-git --base-branch "${base_branch}" \
"${options[@]}" --allow-disabled
diff --git a/test/utils/shippable/windows.sh b/test/utils/shippable/windows.sh
index 1e76864959..cbb9ea9348 100755
--- a/test/utils/shippable/windows.sh
+++ b/test/utils/shippable/windows.sh
@@ -15,10 +15,10 @@ provider="${P:-default}"
# python versions to test in order
IFS=' ' read -r -a python_versions <<< \
- "$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import util; print(" ".join(util.CONTROLLER_PYTHON_VERSIONS))')"
+ "$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import constants; print(" ".join(constants.CONTROLLER_PYTHON_VERSIONS))')"
# python version to run full tests on while other versions run minimal tests
-python_default="$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import util; print(util.CONTROLLER_MIN_PYTHON_VERSION)')"
+python_default="$(PYTHONPATH="${PWD}/test/lib" python -c 'from ansible_test._internal import constants; print(constants.CONTROLLER_MIN_PYTHON_VERSION)')"
# version to test when only testing a single version
single_version=2012-R2