From 99a79e1969472ab4ba5c0bd0cab61927b9aa93d9 Mon Sep 17 00:00:00 2001
From: Matt Clay
Date: Thu, 12 Aug 2021 15:07:06 -0700
Subject: ansible-test - Move code from _data to _util. (#75495)

* Update paths to match relocated files.
* Update ansible-test symlink paths.
* Update path classification.
* Update MANIFEST.in
* Update sanity test context paths.
* Update sanity ignores.
* Update shebang sanity test.
* Update configure-remoting-ps1 sanity test.
* Update BOTMETA.yml to reflect new paths.
* Update paths in collection loader comments.
* Update Makefile for ansible-test.
* Update docs.
* Add changelog fragment.
---
 .../_data/cli/ansible_test_cli_stub.py             | 28 -
 test/lib/ansible_test/_data/collection_detail.py   | 95 -
 test/lib/ansible_test/_data/coverage_stub.ps1      | 38 -
 test/lib/ansible_test/_data/injector/ansible       | 1 -
 .../lib/ansible_test/_data/injector/ansible-config | 1 -
 .../ansible_test/_data/injector/ansible-connection | 1 -
 .../ansible_test/_data/injector/ansible-console    | 1 -
 test/lib/ansible_test/_data/injector/ansible-doc   | 1 -
 .../lib/ansible_test/_data/injector/ansible-galaxy | 1 -
 .../ansible_test/_data/injector/ansible-inventory  | 1 -
 .../ansible_test/_data/injector/ansible-playbook   | 1 -
 test/lib/ansible_test/_data/injector/ansible-pull  | 1 -
 test/lib/ansible_test/_data/injector/ansible-test  | 1 -
 test/lib/ansible_test/_data/injector/ansible-vault | 1 -
 test/lib/ansible_test/_data/injector/importer.py   | 1 -
 test/lib/ansible_test/_data/injector/pytest        | 1 -
 test/lib/ansible_test/_data/injector/python.py     | 80 -
 test/lib/ansible_test/_data/injector/virtualenv.sh | 14 -
 .../_data/legacy_collection_loader/__init__.py     | 31 -
 .../legacy_collection_loader/_collection_config.py | 107 -
 .../legacy_collection_loader/_collection_finder.py | 1067 ---------
 .../legacy_collection_loader/_collection_meta.py   | 37 -
 .../pytest/plugins/ansible_pytest_collections.py   | 75 -
 .../pytest/plugins/ansible_pytest_coverage.py      | 68 -
 test/lib/ansible_test/_data/quiet_pip.py           | 75 -
 .../sanity/code-smell/action-plugin-docs.json      | 13 -
 .../_data/sanity/code-smell/action-plugin-docs.py  | 68 -
 .../_data/sanity/code-smell/changelog.json         | 8 -
 .../_data/sanity/code-smell/changelog.py           | 56 -
 .../_data/sanity/code-smell/changelog/sphinx.py    | 5 -
 .../_data/sanity/code-smell/empty-init.json        | 14 -
 .../_data/sanity/code-smell/empty-init.py          | 16 -
 .../code-smell/future-import-boilerplate.json      | 7 -
 .../sanity/code-smell/future-import-boilerplate.py | 46 -
 .../_data/sanity/code-smell/line-endings.json      | 4 -
 .../_data/sanity/code-smell/line-endings.py        | 18 -
 .../sanity/code-smell/metaclass-boilerplate.json   | 7 -
 .../sanity/code-smell/metaclass-boilerplate.py     | 44 -
 .../_data/sanity/code-smell/no-assert.json         | 10 -
 .../_data/sanity/code-smell/no-assert.py           | 24 -
 .../_data/sanity/code-smell/no-basestring.json     | 7 -
 .../_data/sanity/code-smell/no-basestring.py       | 21 -
 .../_data/sanity/code-smell/no-dict-iteritems.json | 7 -
 .../_data/sanity/code-smell/no-dict-iteritems.py   | 21 -
 .../_data/sanity/code-smell/no-dict-iterkeys.json  | 7 -
 .../_data/sanity/code-smell/no-dict-iterkeys.py    | 21 -
 .../sanity/code-smell/no-dict-itervalues.json      | 7 -
 .../_data/sanity/code-smell/no-dict-itervalues.py  | 21 -
 .../_data/sanity/code-smell/no-get-exception.json  | 7 -
 .../_data/sanity/code-smell/no-get-exception.py    | 28 -
 .../sanity/code-smell/no-illegal-filenames.json    | 5 -
 .../sanity/code-smell/no-illegal-filenames.py      | 82 -
 .../_data/sanity/code-smell/no-main-display.json   | 10 -
 .../_data/sanity/code-smell/no-main-display.py     | 21 -
 .../_data/sanity/code-smell/no-smart-quotes.json   | 5 -
 .../_data/sanity/code-smell/no-smart-quotes.py     | 28 -
 .../sanity/code-smell/no-unicode-literals.json     | 7 -
 .../_data/sanity/code-smell/no-unicode-literals.py | 21 -
 .../_data/sanity/code-smell/replace-urlopen.json   | 7 -
 .../_data/sanity/code-smell/replace-urlopen.py     | 21 -
 .../_data/sanity/code-smell/runtime-metadata.json  | 11 -
 .../_data/sanity/code-smell/runtime-metadata.py    | 281 ---
 .../_data/sanity/code-smell/shebang.json           | 4 -
 .../_data/sanity/code-smell/shebang.py             | 120 -
 .../_data/sanity/code-smell/symlinks.json          | 5 -
 .../_data/sanity/code-smell/symlinks.py            | 32 -
 .../sanity/code-smell/use-argspec-type-path.json   | 10 -
 .../sanity/code-smell/use-argspec-type-path.py     | 21 -
 .../_data/sanity/code-smell/use-compat-six.json    | 6 -
 .../_data/sanity/code-smell/use-compat-six.py      | 21 -
 .../ansible_test/_data/sanity/compile/compile.py   | 47 -
 .../ansible_test/_data/sanity/import/importer.py   | 541 -----
 .../_data/sanity/import/yaml_to_json.py            | 27 -
 .../sanity/integration-aliases/yaml_to_json.py     | 15 -
 .../_data/sanity/pep8/current-ignore.txt           | 4 -
 .../ansible_test/_data/sanity/pslint/pslint.ps1    | 43 -
 .../ansible_test/_data/sanity/pslint/settings.psd1 | 13 -
 .../_data/sanity/pylint/config/ansible-test.cfg    | 54 -
 .../_data/sanity/pylint/config/collection.cfg      | 143 --
 .../_data/sanity/pylint/config/default.cfg         | 148 --
 .../_data/sanity/pylint/config/sanity.cfg          | 55 -
 .../_data/sanity/pylint/plugins/deprecated.py      | 258 ---
 .../_data/sanity/pylint/plugins/string_format.py   | 84 -
 .../_data/sanity/pylint/plugins/unwanted.py        | 240 --
 .../_data/sanity/shellcheck/exclude.txt            | 3 -
 .../_data/sanity/validate-modules/main.py          | 8 -
 .../_data/sanity/validate-modules/validate-modules | 1 -
 .../validate-modules/validate_modules/__init__.py  | 20 -
 .../validate-modules/validate_modules/main.py      | 2425 --------------------
 .../validate_modules/module_args.py                | 179 --
 .../validate_modules/ps_argspec.ps1                | 110 -
 .../validate-modules/validate_modules/schema.py    | 587 -----
 .../validate-modules/validate_modules/utils.py     | 225 --
 .../_data/sanity/yamllint/config/default.yml       | 19 -
 .../_data/sanity/yamllint/config/modules.yml       | 19 -
 .../_data/sanity/yamllint/config/plugins.yml       | 19 -
 .../_data/sanity/yamllint/yamllinter.py            | 274 ---
 .../_data/setup/ConfigureRemotingForAnsible.ps1    | 453 ----
 test/lib/ansible_test/_data/setup/docker.sh        | 13 -
 test/lib/ansible_test/_data/setup/remote.sh        | 185 --
 test/lib/ansible_test/_data/setup/ssh-keys.sh      | 35 -
 test/lib/ansible_test/_data/sslcheck.py            | 24 -
 test/lib/ansible_test/_data/versions.py            | 20 -
 test/lib/ansible_test/_data/virtualenvcheck.py     | 15 -
 test/lib/ansible_test/_data/yamlcheck.py           | 21 -
 test/lib/ansible_test/_internal/ansible_util.py    | 8 +-
 .../_internal/classification/__init__.py           | 30 +-
 .../_internal/classification/python.py             | 3 +-
 .../_internal/commands/coverage/combine.py         | 4 +-
 .../_internal/commands/integration/__init__.py     | 4 +-
 .../_internal/commands/sanity/__init__.py          | 6 +-
 .../_internal/commands/sanity/bin_symlinks.py      | 4 +-
 .../_internal/commands/sanity/compile.py           | 4 +-
 .../_internal/commands/sanity/import.py            | 6 +-
 .../_internal/commands/sanity/pylint.py            | 7 +-
 .../_internal/commands/units/__init__.py           | 5 +-
 test/lib/ansible_test/_internal/core_ci.py         | 4 +-
 test/lib/ansible_test/_internal/delegation.py      | 4 +-
 test/lib/ansible_test/_internal/executor.py        | 3 +-
 test/lib/ansible_test/_internal/manage_ci.py       | 6 +-
 test/lib/ansible_test/_internal/payload.py         | 2 +-
 test/lib/ansible_test/_internal/util.py            | 8 +-
 test/lib/ansible_test/_internal/util_common.py     | 3 +-
 test/lib/ansible_test/_internal/venv.py            | 4 +-
 .../_util/controller/cli/ansible_test_cli_stub.py  | 28 +
 .../sanity/code-smell/action-plugin-docs.json      | 13 +
 .../sanity/code-smell/action-plugin-docs.py        | 68 +
 .../controller/sanity/code-smell/changelog.json    | 8 +
 .../controller/sanity/code-smell/changelog.py      | 56 +
 .../sanity/code-smell/changelog/sphinx.py          | 5 +
 .../controller/sanity/code-smell/empty-init.json   | 14 +
 .../controller/sanity/code-smell/empty-init.py     | 16 +
 .../code-smell/future-import-boilerplate.json      | 7 +
 .../sanity/code-smell/future-import-boilerplate.py | 46 +
 .../controller/sanity/code-smell/line-endings.json | 4 +
 .../controller/sanity/code-smell/line-endings.py   | 18 +
 .../sanity/code-smell/metaclass-boilerplate.json   | 7 +
 .../sanity/code-smell/metaclass-boilerplate.py     | 44 +
 .../controller/sanity/code-smell/no-assert.json    | 10 +
 .../controller/sanity/code-smell/no-assert.py      | 24 +
 .../sanity/code-smell/no-basestring.json           | 7 +
 .../controller/sanity/code-smell/no-basestring.py  | 21 +
 .../sanity/code-smell/no-dict-iteritems.json       | 7 +
 .../sanity/code-smell/no-dict-iteritems.py         | 21 +
 .../sanity/code-smell/no-dict-iterkeys.json        | 7 +
 .../sanity/code-smell/no-dict-iterkeys.py          | 21 +
 .../sanity/code-smell/no-dict-itervalues.json      | 7 +
 .../sanity/code-smell/no-dict-itervalues.py        | 21 +
 .../sanity/code-smell/no-get-exception.json        | 7 +
 .../sanity/code-smell/no-get-exception.py          | 28 +
 .../sanity/code-smell/no-illegal-filenames.json    | 5 +
 .../sanity/code-smell/no-illegal-filenames.py      | 82 +
 .../sanity/code-smell/no-main-display.json         | 10 +
 .../sanity/code-smell/no-main-display.py           | 21 +
 .../sanity/code-smell/no-smart-quotes.json         | 5 +
 .../sanity/code-smell/no-smart-quotes.py           | 28 +
 .../sanity/code-smell/no-unicode-literals.json     | 7 +
 .../sanity/code-smell/no-unicode-literals.py       | 21 +
 .../sanity/code-smell/replace-urlopen.json         | 7 +
 .../sanity/code-smell/replace-urlopen.py           | 21 +
 .../sanity/code-smell/runtime-metadata.json        | 11 +
 .../sanity/code-smell/runtime-metadata.py          | 280 +++
 .../controller/sanity/code-smell/shebang.json      | 4 +
 .../_util/controller/sanity/code-smell/shebang.py  | 122 +
 .../controller/sanity/code-smell/symlinks.json     | 5 +
 .../_util/controller/sanity/code-smell/symlinks.py | 32 +
 .../sanity/code-smell/use-argspec-type-path.json   | 10 +
 .../sanity/code-smell/use-argspec-type-path.py     | 21 +
 .../sanity/code-smell/use-compat-six.json          | 6 +
 .../controller/sanity/code-smell/use-compat-six.py | 21 +
 .../sanity/integration-aliases/yaml_to_json.py     | 15 +
 .../controller/sanity/pep8/current-ignore.txt      | 4 +
 .../_util/controller/sanity/pslint/pslint.ps1      | 43 +
 .../_util/controller/sanity/pslint/settings.psd1   | 13 +
 .../sanity/pylint/config/ansible-test.cfg          | 54 +
 .../controller/sanity/pylint/config/collection.cfg | 143 ++
 .../controller/sanity/pylint/config/default.cfg    | 148 ++
 .../controller/sanity/pylint/config/sanity.cfg     | 55 +
 .../controller/sanity/pylint/plugins/deprecated.py | 258 +++
 .../sanity/pylint/plugins/string_format.py         | 84 +
 .../controller/sanity/pylint/plugins/unwanted.py   | 240 ++
 .../_util/controller/sanity/shellcheck/exclude.txt | 3 +
 .../controller/sanity/validate-modules/main.py     | 8 +
 .../sanity/validate-modules/validate-modules       | 1 +
 .../validate-modules/validate_modules/__init__.py  | 20 +
 .../validate-modules/validate_modules/main.py      | 2425 ++++++++++++++++++++
 .../validate_modules/module_args.py                | 179 ++
 .../validate_modules/ps_argspec.ps1                | 110 +
 .../validate-modules/validate_modules/schema.py    | 587 +++++
 .../validate-modules/validate_modules/utils.py     | 225 ++
 .../controller/sanity/yamllint/config/default.yml  | 19 +
 .../controller/sanity/yamllint/config/modules.yml  | 19 +
 .../controller/sanity/yamllint/config/plugins.yml  | 19 +
 .../_util/controller/sanity/yamllint/yamllinter.py | 274 +++
 .../_util/controller/tools/collection_detail.py    | 95 +
 .../_util/controller/tools/coverage_stub.ps1       | 38 +
 .../_util/controller/tools/quiet_pip.py            | 75 +
 .../_util/controller/tools/sslcheck.py             | 24 +
 .../_util/controller/tools/versions.py             | 20 +
 .../_util/controller/tools/virtualenvcheck.py      | 15 +
 .../_util/controller/tools/yamlcheck.py            | 21 +
 .../lib/ansible_test/_util/target/injector/ansible | 1 +
 .../_util/target/injector/ansible-config           | 1 +
 .../_util/target/injector/ansible-connection       | 1 +
 .../_util/target/injector/ansible-console          | 1 +
 .../ansible_test/_util/target/injector/ansible-doc | 1 +
 .../_util/target/injector/ansible-galaxy           | 1 +
 .../_util/target/injector/ansible-inventory        | 1 +
 .../_util/target/injector/ansible-playbook         | 1 +
 .../_util/target/injector/ansible-pull             | 1 +
 .../_util/target/injector/ansible-test             | 1 +
 .../_util/target/injector/ansible-vault            | 1 +
 .../ansible_test/_util/target/injector/importer.py | 1 +
 test/lib/ansible_test/_util/target/injector/pytest | 1 +
 .../ansible_test/_util/target/injector/python.py   | 80 +
 .../_util/target/injector/virtualenv.sh            | 14 +
 .../target/legacy_collection_loader/__init__.py    | 31 +
 .../legacy_collection_loader/_collection_config.py | 107 +
 .../legacy_collection_loader/_collection_finder.py | 1067 +++++++++
 .../legacy_collection_loader/_collection_meta.py   | 37 +
 .../pytest/plugins/ansible_pytest_collections.py   | 75 +
 .../pytest/plugins/ansible_pytest_coverage.py      | 68 +
 .../_util/target/sanity/compile/compile.py         | 47 +
 .../_util/target/sanity/import/importer.py         | 541 +++++
 .../_util/target/sanity/import/yaml_to_json.py     | 27 +
 .../target/setup/ConfigureRemotingForAnsible.ps1   | 453 ++++
 test/lib/ansible_test/_util/target/setup/docker.sh | 13 +
 test/lib/ansible_test/_util/target/setup/remote.sh | 185 ++
 .../ansible_test/_util/target/setup/ssh-keys.sh    | 35 +
 229 files changed, 9323 insertions(+), 9313 deletions(-)
 delete mode 100755 test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py
 delete mode 100644 test/lib/ansible_test/_data/collection_detail.py
 delete mode 100644 test/lib/ansible_test/_data/coverage_stub.ps1
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-config
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-connection
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-console
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-doc
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-galaxy
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-inventory
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-playbook
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-pull
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-test
 delete mode 120000 test/lib/ansible_test/_data/injector/ansible-vault
 delete mode 120000 test/lib/ansible_test/_data/injector/importer.py
 delete mode 120000 test/lib/ansible_test/_data/injector/pytest
 delete mode 100755 test/lib/ansible_test/_data/injector/python.py
 delete mode 100644 test/lib/ansible_test/_data/injector/virtualenv.sh
 delete mode 100644 test/lib/ansible_test/_data/legacy_collection_loader/__init__.py
 delete mode 100644 test/lib/ansible_test/_data/legacy_collection_loader/_collection_config.py
 delete mode 100644 test/lib/ansible_test/_data/legacy_collection_loader/_collection_finder.py
 delete mode 100644 test/lib/ansible_test/_data/legacy_collection_loader/_collection_meta.py
 delete mode 100644 test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_collections.py
 delete mode 100644 test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_coverage.py
 delete mode 100644 test/lib/ansible_test/_data/quiet_pip.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/changelog.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/changelog.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/changelog/sphinx.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/empty-init.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/empty-init.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/line-endings.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/line-endings.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-assert.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-assert.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-dict-iterkeys.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-dict-itervalues.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-get-exception.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-illegal-filenames.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/shebang.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/shebang.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/symlinks.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/symlinks.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json
 delete mode 100755 test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py
 delete mode 100755 test/lib/ansible_test/_data/sanity/compile/compile.py
 delete mode 100755 test/lib/ansible_test/_data/sanity/import/importer.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/import/yaml_to_json.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt
 delete mode 100755 test/lib/ansible_test/_data/sanity/pslint/pslint.ps1
 delete mode 100644 test/lib/ansible_test/_data/sanity/pslint/settings.psd1
 delete mode 100644 test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg
 delete mode 100644 test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg
 delete mode 100644 test/lib/ansible_test/_data/sanity/pylint/config/default.cfg
 delete mode 100644 test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg
 delete mode 100644 test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt
 delete mode 100755 test/lib/ansible_test/_data/sanity/validate-modules/main.py
 delete mode 120000 test/lib/ansible_test/_data/sanity/validate-modules/validate-modules
 delete mode 100644 test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py
 delete mode 100755 test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1
 delete mode 100644 test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py
 delete mode 100644 test/lib/ansible_test/_data/sanity/yamllint/config/default.yml
 delete mode 100644 test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml
 delete mode 100644 test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml
 delete mode 100644 test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py
 delete mode 100644 test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1
 delete mode 100644 test/lib/ansible_test/_data/setup/docker.sh
 delete mode 100644 test/lib/ansible_test/_data/setup/remote.sh
 delete mode 100644 test/lib/ansible_test/_data/setup/ssh-keys.sh
 delete mode 100755 test/lib/ansible_test/_data/sslcheck.py
 delete mode 100755 test/lib/ansible_test/_data/versions.py
 delete mode 100755 test/lib/ansible_test/_data/virtualenvcheck.py
 delete mode 100755 test/lib/ansible_test/_data/yamlcheck.py
 create mode 100755 test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iterkeys.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-itervalues.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-get-exception.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-illegal-filenames.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pylint/config/sanity.cfg
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/shellcheck/exclude.txt
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py
 create mode 120000 test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py
 create mode 100755 test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/yamllint/config/default.yml
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/yamllint/config/modules.yml
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/yamllint/config/plugins.yml
 create mode 100644 test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py
 create mode 100644 test/lib/ansible_test/_util/controller/tools/collection_detail.py
 create mode 100644 test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1
 create mode 100644 test/lib/ansible_test/_util/controller/tools/quiet_pip.py
 create mode 100755 test/lib/ansible_test/_util/controller/tools/sslcheck.py
 create mode 100755 test/lib/ansible_test/_util/controller/tools/versions.py
 create mode 100755 test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py
 create mode 100755 test/lib/ansible_test/_util/controller/tools/yamlcheck.py
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-config
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-connection
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-console
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-doc
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-galaxy
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-inventory
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-playbook
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-pull
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-test
 create mode 120000 test/lib/ansible_test/_util/target/injector/ansible-vault
 create mode 120000 test/lib/ansible_test/_util/target/injector/importer.py
 create mode 120000 test/lib/ansible_test/_util/target/injector/pytest
 create mode 100755 test/lib/ansible_test/_util/target/injector/python.py
 create mode 100644 test/lib/ansible_test/_util/target/injector/virtualenv.sh
 create mode 100644 test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py
 create mode 100644 test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py
 create mode 100644 test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py
 create mode 100644 test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py
 create mode 100644 test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py
 create mode 100644 test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_coverage.py
 create mode 100755 test/lib/ansible_test/_util/target/sanity/compile/compile.py
 create mode 100755 test/lib/ansible_test/_util/target/sanity/import/importer.py
 create mode 100644 test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py
 create mode 100644 test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1
 create mode 100644 test/lib/ansible_test/_util/target/setup/docker.sh
 create mode 100644 test/lib/ansible_test/_util/target/setup/remote.sh
 create mode 100644 test/lib/ansible_test/_util/target/setup/ssh-keys.sh
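The relocation summarized above follows a consistent rule: scripts that only run on the controller land under _util/controller, while code that must also run on managed hosts (the injector, the legacy collection loader, the pytest plugins, the compile/import sanity runners, and the setup scripts) lands under _util/target. A minimal Python sketch of that rule; it is illustrative only, is not part of the patch, and the prefix list is inferred from the file lists above rather than taken from ansible-test itself:

    # Illustrative only: approximate the _data -> _util split shown in this patch.
    OLD_ROOT = 'test/lib/ansible_test/_data/'
    NEW_ROOT = 'test/lib/ansible_test/_util/'

    # Directories whose contents must also run on managed hosts (inferred above).
    TARGET_PREFIXES = ('injector/', 'legacy_collection_loader/', 'pytest/',
                       'sanity/compile/', 'sanity/import/', 'setup/')

    def relocate(old_path):
        """Guess the post-move location of a pre-move _data file."""
        rel = old_path[len(OLD_ROOT):]
        side = 'target' if rel.startswith(TARGET_PREFIXES) else 'controller'
        return NEW_ROOT + side + '/' + rel

    print(relocate(OLD_ROOT + 'setup/remote.sh'))
    # test/lib/ansible_test/_util/target/setup/remote.sh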
diff --git a/test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py b/test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py
deleted file mode 100755
index d12b6334ef..0000000000
--- a/test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-# PYTHON_ARGCOMPLETE_OK
-"""Command line entry point for ansible-test."""
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import sys
-
-
-def main():
-    """Main program entry point."""
-    ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-    source_root = os.path.join(ansible_root, 'test', 'lib')
-
-    if os.path.exists(os.path.join(source_root, 'ansible_test', '_internal', 'cli.py')):
-        # running from source, use that version of ansible-test instead of any version that may already be installed
-        sys.path.insert(0, source_root)
-
-    # noinspection PyProtectedMember
-    from ansible_test._internal.cli import main as cli_main
-
-    cli_main()
-
-
-if __name__ == '__main__':
-    main()
diff --git a/test/lib/ansible_test/_data/collection_detail.py b/test/lib/ansible_test/_data/collection_detail.py
deleted file mode 100644
index e7c883ca01..0000000000
--- a/test/lib/ansible_test/_data/collection_detail.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""Retrieve collection detail."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import json
-import os
-import re
-import sys
-
-import yaml
-
-
-# See semantic versioning specification (https://semver.org/)
-NUMERIC_IDENTIFIER = r'(?:0|[1-9][0-9]*)'
-ALPHANUMERIC_IDENTIFIER = r'(?:[0-9]*[a-zA-Z-][a-zA-Z0-9-]*)'
-
-PRE_RELEASE_IDENTIFIER = r'(?:' + NUMERIC_IDENTIFIER + r'|' + ALPHANUMERIC_IDENTIFIER + r')'
-BUILD_IDENTIFIER = r'[a-zA-Z0-9-]+'  # equivalent to r'(?:[0-9]+|' + ALPHANUMERIC_IDENTIFIER + r')'
-
-VERSION_CORE = NUMERIC_IDENTIFIER + r'\.' + NUMERIC_IDENTIFIER + r'\.' + NUMERIC_IDENTIFIER
-PRE_RELEASE = r'(?:-' + PRE_RELEASE_IDENTIFIER + r'(?:\.' + PRE_RELEASE_IDENTIFIER + r')*)?'
-BUILD = r'(?:\+' + BUILD_IDENTIFIER + r'(?:\.' + BUILD_IDENTIFIER + r')*)?'
-
-SEMVER_REGULAR_EXPRESSION = r'^' + VERSION_CORE + PRE_RELEASE + BUILD + r'$'
-
-
-def validate_version(version):
-    """Raise exception if the provided version is not None or a valid semantic version."""
-    if version is None:
-        return
-    if not re.match(SEMVER_REGULAR_EXPRESSION, version):
-        raise Exception('Invalid version number "{0}". Collection version numbers must '
-                        'follow semantic versioning (https://semver.org/).'.format(version))
-
-
-def read_manifest_json(collection_path):
-    """Return collection information from the MANIFEST.json file."""
-    manifest_path = os.path.join(collection_path, 'MANIFEST.json')
-
-    if not os.path.exists(manifest_path):
-        return None
-
-    try:
-        with open(manifest_path) as manifest_file:
-            manifest = json.load(manifest_file)
-
-        collection_info = manifest.get('collection_info') or dict()
-
-        result = dict(
-            version=collection_info.get('version'),
-        )
-        validate_version(result['version'])
-    except Exception as ex:  # pylint: disable=broad-except
-        raise Exception('{0}: {1}'.format(os.path.basename(manifest_path), ex))
-
-    return result
-
-
-def read_galaxy_yml(collection_path):
-    """Return collection information from the galaxy.yml file."""
-    galaxy_path = os.path.join(collection_path, 'galaxy.yml')
-
-    if not os.path.exists(galaxy_path):
-        return None
-
-    try:
-        with open(galaxy_path) as galaxy_file:
-            galaxy = yaml.safe_load(galaxy_file)
-
-        result = dict(
-            version=galaxy.get('version'),
-        )
-        validate_version(result['version'])
-    except Exception as ex:  # pylint: disable=broad-except
-        raise Exception('{0}: {1}'.format(os.path.basename(galaxy_path), ex))
-
-    return result
-
-
-def main():
-    """Retrieve collection detail."""
-    collection_path = sys.argv[1]
-
-    try:
-        result = read_manifest_json(collection_path) or read_galaxy_yml(collection_path) or dict()
-    except Exception as ex:  # pylint: disable=broad-except
-        result = dict(
-            error='{0}'.format(ex),
-        )
-
-    print(json.dumps(result))
-
-
-if __name__ == '__main__':
-    main()
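collection_detail.py builds its semver pattern from named component expressions rather than one opaque literal. A short demonstration of how the version-core part of the assembled pattern behaves; illustrative only, not part of the patch:

    import re

    # Reassembled exactly as in the deleted collection_detail.py above.
    NUMERIC_IDENTIFIER = r'(?:0|[1-9][0-9]*)'
    VERSION_CORE = NUMERIC_IDENTIFIER + r'\.' + NUMERIC_IDENTIFIER + r'\.' + NUMERIC_IDENTIFIER

    print(bool(re.match(r'^' + VERSION_CORE + r'$', '1.2.3')))   # True
    print(bool(re.match(r'^' + VERSION_CORE + r'$', '1.02.3')))  # False: leading zero
    print(bool(re.match(r'^' + VERSION_CORE + r'$', '1.2')))     # False: no patch part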
diff --git a/test/lib/ansible_test/_data/coverage_stub.ps1 b/test/lib/ansible_test/_data/coverage_stub.ps1
deleted file mode 100644
index 83c27ff73c..0000000000
--- a/test/lib/ansible_test/_data/coverage_stub.ps1
+++ /dev/null
@@ -1,38 +0,0 @@
-<#
-.SYNOPSIS
-Gets the lines to hit from a sourcefile for coverage stubs.
-#>
-[CmdletBinding()]
-param (
-    [Parameter(Mandatory, ValueFromRemainingArguments)]
-    [String[]]
-    $Path
-)
-
-$stubInfo = @(foreach ($sourcePath in $Path) {
-    # Default is to just no lines for missing files
-    [Collections.Generic.HashSet[int]]$lines = @()
-
-    if (Test-Path -LiteralPath $sourcePath) {
-        $code = [ScriptBlock]::Create([IO.File]::ReadAllText($sourcePath))
-
-        # We set our breakpoints with this predicate so our stubs should match
-        # that logic.
-        $predicate = {
-            $args[0] -is [System.Management.Automation.Language.CommandBaseAst]
-        }
-        $cmds = $code.Ast.FindAll($predicate, $true)
-
-        # We only care about unique lines not multiple commands on 1 line.
-        $lines = @(foreach ($cmd in $cmds) {
-            $cmd.Extent.StartLineNumber
-        })
-    }
-
-    [PSCustomObject]@{
-        Path = $sourcePath
-        Lines = $lines
-    }
-})
-
-ConvertTo-Json -InputObject $stubInfo -Depth 2 -Compress
diff --git a/test/lib/ansible_test/_data/injector/ansible b/test/lib/ansible_test/_data/injector/ansible
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-config b/test/lib/ansible_test/_data/injector/ansible-config
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-config
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-connection b/test/lib/ansible_test/_data/injector/ansible-connection
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-connection
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-console b/test/lib/ansible_test/_data/injector/ansible-console
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-console
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-doc b/test/lib/ansible_test/_data/injector/ansible-doc
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-doc
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-galaxy b/test/lib/ansible_test/_data/injector/ansible-galaxy
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-galaxy
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-inventory b/test/lib/ansible_test/_data/injector/ansible-inventory
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-inventory
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-playbook b/test/lib/ansible_test/_data/injector/ansible-playbook
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-playbook
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-pull b/test/lib/ansible_test/_data/injector/ansible-pull
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-pull
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
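Every injector entry deleted above (and recreated under _util/target/injector/) is a symlink to the same python.py, so a single wrapper fronts all of the ansible-* commands plus pytest; the wrapper decides what to execute from the name it was invoked as. A condensed, illustrative restatement of that dispatch (the full logic, including coverage support, is in the python.py hunk below; the path shown is hypothetical, the real code searches PATH):

    import os
    import sys

    name = os.path.basename(__file__)  # the symlink name, e.g. 'ansible-playbook'
    args = [sys.executable]

    if name == 'pytest':
        args += ['-m', 'pytest']      # run pytest as a module
    elif name != 'python.py':
        args += ['/path/to/' + name]  # hypothetical; find_executable() resolves this
    args += sys.argv[1:]
    # python.py then replaces itself with the chosen command: os.execv(args[0], args)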
diff --git a/test/lib/ansible_test/_data/injector/ansible-test b/test/lib/ansible_test/_data/injector/ansible-test
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-test
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/ansible-vault b/test/lib/ansible_test/_data/injector/ansible-vault
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/ansible-vault
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/importer.py b/test/lib/ansible_test/_data/injector/importer.py
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/importer.py
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/pytest b/test/lib/ansible_test/_data/injector/pytest
deleted file mode 120000
index 6bbbfe4d91..0000000000
--- a/test/lib/ansible_test/_data/injector/pytest
+++ /dev/null
@@ -1 +0,0 @@
-python.py
\ No newline at end of file
diff --git a/test/lib/ansible_test/_data/injector/python.py b/test/lib/ansible_test/_data/injector/python.py
deleted file mode 100755
index 1063d1f011..0000000000
--- a/test/lib/ansible_test/_data/injector/python.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-"""Provides an entry point for python scripts and python modules on the controller with the current python interpreter and optional code coverage collection."""
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import sys
-
-
-def main():
-    """Main entry point."""
-    name = os.path.basename(__file__)
-    args = [sys.executable]
-
-    coverage_config = os.environ.get('COVERAGE_CONF')
-    coverage_output = os.environ.get('COVERAGE_FILE')
-
-    if coverage_config:
-        if coverage_output:
-            args += ['-m', 'coverage.__main__', 'run', '--rcfile', coverage_config]
-        else:
-            if sys.version_info >= (3, 4):
-                # noinspection PyUnresolvedReferences
-                import importlib.util
-
-                # noinspection PyUnresolvedReferences
-                found = bool(importlib.util.find_spec('coverage'))
-            else:
-                # noinspection PyDeprecation
-                import imp  # pylint: disable=deprecated-module
-
-                try:
-                    # noinspection PyDeprecation
-                    imp.find_module('coverage')
-                    found = True
-                except ImportError:
-                    found = False
-
-            if not found:
-                sys.exit('ERROR: Could not find `coverage` module. '
-                         'Did you use a virtualenv created without --system-site-packages or with the wrong interpreter?')
-
-    if name == 'python.py':
-        if sys.argv[1] == '-c':
-            # prevent simple misuse of python.py with -c which does not work with coverage
-            sys.exit('ERROR: Use `python -c` instead of `python.py -c` to avoid errors when code coverage is collected.')
-    elif name == 'pytest':
-        args += ['-m', 'pytest']
-    else:
-        args += [find_executable(name)]
-
-    args += sys.argv[1:]
-
-    os.execv(args[0], args)
-
-
-def find_executable(name):
-    """
-    :type name: str
-    :rtype: str
-    """
-    path = os.environ.get('PATH', os.path.defpath)
-    seen = set([os.path.abspath(__file__)])
-
-    for base in path.split(os.path.pathsep):
-        candidate = os.path.abspath(os.path.join(base, name))
-
-        if candidate in seen:
-            continue
-
-        seen.add(candidate)
-
-        if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
-            return candidate
-
-    raise Exception('Executable "%s" not found in path: %s' % (name, path))
-
-
-if __name__ == '__main__':
-    main()
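The two environment variables read by main() above determine whether the target runs directly or under coverage. A compact sketch of the resulting argv when both are set, with made-up values, illustrative only:

    # Hypothetical inputs, mirroring the COVERAGE_CONF/COVERAGE_FILE handling above.
    interpreter = '/usr/bin/python3'
    coverage_config = '/tmp/coveragerc'       # would come from COVERAGE_CONF
    coverage_output = '/tmp/coverage/output'  # would come from COVERAGE_FILE
    tool = '/usr/bin/ansible-playbook'

    args = [interpreter]
    if coverage_config and coverage_output:
        args += ['-m', 'coverage.__main__', 'run', '--rcfile', coverage_config]
    args += [tool, 'site.yml']

    print(args)
    # ['/usr/bin/python3', '-m', 'coverage.__main__', 'run', '--rcfile',
    #  '/tmp/coveragerc', '/usr/bin/ansible-playbook', 'site.yml']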
diff --git a/test/lib/ansible_test/_data/injector/virtualenv.sh b/test/lib/ansible_test/_data/injector/virtualenv.sh
deleted file mode 100644
index cb19a7ce47..0000000000
--- a/test/lib/ansible_test/_data/injector/virtualenv.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-# Create and activate a fresh virtual environment with `source virtualenv.sh`.
-
-rm -rf "${OUTPUT_DIR}/venv"
-
-# Try to use 'venv' if it is available, then fallback to 'virtualenv' since some systems provide 'venv' although it is non-functional.
-if [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || ! "${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m venv --system-site-packages "${OUTPUT_DIR}/venv" > /dev/null 2>&1; then
-    rm -rf "${OUTPUT_DIR}/venv"
-    "${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m virtualenv --system-site-packages --python "${ANSIBLE_TEST_PYTHON_INTERPRETER}" "${OUTPUT_DIR}/venv"
-fi
-
-set +ux
-source "${OUTPUT_DIR}/venv/bin/activate"
-set -ux
diff --git a/test/lib/ansible_test/_data/legacy_collection_loader/__init__.py b/test/lib/ansible_test/_data/legacy_collection_loader/__init__.py
deleted file mode 100644
index b63c239d20..0000000000
--- a/test/lib/ansible_test/_data/legacy_collection_loader/__init__.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# (c) 2019 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-# CAUTION: There are two implementations of the collection loader.
-# They must be kept functionally identical, although their implementations may differ.
-#
-# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory.
-#    It must function on all Python versions supported on the controller.
-# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_data/legacy_collection_loader/" directory.
-#    It must function on all Python versions supported on managed hosts which are not supported by the controller.
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-# FIXME: decide what of this we want to actually be public/toplevel, put other stuff on a utility class?
-from ._collection_config import AnsibleCollectionConfig
-from ._collection_finder import AnsibleCollectionRef
-from ansible.module_utils.common.text.converters import to_text
-
-
-def resource_from_fqcr(ref):
-    """
-    Return resource from a fully-qualified collection reference,
-    or from a simple resource name.
- For fully-qualified collection references, this is equivalent to - ``AnsibleCollectionRef.from_fqcr(ref).resource``. - :param ref: collection reference to parse - :return: the resource as a unicode string - """ - ref = to_text(ref, errors='strict') - return ref.split(u'.')[-1] diff --git a/test/lib/ansible_test/_data/legacy_collection_loader/_collection_config.py b/test/lib/ansible_test/_data/legacy_collection_loader/_collection_config.py deleted file mode 100644 index c0680942b4..0000000000 --- a/test/lib/ansible_test/_data/legacy_collection_loader/_collection_config.py +++ /dev/null @@ -1,107 +0,0 @@ -# (c) 2019 Ansible Project -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -# CAUTION: There are two implementations of the collection loader. -# They must be kept functionally identical, although their implementations may differ. -# -# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory. -# It must function on all Python versions supported on the controller. -# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_data/legacy_collection_loader/" directory. -# It must function on all Python versions supported on managed hosts which are not supported by the controller. - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -from ansible.module_utils.common.text.converters import to_text -from ansible.module_utils.six import with_metaclass - - -class _EventSource: - def __init__(self): - self._handlers = set() - - def __iadd__(self, handler): - if not callable(handler): - raise ValueError('handler must be callable') - self._handlers.add(handler) - return self - - def __isub__(self, handler): - try: - self._handlers.remove(handler) - except KeyError: - pass - - return self - - def _on_exception(self, handler, exc, *args, **kwargs): - # if we return True, we want the caller to re-raise - return True - - def fire(self, *args, **kwargs): - for h in self._handlers: - try: - h(*args, **kwargs) - except Exception as ex: - if self._on_exception(h, ex, *args, **kwargs): - raise - - -class _AnsibleCollectionConfig(type): - def __init__(cls, meta, name, bases): - cls._collection_finder = None - cls._default_collection = None - cls._on_collection_load = _EventSource() - - @property - def collection_finder(cls): - return cls._collection_finder - - @collection_finder.setter - def collection_finder(cls, value): - if cls._collection_finder: - raise ValueError('an AnsibleCollectionFinder has already been configured') - - cls._collection_finder = value - - @property - def collection_paths(cls): - cls._require_finder() - return [to_text(p) for p in cls._collection_finder._n_collection_paths] - - @property - def default_collection(cls): - return cls._default_collection - - @default_collection.setter - def default_collection(cls, value): - - cls._default_collection = value - - @property - def on_collection_load(cls): - return cls._on_collection_load - - @on_collection_load.setter - def on_collection_load(cls, value): - if value is not cls._on_collection_load: - raise ValueError('on_collection_load is not directly settable (use +=)') - - @property - def playbook_paths(cls): - cls._require_finder() - return [to_text(p) for p in cls._collection_finder._n_playbook_paths] - - @playbook_paths.setter - def playbook_paths(cls, value): - cls._require_finder() - cls._collection_finder.set_playbook_paths(value) - - def _require_finder(cls): - if not cls._collection_finder: 
- raise NotImplementedError('an AnsibleCollectionFinder has not been installed in this process') - - -# concrete class of our metaclass type that defines the class properties we want -class AnsibleCollectionConfig(with_metaclass(_AnsibleCollectionConfig)): - pass diff --git a/test/lib/ansible_test/_data/legacy_collection_loader/_collection_finder.py b/test/lib/ansible_test/_data/legacy_collection_loader/_collection_finder.py deleted file mode 100644 index 8c1cc57d9c..0000000000 --- a/test/lib/ansible_test/_data/legacy_collection_loader/_collection_finder.py +++ /dev/null @@ -1,1067 +0,0 @@ -# (c) 2019 Ansible Project -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -# CAUTION: There are two implementations of the collection loader. -# They must be kept functionally identical, although their implementations may differ. -# -# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory. -# It must function on all Python versions supported on the controller. -# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_data/legacy_collection_loader/" directory. -# It must function on all Python versions supported on managed hosts which are not supported by the controller. - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import os.path -import pkgutil -import re -import sys -from keyword import iskeyword -from tokenize import Name as _VALID_IDENTIFIER_REGEX - - -# DO NOT add new non-stdlib import deps here, this loader is used by external tools (eg ansible-test import sanity) -# that only allow stdlib and module_utils -from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes -from ansible.module_utils.six import string_types, PY3 -from ._collection_config import AnsibleCollectionConfig - -from contextlib import contextmanager -from types import ModuleType - -try: - from importlib import import_module -except ImportError: - def import_module(name): - __import__(name) - return sys.modules[name] - -try: - from importlib import reload as reload_module -except ImportError: - # 2.7 has a global reload function instead... 
- reload_module = reload # pylint:disable=undefined-variable - -# NB: this supports import sanity test providing a different impl -try: - from ._collection_meta import _meta_yml_to_dict -except ImportError: - _meta_yml_to_dict = None - - -if not hasattr(__builtins__, 'ModuleNotFoundError'): - # this was introduced in Python 3.6 - ModuleNotFoundError = ImportError - - -_VALID_IDENTIFIER_STRING_REGEX = re.compile( - ''.join((_VALID_IDENTIFIER_REGEX, r'\Z')), -) - - -try: # NOTE: py3/py2 compat - # py2 mypy can't deal with try/excepts - is_python_identifier = str.isidentifier # type: ignore[attr-defined] -except AttributeError: # Python 2 - def is_python_identifier(tested_str): # type: (str) -> bool - """Determine whether the given string is a Python identifier.""" - # Ref: https://stackoverflow.com/a/55802320/595220 - return bool(re.match(_VALID_IDENTIFIER_STRING_REGEX, tested_str)) - - -PB_EXTENSIONS = ('.yml', '.yaml') - - -class _AnsibleCollectionFinder: - def __init__(self, paths=None, scan_sys_paths=True): - # TODO: accept metadata loader override - self._ansible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['ansible'].__file__))) - - if isinstance(paths, string_types): - paths = [paths] - elif paths is None: - paths = [] - - # expand any placeholders in configured paths - paths = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in paths] - - # add syspaths if needed - if scan_sys_paths: - paths.extend(sys.path) - - good_paths = [] - # expand any placeholders in configured paths - for p in paths: - - # ensure we always have ansible_collections - if os.path.basename(p) == 'ansible_collections': - p = os.path.dirname(p) - - if p not in good_paths and os.path.isdir(to_bytes(os.path.join(p, 'ansible_collections'), errors='surrogate_or_strict')): - good_paths.append(p) - - self._n_configured_paths = good_paths - self._n_cached_collection_paths = None - self._n_cached_collection_qualified_paths = None - - self._n_playbook_paths = [] - - @classmethod - def _remove(cls): - for mps in sys.meta_path: - if isinstance(mps, _AnsibleCollectionFinder): - sys.meta_path.remove(mps) - - # remove any path hooks that look like ours - for ph in sys.path_hooks: - if hasattr(ph, '__self__') and isinstance(ph.__self__, _AnsibleCollectionFinder): - sys.path_hooks.remove(ph) - - # zap any cached path importer cache entries that might refer to us - sys.path_importer_cache.clear() - - AnsibleCollectionConfig._collection_finder = None - - # validate via the public property that we really killed it - if AnsibleCollectionConfig.collection_finder is not None: - raise AssertionError('_AnsibleCollectionFinder remove did not reset AnsibleCollectionConfig.collection_finder') - - def _install(self): - self._remove() - sys.meta_path.insert(0, self) - - sys.path_hooks.insert(0, self._ansible_collection_path_hook) - - AnsibleCollectionConfig.collection_finder = self - - def _ansible_collection_path_hook(self, path): - path = to_native(path) - interesting_paths = self._n_cached_collection_qualified_paths - if not interesting_paths: - interesting_paths = [] - for p in self._n_collection_paths: - if os.path.basename(p) != 'ansible_collections': - p = os.path.join(p, 'ansible_collections') - - if p not in interesting_paths: - interesting_paths.append(p) - - interesting_paths.insert(0, self._ansible_pkg_path) - self._n_cached_collection_qualified_paths = interesting_paths - - if any(path.startswith(p) for p in interesting_paths): - return _AnsiblePathHookFinder(self, path) - - raise 
ImportError('not interested') - - @property - def _n_collection_paths(self): - paths = self._n_cached_collection_paths - if not paths: - self._n_cached_collection_paths = paths = self._n_playbook_paths + self._n_configured_paths - return paths - - def set_playbook_paths(self, playbook_paths): - if isinstance(playbook_paths, string_types): - playbook_paths = [playbook_paths] - - # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins) - added_paths = set() - - # de-dupe - self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in playbook_paths if not (p in added_paths or added_paths.add(p))] - self._n_cached_collection_paths = None - # HACK: playbook CLI sets this relatively late, so we've already loaded some packages whose paths might depend on this. Fix those up. - # NB: this should NOT be used for late additions; ideally we'd fix the playbook dir setup earlier in Ansible init - # to prevent this from occurring - for pkg in ['ansible_collections', 'ansible_collections.ansible']: - self._reload_hack(pkg) - - def _reload_hack(self, fullname): - m = sys.modules.get(fullname) - if not m: - return - reload_module(m) - - def find_module(self, fullname, path=None): - # Figure out what's being asked for, and delegate to a special-purpose loader - - split_name = fullname.split('.') - toplevel_pkg = split_name[0] - module_to_find = split_name[-1] - part_count = len(split_name) - - if toplevel_pkg not in ['ansible', 'ansible_collections']: - # not interested in anything other than ansible_collections (and limited cases under ansible) - return None - - # sanity check what we're getting from import, canonicalize path values - if part_count == 1: - if path: - raise ValueError('path should not be specified for top-level packages (trying to find {0})'.format(fullname)) - else: - # seed the path to the configured collection roots - path = self._n_collection_paths - - if part_count > 1 and path is None: - raise ValueError('path must be specified for subpackages (trying to find {0})'.format(fullname)) - - # NB: actual "find"ing is delegated to the constructors on the various loaders; they'll ImportError if not found - try: - if toplevel_pkg == 'ansible': - # something under the ansible package, delegate to our internal loader in case of redirections - return _AnsibleInternalRedirectLoader(fullname=fullname, path_list=path) - if part_count == 1: - return _AnsibleCollectionRootPkgLoader(fullname=fullname, path_list=path) - if part_count == 2: # ns pkg eg, ansible_collections, ansible_collections.somens - return _AnsibleCollectionNSPkgLoader(fullname=fullname, path_list=path) - elif part_count == 3: # collection pkg eg, ansible_collections.somens.somecoll - return _AnsibleCollectionPkgLoader(fullname=fullname, path_list=path) - # anything below the collection - return _AnsibleCollectionLoader(fullname=fullname, path_list=path) - except ImportError: - # TODO: log attempt to load context - return None - - -# Implements a path_hook finder for iter_modules (since it's only path based). This finder does not need to actually -# function as a finder in most cases, since our meta_path finder is consulted first for *almost* everything, except -# pkgutil.iter_modules, and under py2, pkgutil.get_data if the parent package passed has not been loaded yet. 
-class _AnsiblePathHookFinder: - def __init__(self, collection_finder, pathctx): - # when called from a path_hook, find_module doesn't usually get the path arg, so this provides our context - self._pathctx = to_native(pathctx) - self._collection_finder = collection_finder - if PY3: - # cache the native FileFinder (take advantage of its filesystem cache for future find/load requests) - self._file_finder = None - - # class init is fun- this method has a self arg that won't get used - def _get_filefinder_path_hook(self=None): - _file_finder_hook = None - if PY3: - # try to find the FileFinder hook to call for fallback path-based imports in Py3 - _file_finder_hook = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)] - if len(_file_finder_hook) != 1: - raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(_file_finder_hook))) - _file_finder_hook = _file_finder_hook[0] - - return _file_finder_hook - - _filefinder_path_hook = _get_filefinder_path_hook() - - def find_module(self, fullname, path=None): - # we ignore the passed in path here- use what we got from the path hook init - split_name = fullname.split('.') - toplevel_pkg = split_name[0] - - if toplevel_pkg == 'ansible_collections': - # collections content? delegate to the collection finder - return self._collection_finder.find_module(fullname, path=[self._pathctx]) - else: - # Something else; we'd normally restrict this to `ansible` descendent modules so that any weird loader - # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test - # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and - # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure - # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the - # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's - # built-in FS caching and byte-compilation for most things. - if PY3: - # create or consult our cached file finder for this path - if not self._file_finder: - try: - self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx) - except ImportError: - # FUTURE: log at a high logging level? This is normal for things like python36.zip on the path, but - # might not be in some other situation... 
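The repr()-based scan in _get_filefinder_path_hook above can be exercised in isolation. A minimal sketch, assuming CPython 3 (where the FileFinder path hook's repr() contains the string 'FileFinder'):

import sys

hooks = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)]
if len(hooks) != 1:
    raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(hooks)))

finder = hooks[0]('.')  # build a FileFinder rooted at the current directory
spec = finder.find_spec('setup')  # a ModuleSpec if ./setup.py exists, else None
print(finder, spec)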
- return None - - spec = self._file_finder.find_spec(fullname) - if not spec: - return None - return spec.loader - else: - # call py2's internal loader - # noinspection PyDeprecation - return pkgutil.ImpImporter(self._pathctx).find_module(fullname) # pylint: disable=deprecated-class - - def iter_modules(self, prefix): - # NB: this currently represents only what's on disk, and does not handle package redirection - return _iter_modules_impl([self._pathctx], prefix) - - def __repr__(self): - return "{0}(path='{1}')".format(self.__class__.__name__, self._pathctx) - - -class _AnsibleCollectionPkgLoaderBase: - _allows_package_code = False - - def __init__(self, fullname, path_list=None): - self._fullname = fullname - self._redirect_module = None - self._split_name = fullname.split('.') - self._rpart_name = fullname.rpartition('.') - self._parent_package_name = self._rpart_name[0] # eg ansible_collections for ansible_collections.somens, '' for toplevel - self._package_to_load = self._rpart_name[2] # eg somens for ansible_collections.somens - - self._source_code_path = None - self._decoded_source = None - self._compiled_code = None - - self._validate_args() - - self._candidate_paths = self._get_candidate_paths([to_native(p) for p in path_list]) - self._subpackage_search_paths = self._get_subpackage_search_paths(self._candidate_paths) - - self._validate_final() - - # allow subclasses to validate args and sniff split values before we start digging around - def _validate_args(self): - if self._split_name[0] != 'ansible_collections': - raise ImportError('this loader can only load packages from the ansible_collections package, not {0}'.format(self._fullname)) - - # allow subclasses to customize candidate path filtering - def _get_candidate_paths(self, path_list): - return [os.path.join(p, self._package_to_load) for p in path_list] - - # allow subclasses to customize finding paths - def _get_subpackage_search_paths(self, candidate_paths): - # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules) - return [p for p in candidate_paths if os.path.isdir(to_bytes(p))] - - # allow subclasses to customize state validation/manipulation before we return the loader instance - def _validate_final(self): - return - - @staticmethod - @contextmanager - def _new_or_existing_module(name, **kwargs): - # handle all-or-nothing sys.modules creation/use-existing/delete-on-exception-if-created behavior - created_module = False - module = sys.modules.get(name) - try: - if not module: - module = ModuleType(name) - created_module = True - sys.modules[name] = module - # always override the values passed, except name (allow reference aliasing) - for attr, value in kwargs.items(): - setattr(module, attr, value) - yield module - except Exception: - if created_module: - if sys.modules.get(name): - sys.modules.pop(name) - raise - - # basic module/package location support - # NB: this does not support distributed packages! - @staticmethod - def _module_file_from_path(leaf_name, path): - has_code = True - package_path = os.path.join(to_native(path), to_native(leaf_name)) - module_path = None - - # if the submodule is a package, assemble valid submodule paths, but stop looking for a module - if os.path.isdir(to_bytes(package_path)): - # is there a package init? 
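The _new_or_existing_module helper above reduces to an all-or-nothing sys.modules registration pattern. A simplified standalone equivalent (illustrative names, not the loader's exact code):

import sys
from contextlib import contextmanager
from types import ModuleType


@contextmanager
def new_or_existing(name, **attrs):
    created = name not in sys.modules
    module = sys.modules.setdefault(name, ModuleType(name))
    try:
        for attr, value in attrs.items():
            setattr(module, attr, value)
        yield module
    except Exception:
        if created:
            sys.modules.pop(name, None)  # never leave a half-initialized module behind
        raise


with new_or_existing('demo_pkg', __path__=[]) as mod:
    print(mod.__name__, mod.__path__)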
- module_path = os.path.join(package_path, '__init__.py') - if not os.path.isfile(to_bytes(module_path)): - module_path = os.path.join(package_path, '__synthetic__') - has_code = False - else: - module_path = package_path + '.py' - package_path = None - if not os.path.isfile(to_bytes(module_path)): - raise ImportError('{0} not found at {1}'.format(leaf_name, path)) - - return module_path, has_code, package_path - - def load_module(self, fullname): - # short-circuit redirect; we've already imported the redirected module, so just alias it and return it - if self._redirect_module: - sys.modules[self._fullname] = self._redirect_module - return self._redirect_module - - # we're actually loading a module/package - module_attrs = dict( - __loader__=self, - __file__=self.get_filename(fullname), - __package__=self._parent_package_name # sane default for non-packages - ) - - # eg, I am a package - if self._subpackage_search_paths is not None: # empty is legal - module_attrs['__path__'] = self._subpackage_search_paths - module_attrs['__package__'] = fullname # per PEP366 - - with self._new_or_existing_module(fullname, **module_attrs) as module: - # execute the module's code in its namespace - code_obj = self.get_code(fullname) - if code_obj is not None: # things like NS packages that can't have code on disk will return None - exec(code_obj, module.__dict__) - - return module - - def is_package(self, fullname): - if fullname != self._fullname: - raise ValueError('this loader cannot answer is_package for {0}, only {1}'.format(fullname, self._fullname)) - return self._subpackage_search_paths is not None - - def get_source(self, fullname): - if self._decoded_source: - return self._decoded_source - if fullname != self._fullname: - raise ValueError('this loader cannot load source for {0}, only {1}'.format(fullname, self._fullname)) - if not self._source_code_path: - return None - # FIXME: what do we want encoding/newline requirements to be? - self._decoded_source = self.get_data(self._source_code_path) - return self._decoded_source - - def get_data(self, path): - if not path: - raise ValueError('a path must be specified') - - # TODO: ensure we're being asked for a path below something we own - # TODO: try to handle redirects internally? 
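The load_module flow above (synthesize the module attributes, register the module, then exec the compiled source in its namespace) can be reproduced in a few standalone lines; all names here are illustrative:

import sys
from types import ModuleType

source = "GREETING = 'hello'\n"
code_obj = compile(source, '<synthetic>', 'exec', dont_inherit=True)

module = ModuleType('demo_loaded')
module.__file__ = '<synthetic>'
sys.modules['demo_loaded'] = module  # register before exec, as load_module does
exec(code_obj, module.__dict__)

import demo_loaded
print(demo_loaded.GREETING)  # -> hello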
- - if not path[0] == '/': - # relative to current package, search package paths if possible (this may not be necessary) - # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths] - raise ValueError('relative resource paths not supported') - else: - candidate_paths = [path] - - for p in candidate_paths: - b_path = to_bytes(p) - if os.path.isfile(b_path): - with open(b_path, 'rb') as fd: - return fd.read() - # HACK: if caller asks for __init__.py and the parent dir exists, return empty string (this keeps consistency - # with "collection subpackages don't require __init__.py" working everywhere with get_data) - elif b_path.endswith(b'__init__.py') and os.path.isdir(os.path.dirname(b_path)): - return '' - - return None - - def _synthetic_filename(self, fullname): - return '' - - def get_filename(self, fullname): - if fullname != self._fullname: - raise ValueError('this loader cannot find files for {0}, only {1}'.format(fullname, self._fullname)) - - filename = self._source_code_path - - if not filename and self.is_package(fullname): - if len(self._subpackage_search_paths) == 1: - filename = os.path.join(self._subpackage_search_paths[0], '__synthetic__') - else: - filename = self._synthetic_filename(fullname) - - return filename - - def get_code(self, fullname): - if self._compiled_code: - return self._compiled_code - - # this may or may not be an actual filename, but it's the value we'll use for __file__ - filename = self.get_filename(fullname) - if not filename: - filename = '' - - source_code = self.get_source(fullname) - - # for things like synthetic modules that really have no source on disk, don't return a code object at all - # vs things like an empty package init (which has an empty string source on disk) - if source_code is None: - return None - - self._compiled_code = compile(source=source_code, filename=filename, mode='exec', flags=0, dont_inherit=True) - - return self._compiled_code - - def iter_modules(self, prefix): - return _iter_modules_impl(self._subpackage_search_paths, prefix) - - def __repr__(self): - return '{0}(path={1})'.format(self.__class__.__name__, self._subpackage_search_paths or self._source_code_path) - - -class _AnsibleCollectionRootPkgLoader(_AnsibleCollectionPkgLoaderBase): - def _validate_args(self): - super(_AnsibleCollectionRootPkgLoader, self)._validate_args() - if len(self._split_name) != 1: - raise ImportError('this loader can only load the ansible_collections toplevel package, not {0}'.format(self._fullname)) - - -# Implements Ansible's custom namespace package support. -# The ansible_collections package and one level down (collections namespaces) are Python namespace packages -# that search across all configured collection roots. The collection package (two levels down) is the first one found -# on the configured collection root path, and Python namespace package aggregation is not allowed at or below -# the collection. Implements implicit package (package dir) support for both Py2/3. Package init code is ignored -# by this loader. 
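A throwaway illustration of the 'first collection root wins' rule described above, with temporary directories standing in for configured collection roots:

import os
import tempfile

roots = [tempfile.mkdtemp(), tempfile.mkdtemp()]
for root in roots:
    os.makedirs(os.path.join(root, 'ansible_collections', 'ns', 'coll'))

candidates = [os.path.join(r, 'ansible_collections', 'ns', 'coll') for r in roots]
existing = [p for p in candidates if os.path.isdir(p)]

namespace_search_paths = existing  # the ansible_collections and namespace levels aggregate all roots
collection_search_paths = existing[:1]  # the collection level keeps only the first match
print(collection_search_paths)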
-class _AnsibleCollectionNSPkgLoader(_AnsibleCollectionPkgLoaderBase): - def _validate_args(self): - super(_AnsibleCollectionNSPkgLoader, self)._validate_args() - if len(self._split_name) != 2: - raise ImportError('this loader can only load collections namespace packages, not {0}'.format(self._fullname)) - - def _validate_final(self): - # special-case the `ansible` namespace, since `ansible.builtin` is magical - if not self._subpackage_search_paths and self._package_to_load != 'ansible': - raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths)) - - -# handles locating the actual collection package and associated metadata -class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase): - def _validate_args(self): - super(_AnsibleCollectionPkgLoader, self)._validate_args() - if len(self._split_name) != 3: - raise ImportError('this loader can only load collection packages, not {0}'.format(self._fullname)) - - def _validate_final(self): - if self._split_name[1:3] == ['ansible', 'builtin']: - # we don't want to allow this one to have on-disk search capability - self._subpackage_search_paths = [] - elif not self._subpackage_search_paths: - raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths)) - else: - # only search within the first collection we found - self._subpackage_search_paths = [self._subpackage_search_paths[0]] - - def load_module(self, fullname): - if not _meta_yml_to_dict: - raise ValueError('ansible.utils.collection_loader._meta_yml_to_dict is not set') - - module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname) - - module._collection_meta = {} - # TODO: load collection metadata, cache in __loader__ state - - collection_name = '.'.join(self._split_name[1:3]) - - if collection_name == 'ansible.builtin': - # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro - ansible_pkg_path = os.path.dirname(import_module('ansible').__file__) - metadata_path = os.path.join(ansible_pkg_path, 'config/ansible_builtin_runtime.yml') - with open(to_bytes(metadata_path), 'rb') as fd: - raw_routing = fd.read() - else: - b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml')) - if os.path.isfile(b_routing_meta_path): - with open(b_routing_meta_path, 'rb') as fd: - raw_routing = fd.read() - else: - raw_routing = '' - try: - if raw_routing: - routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml')) - module._collection_meta = self._canonicalize_meta(routing_dict) - except Exception as ex: - raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex))) - - AnsibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__)) - - return module - - def _canonicalize_meta(self, meta_dict): - # TODO: rewrite import keys and all redirect targets that start with .. (current namespace) and . (current collection) - # OR we could do it all on the fly? 
- # if not meta_dict: - # return {} - # - # ns_name = '.'.join(self._split_name[0:2]) - # collection_name = '.'.join(self._split_name[0:3]) - # - # # - # for routing_type, routing_type_dict in iteritems(meta_dict.get('plugin_routing', {})): - # for plugin_key, plugin_dict in iteritems(routing_type_dict): - # redirect = plugin_dict.get('redirect', '') - # if redirect.startswith('..'): - # redirect = redirect[2:] - - return meta_dict - - -# loads everything under a collection, including handling redirections defined by the collection -class _AnsibleCollectionLoader(_AnsibleCollectionPkgLoaderBase): - # HACK: stash this in a better place - _redirected_package_map = {} - _allows_package_code = True - - def _validate_args(self): - super(_AnsibleCollectionLoader, self)._validate_args() - if len(self._split_name) < 4: - raise ValueError('this loader is only for sub-collection modules/packages, not {0}'.format(self._fullname)) - - def _get_candidate_paths(self, path_list): - if len(path_list) != 1 and self._split_name[1:3] != ['ansible', 'builtin']: - raise ValueError('this loader requires exactly one path to search') - - return path_list - - def _get_subpackage_search_paths(self, candidate_paths): - collection_name = '.'.join(self._split_name[1:3]) - collection_meta = _get_collection_metadata(collection_name) - - # check for explicit redirection, as well as ancestor package-level redirection (only load the actual code once!) - redirect = None - explicit_redirect = False - - routing_entry = _nested_dict_get(collection_meta, ['import_redirection', self._fullname]) - if routing_entry: - redirect = routing_entry.get('redirect') - - if redirect: - explicit_redirect = True - else: - redirect = _get_ancestor_redirect(self._redirected_package_map, self._fullname) - - # NB: package level redirection requires hooking all future imports beneath the redirected source package - # in order to ensure sanity on future relative imports. We always import everything under its "real" name, - # then add a sys.modules entry with the redirected name using the same module instance. If we naively imported - # the source for each redirection, most submodules would import OK, but we'd have N runtime copies of the module - # (one for each name), and relative imports that ascend above the redirected package would break (since they'd - # see the redirected ancestor package contents instead of the package where they actually live). - if redirect: - # FIXME: wrap this so we can be explicit about a failed redirection - self._redirect_module = import_module(redirect) - if explicit_redirect and hasattr(self._redirect_module, '__path__') and self._redirect_module.__path__: - # if the import target looks like a package, store its name so we can rewrite future descendent loads - self._redirected_package_map[self._fullname] = redirect - - # if we redirected, don't do any further custom package logic - return None - - # we're not doing a redirect- try to find what we need to actually load a module/package - - # this will raise ImportError if we can't find the requested module/package at all - if not candidate_paths: - # no place to look, just ImportError - raise ImportError('package has no paths') - - found_path, has_code, package_path = self._module_file_from_path(self._package_to_load, candidate_paths[0]) - - # still here? we found something to load... 
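The redirect lookup above consults a routing dict shaped like a collection's meta/runtime.yml. A self-contained sketch of that lookup, using hypothetical collection and module names:

collection_meta = {
    'import_redirection': {
        'ansible_collections.ns.coll.plugins.module_utils.old_util': {
            'redirect': 'ansible_collections.ns.coll.plugins.module_utils.new_util',
        },
    },
}


def nested_dict_get(root_dict, key_list):
    # the same walk as _nested_dict_get, defined later in this file
    value = root_dict
    for key in key_list:
        value = value.get(key)
        if not value:
            return None
    return value


fullname = 'ansible_collections.ns.coll.plugins.module_utils.old_util'
print(nested_dict_get(collection_meta, ['import_redirection', fullname, 'redirect']))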
- if has_code: - self._source_code_path = found_path - - if package_path: - return [package_path] # always needs to be a list - - return None - - -# This loader only answers for intercepted Ansible Python modules. Normal imports will fail here and be picked up later -# by our path_hook importer (which proxies the built-in import mechanisms, allowing normal caching etc to occur) -class _AnsibleInternalRedirectLoader: - def __init__(self, fullname, path_list): - self._redirect = None - - split_name = fullname.split('.') - toplevel_pkg = split_name[0] - module_to_load = split_name[-1] - - if toplevel_pkg != 'ansible': - raise ImportError('not interested') - - builtin_meta = _get_collection_metadata('ansible.builtin') - - routing_entry = _nested_dict_get(builtin_meta, ['import_redirection', fullname]) - if routing_entry: - self._redirect = routing_entry.get('redirect') - - if not self._redirect: - raise ImportError('not redirected, go ask path_hook') - - def load_module(self, fullname): - # since we're delegating to other loaders, this should only be called for internal redirects where we answered - # find_module with this loader, in which case we'll just directly import the redirection target, insert it into - # sys.modules under the name it was requested by, and return the original module. - - # should never see this - if not self._redirect: - raise ValueError('no redirect found for {0}'.format(fullname)) - - # FIXME: smuggle redirection context, provide warning/error that we tried and failed to redirect - mod = import_module(self._redirect) - sys.modules[fullname] = mod - return mod - - -class AnsibleCollectionRef: - # FUTURE: introspect plugin loaders to get these dynamically? - VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection', - 'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup', - 'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy', - 'terminal', 'test', 'vars', 'playbook']) - - # FIXME: tighten this up to match Python identifier reqs, etc - VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$')) - VALID_FQCR_RE = re.compile(to_text(r'^\w+(\.\w+){2,}$')) # can have 0-N included subdirs as well - - def __init__(self, collection_name, subdirs, resource, ref_type): - """ - Create an AnsibleCollectionRef from components - :param collection_name: a collection name of the form 'namespace.collectionname' - :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2') - :param resource: the name of the resource being referenced (eg, 'mymodule', 'someaction', 'a_role') - :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment' - """ - collection_name = to_text(collection_name, errors='strict') - if subdirs is not None: - subdirs = to_text(subdirs, errors='strict') - resource = to_text(resource, errors='strict') - ref_type = to_text(ref_type, errors='strict') - - if not self.is_valid_collection_name(collection_name): - raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name))) - - if ref_type not in self.VALID_REF_TYPES: - raise ValueError('invalid collection ref_type: {0}'.format(ref_type)) - - self.collection = collection_name - if subdirs: - if not re.match(self.VALID_SUBDIRS_RE, subdirs): - raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs))) - self.subdirs = subdirs - else: - self.subdirs = u'' - - 
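# For orientation, a hypothetical worked example of the mapping assembled below:
#   AnsibleCollectionRef('ns.coll', 'subdir1', 'mymodule', 'module_utils') yields
#     n_python_collection_package_name -> 'ansible_collections.ns.coll'
#     n_python_package_name            -> 'ansible_collections.ns.coll.plugins.module_utils.subdir1'
#     fqcr                             -> 'ns.coll.subdir1.mymodule'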
self.resource = resource - self.ref_type = ref_type - - package_components = [u'ansible_collections', self.collection] - fqcr_components = [self.collection] - - self.n_python_collection_package_name = to_native('.'.join(package_components)) - - if self.ref_type == u'role': - package_components.append(u'roles') - elif self.ref_type == u'playbook': - package_components.append(u'playbooks') - else: - # we assume it's a plugin - package_components += [u'plugins', self.ref_type] - - if self.subdirs: - package_components.append(self.subdirs) - fqcr_components.append(self.subdirs) - - if self.ref_type in (u'role', u'playbook'): - # playbooks and roles are their own resource - package_components.append(self.resource) - - fqcr_components.append(self.resource) - - self.n_python_package_name = to_native('.'.join(package_components)) - self._fqcr = u'.'.join(fqcr_components) - - def __repr__(self): - return 'AnsibleCollectionRef(collection={0!r}, subdirs={1!r}, resource={2!r})'.format(self.collection, self.subdirs, self.resource) - - @property - def fqcr(self): - return self._fqcr - - @staticmethod - def from_fqcr(ref, ref_type): - """ - Parse a string as a fully-qualified collection reference, raises ValueError if invalid - :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource') - :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment' - :return: a populated AnsibleCollectionRef object - """ - # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name), - # we split the resource name off the right, split ns and coll off the left, and we're left with any optional - # subdirs that need to be added back below the plugin-specific subdir we'll add. So: - # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource - # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource - # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename - if not AnsibleCollectionRef.is_valid_fqcr(ref): - raise ValueError('{0} is not a valid collection reference'.format(to_native(ref))) - - ref = to_text(ref, errors='strict') - ref_type = to_text(ref_type, errors='strict') - ext = '' - - if ref_type == u'playbook' and ref.endswith(PB_EXTENSIONS): - resource_splitname = ref.rsplit(u'.', 2) - package_remnant = resource_splitname[0] - resource = resource_splitname[1] - ext = '.' 
+ resource_splitname[2] - else: - resource_splitname = ref.rsplit(u'.', 1) - package_remnant = resource_splitname[0] - resource = resource_splitname[1] - - # split the left two components of the collection package name off, anything remaining is plugin-type - # specific subdirs to be added back on below the plugin type - package_splitname = package_remnant.split(u'.', 2) - if len(package_splitname) == 3: - subdirs = package_splitname[2] - else: - subdirs = u'' - - collection_name = u'.'.join(package_splitname[0:2]) - - return AnsibleCollectionRef(collection_name, subdirs, resource + ext, ref_type) - - @staticmethod - def try_parse_fqcr(ref, ref_type): - """ - Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error) - :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource') - :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment' - :return: a populated AnsibleCollectionRef object on successful parsing, else None - """ - try: - return AnsibleCollectionRef.from_fqcr(ref, ref_type) - except ValueError: - pass - - @staticmethod - def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name): - """ - Utility method to convert from a PluginLoader dir name to a plugin ref_type - :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library') - :return: the corresponding plugin ref_type (eg, 'action', 'role') - """ - legacy_plugin_dir_name = to_text(legacy_plugin_dir_name) - - plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'') - - if plugin_type == u'library': - plugin_type = u'modules' - - if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES: - raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name))) - - return plugin_type - - @staticmethod - def is_valid_fqcr(ref, ref_type=None): - """ - Validates whether a string is a well-formed fully-qualified collection reference (does not look up the collection itself) - :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource') - :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment' - :return: True if the collection ref passed is well-formed, False otherwise - """ - - ref = to_text(ref) - - if not ref_type: - return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref)) - - return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)) - - @staticmethod - def is_valid_collection_name(collection_name): - """ - Validates if the given string is a well-formed collection name (does not look up the collection itself) - :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname') - :return: True if the collection name passed is well-formed, False otherwise - """ - - collection_name = to_text(collection_name) - - if collection_name.count(u'.') != 1: - return False - - return all( - # NOTE: keywords and identifiers are different in different Pythons - not iskeyword(ns_or_name) and is_python_identifier(ns_or_name) - for ns_or_name in collection_name.split(u'.') - ) - - -def _get_collection_playbook_path(playbook): - - acr = AnsibleCollectionRef.try_parse_fqcr(playbook, u'playbook') - if acr: - try: - # get_collection_path - pkg = import_module(acr.n_python_collection_package_name) - except (IOError, ModuleNotFoundError) as e: - # leaving 
e as debug target, even though not used in normal code - pkg = None - - if pkg: - cpath = os.path.join(sys.modules[acr.n_python_collection_package_name].__file__.replace('__synthetic__', 'playbooks')) - - if acr.subdirs: - paths = [to_native(x) for x in acr.subdirs.split(u'.')] - paths.insert(0, cpath) - cpath = os.path.join(*paths) - - path = os.path.join(cpath, to_native(acr.resource)) - if os.path.exists(to_bytes(path)): - return acr.resource, path, acr.collection - elif not acr.resource.endswith(PB_EXTENSIONS): - for ext in PB_EXTENSIONS: - path = os.path.join(cpath, to_native(acr.resource + ext)) - if os.path.exists(to_bytes(path)): - return acr.resource, path, acr.collection - return None - - -def _get_collection_role_path(role_name, collection_list=None): - return _get_collection_resource_path(role_name, u'role', collection_list) - - -def _get_collection_resource_path(name, ref_type, collection_list=None): - - if ref_type == u'playbook': - # they are handled a bit differently due to 'extension variance' and no collection_list - return _get_collection_playbook_path(name) - - acr = AnsibleCollectionRef.try_parse_fqcr(name, ref_type) - if acr: - # looks like a valid qualified collection ref; skip the collection_list - collection_list = [acr.collection] - subdirs = acr.subdirs - resource = acr.resource - elif not collection_list: - return None # not a FQ and no collection search list spec'd, nothing to do - else: - resource = name # treat as unqualified, loop through the collection search list to try and resolve - subdirs = '' - - for collection_name in collection_list: - try: - acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type=ref_type) - # FIXME: error handling/logging; need to catch any import failures and move along - pkg = import_module(acr.n_python_package_name) - - if pkg is not None: - # the package is now loaded, get the collection's package and ask where it lives - path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict')) - return resource, to_text(path, errors='surrogate_or_strict'), collection_name - - except (IOError, ModuleNotFoundError) as e: - continue - except Exception as ex: - # FIXME: pick out typical import errors first, then error logging - continue - - return None - - -def _get_collection_name_from_path(path): - """ - Return the containing collection name for a given path, or None if the path is not below a configured collection, or - the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured - collection roots). - :param path: path to evaluate for collection containment - :return: collection name or None - """ - - # ensure we compare full paths since pkg path will be abspath - path = to_native(os.path.abspath(to_bytes(path))) - - path_parts = path.split('/') - if path_parts.count('ansible_collections') != 1: - return None - - ac_pos = path_parts.index('ansible_collections') - - # make sure it's followed by at least a namespace and collection name - if len(path_parts) < ac_pos + 3: - return None - - candidate_collection_name = '.'.join(path_parts[ac_pos + 1:ac_pos + 3]) - - try: - # we've got a name for it, now see if the path prefix matches what the loader sees - imported_pkg_path = to_native(os.path.dirname(to_bytes(import_module('ansible_collections.' 
+ candidate_collection_name).__file__))) - except ImportError: - return None - - # reassemble the original path prefix up to the collection name, and it should match what we just imported. If not, - # this is probably a collection root that's not configured. - - original_path_prefix = os.path.join('/', *path_parts[0:ac_pos + 3]) - - imported_pkg_path = to_native(os.path.abspath(to_bytes(imported_pkg_path))) - if original_path_prefix != imported_pkg_path: - return None - - return candidate_collection_name - - -def _get_import_redirect(collection_meta_dict, fullname): - if not collection_meta_dict: - return None - - return _nested_dict_get(collection_meta_dict, ['import_redirection', fullname, 'redirect']) - - -def _get_ancestor_redirect(redirected_package_map, fullname): - # walk the requested module's ancestor packages to see if any have been previously redirected - cur_pkg = fullname - while cur_pkg: - cur_pkg = cur_pkg.rpartition('.')[0] - ancestor_redirect = redirected_package_map.get(cur_pkg) - if ancestor_redirect: - # rewrite the prefix on fullname so we import the target first, then alias it - redirect = ancestor_redirect + fullname[len(cur_pkg):] - return redirect - return None - - -def _nested_dict_get(root_dict, key_list): - cur_value = root_dict - for key in key_list: - cur_value = cur_value.get(key) - if not cur_value: - return None - - return cur_value - - -def _iter_modules_impl(paths, prefix=''): - # NB: this currently only iterates what's on disk- redirected modules are not considered - if not prefix: - prefix = '' - else: - prefix = to_native(prefix) - # yield (module_loader, name, ispkg) for each module/pkg under path - # TODO: implement ignore/silent catch for unreadable? - for b_path in map(to_bytes, paths): - if not os.path.isdir(b_path): - continue - for b_basename in sorted(os.listdir(b_path)): - b_candidate_module_path = os.path.join(b_path, b_basename) - if os.path.isdir(b_candidate_module_path): - # exclude things that obviously aren't Python package dirs - # FIXME: this dir is adjustable in py3.8+, check for it - if b'.' in b_basename or b_basename == b'__pycache__': - continue - - # TODO: proper string handling? - yield prefix + to_native(b_basename), True - else: - # FIXME: match builtin ordering for package/dir/file, support compiled? - if b_basename.endswith(b'.py') and b_basename != b'__init__.py': - yield prefix + to_native(os.path.splitext(b_basename)[0]), False - - -def _get_collection_metadata(collection_name): - collection_name = to_native(collection_name) - if not collection_name or not isinstance(collection_name, string_types) or len(collection_name.split('.')) != 2: - raise ValueError('collection_name must be a non-empty string of the form namespace.collection') - - try: - collection_pkg = import_module('ansible_collections.' 
+ collection_name) - except ImportError: - raise ValueError('unable to locate collection {0}'.format(collection_name)) - - _collection_meta = getattr(collection_pkg, '_collection_meta', None) - - if _collection_meta is None: - raise ValueError('collection metadata was not loaded for collection {0}'.format(collection_name)) - - return _collection_meta diff --git a/test/lib/ansible_test/_data/legacy_collection_loader/_collection_meta.py b/test/lib/ansible_test/_data/legacy_collection_loader/_collection_meta.py deleted file mode 100644 index dd36c3651e..0000000000 --- a/test/lib/ansible_test/_data/legacy_collection_loader/_collection_meta.py +++ /dev/null @@ -1,37 +0,0 @@ -# (c) 2019 Ansible Project -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) - -# CAUTION: There are two implementations of the collection loader. -# They must be kept functionally identical, although their implementations may differ. -# -# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory. -# It must function on all Python versions supported on the controller. -# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_data/legacy_collection_loader/" directory. -# It must function on all Python versions supported on managed hosts which are not supported by the controller. - -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -try: - from collections.abc import Mapping # pylint: disable=ansible-bad-import-from -except ImportError: - from collections import Mapping # pylint: disable=ansible-bad-import-from,deprecated-class - -from ansible.module_utils.common.yaml import yaml_load - - -def _meta_yml_to_dict(yaml_string_data, content_id): - """ - Converts string YAML dictionary to a Python dictionary. This function may be monkeypatched to another implementation - by some tools (eg the import sanity test). - :param yaml_string_data: a bytes-ish YAML dictionary - :param content_id: a unique ID representing the content to allow other implementations to cache the output - :return: a Python dictionary representing the YAML dictionary content - """ - # NB: content_id is passed in, but not used by this implementation - routing_dict = yaml_load(yaml_string_data) - if not routing_dict: - routing_dict = {} - if not isinstance(routing_dict, Mapping): - raise ValueError('collection metadata must be an instance of Python Mapping') - return routing_dict diff --git a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_collections.py b/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_collections.py deleted file mode 100644 index c26971fc5f..0000000000 --- a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_collections.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Enable unit testing of Ansible collections. 
PYTEST_DONT_REWRITE""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import sys - -# set by ansible-test to a single directory, rather than a list of directories as supported by Ansible itself -ANSIBLE_COLLECTIONS_PATH = os.path.join(os.environ['ANSIBLE_COLLECTIONS_PATH'], 'ansible_collections') - -# set by ansible-test to the minimum python version supported on the controller -ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION = tuple(int(x) for x in os.environ['ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION'].split('.')) - - -# this monkeypatch to _pytest.pathlib.resolve_package_path fixes PEP420 resolution for collections in pytest >= 6.0.0 -# NB: this code should never run under py2 -def collection_resolve_package_path(path): - """Configure the Python package path so that pytest can find our collections.""" - for parent in path.parents: - if str(parent) == ANSIBLE_COLLECTIONS_PATH: - return parent - - raise Exception('File "%s" not found in collection path "%s".' % (path, ANSIBLE_COLLECTIONS_PATH)) - - -# this monkeypatch to py.path.local.LocalPath.pypkgpath fixes PEP420 resolution for collections in pytest < 6.0.0 -def collection_pypkgpath(self): - """Configure the Python package path so that pytest can find our collections.""" - for parent in self.parts(reverse=True): - if str(parent) == ANSIBLE_COLLECTIONS_PATH: - return parent - - raise Exception('File "%s" not found in collection path "%s".' % (self.strpath, ANSIBLE_COLLECTIONS_PATH)) - - -def pytest_configure(): - """Configure this pytest plugin.""" - try: - if pytest_configure.executed: - return - except AttributeError: - pytest_configure.executed = True - - if sys.version_info >= ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION: - # noinspection PyProtectedMember - from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder - else: - # noinspection PyProtectedMember - from ansible_test._internal.legacy_collection_loader._collection_finder import _AnsibleCollectionFinder - - # allow unit tests to import code from collections - - # noinspection PyProtectedMember - _AnsibleCollectionFinder(paths=[os.path.dirname(ANSIBLE_COLLECTIONS_PATH)])._install() # pylint: disable=protected-access - - try: - # noinspection PyProtectedMember - from _pytest import pathlib as _pytest_pathlib - except ImportError: - _pytest_pathlib = None - - if hasattr(_pytest_pathlib, 'resolve_package_path'): - _pytest_pathlib.resolve_package_path = collection_resolve_package_path - else: - # looks like pytest <= 6.0.0, use the old hack against py.path - # noinspection PyProtectedMember - import py._path.local - - # force collections unit tests to be loaded with the ansible_collections namespace - # original idea from https://stackoverflow.com/questions/50174130/how-do-i-pytest-a-project-using-pep-420-namespace-packages/50175552#50175552 - # noinspection PyProtectedMember - py._path.local.LocalPath.pypkgpath = collection_pypkgpath # pylint: disable=protected-access - - -pytest_configure() diff --git a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_coverage.py b/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_coverage.py deleted file mode 100644 index b05298ab0b..0000000000 --- a/test/lib/ansible_test/_data/pytest/plugins/ansible_pytest_coverage.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Monkey patch os._exit when running under coverage so we don't lose coverage data in forks, such as with `pytest --boxed`. 
PYTEST_DONT_REWRITE""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -def pytest_configure(): - """Configure this pytest plugin.""" - try: - if pytest_configure.executed: - return - except AttributeError: - pytest_configure.executed = True - - try: - import coverage - except ImportError: - coverage = None - - try: - coverage.Coverage - except AttributeError: - coverage = None - - if not coverage: - return - - import gc - import os - - coverage_instances = [] - - for obj in gc.get_objects(): - if isinstance(obj, coverage.Coverage): - coverage_instances.append(obj) - - if not coverage_instances: - coverage_config = os.environ.get('COVERAGE_CONF') - - if not coverage_config: - return - - coverage_output = os.environ.get('COVERAGE_FILE') - - if not coverage_output: - return - - cov = coverage.Coverage(config_file=coverage_config) - coverage_instances.append(cov) - else: - cov = None - - # noinspection PyProtectedMember - os_exit = os._exit # pylint: disable=protected-access - - def coverage_exit(*args, **kwargs): - for instance in coverage_instances: - instance.stop() - instance.save() - - os_exit(*args, **kwargs) - - os._exit = coverage_exit # pylint: disable=protected-access - - if cov: - cov.start() - - -pytest_configure() diff --git a/test/lib/ansible_test/_data/quiet_pip.py b/test/lib/ansible_test/_data/quiet_pip.py deleted file mode 100644 index e1bb824646..0000000000 --- a/test/lib/ansible_test/_data/quiet_pip.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Custom entry-point for pip that filters out unwanted logging and warnings.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import logging -import re -import runpy -import warnings - -BUILTIN_FILTERER_FILTER = logging.Filterer.filter - -LOGGING_MESSAGE_FILTER = re.compile("^(" - ".*Running pip install with root privileges is generally not a good idea.*|" # custom Fedora patch [1] - "DEPRECATION: Python 2.7 will reach the end of its life .*|" # pip 19.2.3 - "Ignoring .*: markers .* don't match your environment|" - "Looking in indexes: .*|" # pypi-test-container - "Requirement already satisfied.*" - ")$") - -# [1] https://src.fedoraproject.org/rpms/python-pip/blob/master/f/emit-a-warning-when-running-with-root-privileges.patch - -WARNING_MESSAGE_FILTERS = ( - # DEPRECATION: Python 2.6 is no longer supported by the Python core team, please upgrade your Python. - # A future version of pip will drop support for Python 2.6 - 'Python 2.6 is no longer supported by the Python core team, ', - - # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:137: InsecurePlatformWarning: - # A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. - # You can upgrade to a newer version of Python to solve this. - # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings - 'A true SSLContext object is not available. ', - - # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:339: SNIMissingWarning: - # An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform. - # This may cause the server to present an incorrect TLS certificate, which can cause validation failures. - # You can upgrade to a newer version of Python to solve this. 
- # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings - 'An HTTPS request has been made, but the SNI ', - - # DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained. - # pip 21.0 will drop support for Python 2.7 in January 2021. - # More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support - 'DEPRECATION: Python 2.7 reached the end of its life ', - - # DEPRECATION: Python 3.5 reached the end of its life on September 13th, 2020. Please upgrade your Python as Python 3.5 is no longer maintained. - # pip 21.0 will drop support for Python 3.5 in January 2021. pip 21.0 will remove support for this functionality. - 'DEPRECATION: Python 3.5 reached the end of its life ', -) - - -def custom_filterer_filter(self, record): - """Globally omit logging of unwanted messages.""" - if LOGGING_MESSAGE_FILTER.search(record.getMessage()): - return 0 - - return BUILTIN_FILTERER_FILTER(self, record) - - -def main(): - """Main program entry point.""" - # Filtering logging output globally avoids having to intercept stdout/stderr. - # It also avoids problems with loss of color output and mixing up the order of stdout/stderr messages. - logging.Filterer.filter = custom_filterer_filter - - for message_filter in WARNING_MESSAGE_FILTERS: - # Setting filterwarnings in code is necessary because of the following: - # Python 2.6 does not support the PYTHONWARNINGS environment variable. It does support the -W option. - # Python 2.7 cannot use the -W option to match warning text after a colon. This makes it impossible to match specific warning messages. - warnings.filterwarnings('ignore', message_filter) - - runpy.run_module('pip.__main__', run_name='__main__', alter_sys=True) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json b/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json deleted file mode 100644 index 12bbe0d11e..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "all_targets": true, - "prefixes": [ - "lib/ansible/modules/", - "lib/ansible/plugins/action/", - "plugins/modules/", - "plugins/action/" - ], - "extensions": [ - ".py" - ], - "output": "path-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py b/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py deleted file mode 100755 index 65142e0033..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/action-plugin-docs.py +++ /dev/null @@ -1,68 +0,0 @@ -#!/usr/bin/env python -"""Test to verify action plugins have an associated module to provide documentation.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import sys - - -def main(): - """Main entry point.""" - paths = sys.argv[1:] or sys.stdin.read().splitlines() - - module_names = set() - - module_prefixes = { - 'lib/ansible/modules/': True, - 'plugins/modules/': False, - } - - action_prefixes = { - 'lib/ansible/plugins/action/': True, - 'plugins/action/': False, - } - - for path in paths: - full_name = get_full_name(path, module_prefixes) - - if full_name: - module_names.add(full_name) - - for path in paths: - full_name = get_full_name(path, action_prefixes) - - if full_name and full_name not in module_names: - print('%s: 
action plugin has no matching module to provide documentation' % path) - - -def get_full_name(path, prefixes): - """Return the full name of the plugin at the given path by matching against the given path prefixes, or None if no match is found.""" - for prefix, flat in prefixes.items(): - if path.startswith(prefix): - relative_path = os.path.relpath(path, prefix) - - if flat: - full_name = os.path.basename(relative_path) - else: - full_name = relative_path - - full_name = os.path.splitext(full_name)[0] - - name = os.path.basename(full_name) - - if name == '__init__': - return None - - if name.startswith('_'): - name = name[1:] - - full_name = os.path.join(os.path.dirname(full_name), name).replace(os.path.sep, '.') - - return full_name - - return None - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog.json b/test/lib/ansible_test/_data/sanity/code-smell/changelog.json deleted file mode 100644 index 7d19f101f2..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/changelog.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "intercept": true, - "prefixes": [ - "changelogs/config.yaml", - "changelogs/fragments/" - ], - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog.py b/test/lib/ansible_test/_data/sanity/code-smell/changelog.py deleted file mode 100755 index 2ccfb24f23..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/changelog.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import sys -import subprocess - - -def main(): - paths = sys.argv[1:] or sys.stdin.read().splitlines() - - allowed_extensions = ('.yml', '.yaml') - config_path = 'changelogs/config.yaml' - - # config must be detected independent of the file list since the file list only contains files under test (changed) - has_config = os.path.exists(config_path) - paths_to_check = [] - for path in paths: - if path == config_path: - continue - - if path.startswith('changelogs/fragments/.'): - if path in ('changelogs/fragments/.keep', 'changelogs/fragments/.gitkeep'): - continue - - print('%s:%d:%d: file must not be a dotfile' % (path, 0, 0)) - continue - - ext = os.path.splitext(path)[1] - - if ext not in allowed_extensions: - print('%s:%d:%d: extension must be one of: %s' % (path, 0, 0, ', '.join(allowed_extensions))) - - paths_to_check.append(path) - - if not has_config: - print('changelogs/config.yaml:0:0: config file does not exist') - return - - if not paths_to_check: - return - - cmd = [sys.executable, '-m', 'antsibull_changelog', 'lint'] + paths_to_check - - # The sphinx module is a soft dependency for rstcheck, which is used by the changelog linter. - # If sphinx is found it will be loaded by rstcheck, which can affect the results of the test. - # To maintain consistency across environments, loading of sphinx is blocked, since any version (or no version) of sphinx may be present. 
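The blocking technique described above can be demonstrated standalone: a stub module placed at the front of PYTHONPATH raises ImportError, so a child process cannot import the real module. The stub written here is a throwaway equivalent of the changelog/sphinx.py helper deleted later in this patch:

import os
import subprocess
import sys
import tempfile

shim_dir = tempfile.mkdtemp()
with open(os.path.join(shim_dir, 'sphinx.py'), 'w') as f:
    f.write("raise ImportError('sphinx blocked for consistent test results')\n")

env = os.environ.copy()
env['PYTHONPATH'] = shim_dir + os.pathsep + env.get('PYTHONPATH', '')

rc = subprocess.call([sys.executable, '-c', 'import sphinx'], env=env)
print(rc)  # non-zero exit: the stub blocked the import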
- env = os.environ.copy() - env.update(PYTHONPATH='%s:%s' % (os.path.join(os.path.dirname(__file__), 'changelog'), env['PYTHONPATH'])) - - subprocess.call(cmd, env=env) # ignore the return code, rely on the output instead - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/changelog/sphinx.py b/test/lib/ansible_test/_data/sanity/code-smell/changelog/sphinx.py deleted file mode 100644 index 000c29e4e9..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/changelog/sphinx.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Block the sphinx module from being loaded.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -raise ImportError('The sphinx module has been prevented from loading to maintain consistent test results.') diff --git a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json b/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json deleted file mode 100644 index 9835f9b6c8..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "prefixes": [ - "lib/ansible/modules/", - "lib/ansible/module_utils/", - "plugins/modules/", - "plugins/module_utils/", - "test/units/", - "tests/unit/" - ], - "files": [ - "__init__.py" - ], - "output": "path-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py b/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py deleted file mode 100755 index 8bcd7f9ed9..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/empty-init.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - if os.path.getsize(path) > 0: - print('%s: empty __init__.py required' % path) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json b/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json deleted file mode 100644 index 4ebce32c8c..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "py2_compat": true, - "output": "path-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py b/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py deleted file mode 100755 index 81081eed7b..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/future-import-boilerplate.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import ast -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'rb') as path_fd: - lines = path_fd.read().splitlines() - - missing = True - if not lines: - # Files are allowed to be empty of everything including boilerplate - missing = False - - for text in lines: - if text in (b'from __future__ import (absolute_import, division, print_function)', - b'from __future__ import absolute_import, division, print_function'): - missing = False - break - - if missing: - with open(path) as file: - contents = file.read() - - # noinspection PyBroadException - try: - node = ast.parse(contents) - - # files consisting of only assignments have no need for future import 
boilerplate - # the only exception would be division during assignment, but we'll overlook that for simplicity - # the most likely case is that of a documentation only python file - if all(isinstance(statement, ast.Assign) for statement in node.body): - missing = False - except Exception: # pylint: disable=broad-except - pass # the compile sanity test will report this error - - if missing: - print('%s: missing: from __future__ import (absolute_import, division, print_function)' % path) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json b/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json deleted file mode 100644 index db5c3c9809..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "text": true, - "output": "path-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py b/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py deleted file mode 100755 index 1e4212d1b8..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/line-endings.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'rb') as path_fd: - contents = path_fd.read() - - if b'\r' in contents: - print('%s: use "\\n" for line endings instead of "\\r\\n"' % path) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json b/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json deleted file mode 100644 index 4ebce32c8c..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "py2_compat": true, - "output": "path-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py b/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py deleted file mode 100755 index 28d06f363b..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/metaclass-boilerplate.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import ast -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'rb') as path_fd: - lines = path_fd.read().splitlines() - - missing = True - if not lines: - # Files are allowed to be empty of everything including boilerplate - missing = False - - for text in lines: - if text == b'__metaclass__ = type': - missing = False - break - - if missing: - with open(path) as file: - contents = file.read() - - # noinspection PyBroadException - try: - node = ast.parse(contents) - - # files consisting of only assignments have no need for metaclass boilerplate - # the most likely case is that of a documentation only python file - if all(isinstance(statement, ast.Assign) for statement in node.body): - missing = False - except Exception: # pylint: disable=broad-except - pass # the compile sanity test will report this error - - if missing: - print('%s: missing: __metaclass__ = type' % path) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json b/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json deleted file 
mode 100644 index ccee80a2f1..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "prefixes": [ - "lib/ansible/", - "plugins/" - ], - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py b/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py deleted file mode 100755 index 78561d966e..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-assert.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - -ASSERT_RE = re.compile(r'^\s*assert[^a-z0-9_:]') - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as f: - for i, line in enumerate(f.readlines()): - matches = ASSERT_RE.findall(line) - - if matches: - lineno = i + 1 - colno = line.index('assert') + 1 - print('%s:%d:%d: raise AssertionError instead of: %s' % (path, lineno, colno, matches[0][colno - 1:])) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json b/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json deleted file mode 100644 index 88858aeb61..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "ignore_self": true, - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py b/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py deleted file mode 100755 index a35650efad..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-basestring.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as path_fd: - for line, text in enumerate(path_fd.readlines()): - match = re.search(r'(isinstance.*basestring)', text) - - if match: - print('%s:%d:%d: do not use `isinstance(s, basestring)`' % ( - path, line + 1, match.start(1) + 1)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json deleted file mode 100644 index 88858aeb61..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "ignore_self": true, - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py b/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py deleted file mode 100755 index e28b24f4a9..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-dict-iteritems.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as path_fd: - for line, text in enumerate(path_fd.readlines()): - match = re.search(r'(?', - b':', - b'"', - b'/', - b'\\', - b'|', - b'?', - b'*' -] + [struct.pack("b", i) for i in range(32)] - -ILLEGAL_NAMES = [ - "CON", - "PRN", - "AUX", - 
"NUL", - "COM1", - "COM2", - "COM3", - "COM4", - "COM5", - "COM6", - "COM7", - "COM8", - "COM9", - "LPT1", - "LPT2", - "LPT3", - "LPT4", - "LPT5", - "LPT6", - "LPT7", - "LPT8", - "LPT9", -] - -ILLEGAL_END_CHARS = [ - '.', - ' ', -] - - -def check_path(path, is_dir=False): - type_name = 'directory' if is_dir else 'file' - file_name = os.path.basename(path.rstrip(os.path.sep)) - name = os.path.splitext(file_name)[0] - - if name.upper() in ILLEGAL_NAMES: - print("%s: illegal %s name %s" % (path, type_name, name.upper())) - - if file_name[-1] in ILLEGAL_END_CHARS: - print("%s: illegal %s name end-char '%s'" % (path, type_name, file_name[-1])) - - bfile = to_bytes(file_name, encoding='utf-8') - for char in ILLEGAL_CHARS: - if char in bfile: - bpath = to_bytes(path, encoding='utf-8') - print("%s: illegal char '%s' in %s name" % (bpath, char, type_name)) - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - check_path(path, is_dir=path.endswith(os.path.sep)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json b/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json deleted file mode 100644 index ccee80a2f1..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "prefixes": [ - "lib/ansible/", - "plugins/" - ], - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py b/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py deleted file mode 100755 index 74a36ecc58..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-main-display.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import sys - -MAIN_DISPLAY_IMPORT = 'from __main__ import display' - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as f: - for i, line in enumerate(f.readlines()): - if MAIN_DISPLAY_IMPORT in line: - lineno = i + 1 - colno = line.index(MAIN_DISPLAY_IMPORT) + 1 - print('%s:%d:%d: Display is a singleton, just import and instantiate' % (path, lineno, colno)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json b/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json deleted file mode 100644 index 54d9fff587..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "text": true, - "ignore_self": true, - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py b/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py deleted file mode 100755 index e44005a55f..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-smart-quotes.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'rb') as path_fd: - for line, text in enumerate(path_fd.readlines()): - try: - text = text.decode('utf-8') - except UnicodeDecodeError as ex: - print('%s:%d:%d: UnicodeDecodeError: %s' % (path, line + 1, ex.start + 1, ex)) - continue - - match = 
re.search(u'([‘’“”])', text) - - if match: - print('%s:%d:%d: use ASCII quotes `\'` and `"` instead of Unicode quotes' % ( - path, line + 1, match.start(1) + 1)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json b/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json deleted file mode 100644 index 88858aeb61..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "ignore_self": true, - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py b/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py deleted file mode 100755 index e2201ab106..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/no-unicode-literals.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as path_fd: - for line, text in enumerate(path_fd.readlines()): - match = re.search(r'(unicode_literals)', text) - - if match: - print('%s:%d:%d: do not use `unicode_literals`' % ( - path, line + 1, match.start(1) + 1)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json b/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json deleted file mode 100644 index 88858aeb61..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "ignore_self": true, - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py b/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py deleted file mode 100755 index b2de1ba85d..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/replace-urlopen.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as path_fd: - for line, text in enumerate(path_fd.readlines()): - match = re.search(r'^(?:[^#]*?)(urlopen)', text) - - if match: - print('%s:%d:%d: use `ansible.module_utils.urls.open_url` instead of `urlopen`' % ( - path, line + 1, match.start(1) + 1)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json b/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json deleted file mode 100644 index 44003ec0c9..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "prefixes": [ - "lib/ansible/config/ansible_builtin_runtime.yml", - "meta/routing.yml", - "meta/runtime.yml" - ], - "extensions": [ - ".yml" - ], - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py deleted file mode 100755 index 8bc5098f20..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/runtime-metadata.py +++ /dev/null @@ -1,281 +0,0 @@ -#!/usr/bin/env python -"""Schema validation of ansible-core's 
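
The small regex checks above (no-basestring, no-unicode-literals, replace-urlopen and friends) all share one reporting convention: enumerate each file's lines and emit path:line:column with 1-based indexes derived from match.start(). A condensed sketch of that pattern over hypothetical input:

    import re

    # hypothetical file contents with an offending call on line 2
    text_lines = ['import os\n', 'data = urlopen(url).read()\n']

    for line, text in enumerate(text_lines):
        match = re.search(r'^(?:[^#]*?)(urlopen)', text)

        if match:
            # lines and columns are reported 1-based, matching the other checks
            print('demo.py:%d:%d: use `ansible.module_utils.urls.open_url` instead of `urlopen`' % (
                line + 1, match.start(1) + 1))
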
ansible_builtin_runtime.yml and collections' meta/runtime.yml""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import datetime -import os -import re -import sys -import warnings - -from functools import partial - -import yaml - -from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA -from voluptuous import Required, Schema, Invalid -from voluptuous.humanize import humanize_error - -from ansible.module_utils.compat.version import StrictVersion, LooseVersion -from ansible.module_utils.six import string_types -from ansible.utils.version import SemanticVersion - - -def isodate(value, check_deprecation_date=False, is_tombstone=False): - """Validate a datetime.date or ISO 8601 date string.""" - # datetime.date objects come from YAML dates; these are ok - if isinstance(value, datetime.date): - removal_date = value - else: - # make sure we have a string - msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date' - if not isinstance(value, string_types): - raise Invalid(msg) - # From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions, - # we have to do things manually. - if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value): - raise Invalid(msg) - try: - removal_date = datetime.datetime.strptime(value, '%Y-%m-%d').date() - except ValueError: - raise Invalid(msg) - # Make sure the date is correct - today = datetime.date.today() - if is_tombstone: - # For a tombstone, the removal date must be in the past - if today < removal_date: - raise Invalid( - 'The tombstone removal_date (%s) must not be after today (%s)' % (removal_date, today)) - else: - # For a deprecation, the removal date must be in the future. Only test this if - # check_deprecation_date is truthy, to avoid checks suddenly starting to fail. 
- if check_deprecation_date and today > removal_date: - raise Invalid( - 'The deprecation removal_date (%s) must be after today (%s)' % (removal_date, today)) - return value - - -def removal_version(value, is_ansible, current_version=None, is_tombstone=False): - """Validate a removal version string.""" - msg = ( - 'Removal version must be a string' if is_ansible else - 'Removal version must be a semantic version (https://semver.org/)' - ) - if not isinstance(value, string_types): - raise Invalid(msg) - try: - if is_ansible: - version = StrictVersion() - version.parse(value) - version = LooseVersion(value) # We're storing Ansible's version as a LooseVersion - else: - version = SemanticVersion() - version.parse(value) - if version.major != 0 and (version.minor != 0 or version.patch != 0): - raise Invalid('removal_version (%r) must be a major release, not a minor or patch release ' - '(see specification at https://semver.org/)' % (value, )) - if current_version is not None: - if is_tombstone: - # For a tombstone, the removal version must not be in the future - if version > current_version: - raise Invalid('The tombstone removal_version (%r) must not be after the ' - 'current version (%s)' % (value, current_version)) - else: - # For a deprecation, the removal version must be in the future - if version <= current_version: - raise Invalid('The deprecation removal_version (%r) must be after the ' - 'current version (%s)' % (value, current_version)) - except ValueError: - raise Invalid(msg) - return value - - -def any_value(value): - """Accepts anything.""" - return value - - -def get_ansible_version(): - """Return current ansible-core version""" - from ansible.release import __version__ - - return LooseVersion('.'.join(__version__.split('.')[:3])) - - -def get_collection_version(): - """Return current collection version, or None if it is not available""" - import importlib.util - - collection_detail_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), - 'collection_detail.py') - collection_detail_spec = importlib.util.spec_from_file_location('collection_detail', collection_detail_path) - collection_detail = importlib.util.module_from_spec(collection_detail_spec) - sys.modules['collection_detail'] = collection_detail - collection_detail_spec.loader.exec_module(collection_detail) - - # noinspection PyBroadException - try: - result = collection_detail.read_manifest_json('.') or collection_detail.read_galaxy_yml('.') - return SemanticVersion(result['version']) - except Exception: # pylint: disable=broad-except - # We do not care why it fails, in case we cannot get the version - # just return None to indicate "we don't know". 
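
For collections, removal_version above relies on ansible.utils.version.SemanticVersion and requires that removal happens in a major release. A small sketch of that rule in isolation (assuming ansible-core is importable; the sample versions are illustrative):

    from ansible.utils.version import SemanticVersion

    def is_major_release(value):
        version = SemanticVersion()
        version.parse(value)
        # only X.0.0 qualifies; 0.y.z pre-1.0 versions are tolerated as-is
        return version.major == 0 or (version.minor == 0 and version.patch == 0)

    print(is_major_release('3.0.0'))  # True: acceptable removal_version
    print(is_major_release('2.1.0'))  # False: a minor release would be rejected
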
- return None - - -def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): - """Validate explicit runtime metadata file""" - try: - with open(path, 'r') as f_path: - routing = yaml.safe_load(f_path) - except yaml.error.MarkedYAMLError as ex: - print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line + - 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex)))) - return - except Exception as ex: # pylint: disable=broad-except - print('%s:%d:%d: YAML load failed: %s' % - (path, 0, 0, re.sub(r'\s+', ' ', str(ex)))) - return - - if is_ansible: - current_version = get_ansible_version() - else: - current_version = get_collection_version() - - # Updates to schema MUST also be reflected in the documentation - # ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html - - # plugin_routing schema - - avoid_additional_data = Schema( - Any( - { - Required('removal_version'): any_value, - 'warning_text': any_value, - }, - { - Required('removal_date'): any_value, - 'warning_text': any_value, - } - ), - extra=PREVENT_EXTRA - ) - - deprecation_schema = All( - # The first schema validates the input, and the second makes sure no extra keys are specified - Schema( - { - 'removal_version': partial(removal_version, is_ansible=is_ansible, - current_version=current_version), - 'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates), - 'warning_text': Any(*string_types), - } - ), - avoid_additional_data - ) - - tombstoning_schema = All( - # The first schema validates the input, and the second makes sure no extra keys are specified - Schema( - { - 'removal_version': partial(removal_version, is_ansible=is_ansible, - current_version=current_version, is_tombstone=True), - 'removal_date': partial(isodate, is_tombstone=True), - 'warning_text': Any(*string_types), - } - ), - avoid_additional_data - ) - - plugin_routing_schema = Any( - Schema({ - ('deprecation'): Any(deprecation_schema), - ('tombstone'): Any(tombstoning_schema), - ('redirect'): Any(*string_types), - }, extra=PREVENT_EXTRA), - ) - - list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema} - for str_type in string_types] - - plugin_schema = Schema({ - ('action'): Any(None, *list_dict_plugin_routing_schema), - ('become'): Any(None, *list_dict_plugin_routing_schema), - ('cache'): Any(None, *list_dict_plugin_routing_schema), - ('callback'): Any(None, *list_dict_plugin_routing_schema), - ('cliconf'): Any(None, *list_dict_plugin_routing_schema), - ('connection'): Any(None, *list_dict_plugin_routing_schema), - ('doc_fragments'): Any(None, *list_dict_plugin_routing_schema), - ('filter'): Any(None, *list_dict_plugin_routing_schema), - ('httpapi'): Any(None, *list_dict_plugin_routing_schema), - ('inventory'): Any(None, *list_dict_plugin_routing_schema), - ('lookup'): Any(None, *list_dict_plugin_routing_schema), - ('module_utils'): Any(None, *list_dict_plugin_routing_schema), - ('modules'): Any(None, *list_dict_plugin_routing_schema), - ('netconf'): Any(None, *list_dict_plugin_routing_schema), - ('shell'): Any(None, *list_dict_plugin_routing_schema), - ('strategy'): Any(None, *list_dict_plugin_routing_schema), - ('terminal'): Any(None, *list_dict_plugin_routing_schema), - ('test'): Any(None, *list_dict_plugin_routing_schema), - ('vars'): Any(None, *list_dict_plugin_routing_schema), - }, extra=PREVENT_EXTRA) - - # import_redirection schema - - import_redirection_schema = Any( - Schema({ - ('redirect'): Any(*string_types), - # import_redirect doesn't currently support 
deprecation - }, extra=PREVENT_EXTRA) - ) - - list_dict_import_redirection_schema = [{str_type: import_redirection_schema} - for str_type in string_types] - - # top level schema - - schema = Schema({ - # All of these are optional - ('plugin_routing'): Any(plugin_schema), - ('import_redirection'): Any(None, *list_dict_import_redirection_schema), - # requires_ansible: In the future we should validate this with SpecifierSet - ('requires_ansible'): Any(*string_types), - ('action_groups'): dict, - }, extra=PREVENT_EXTRA) - - # Ensure schema is valid - - try: - schema(routing) - except MultipleInvalid as ex: - for error in ex.errors: - # No way to get line/column numbers - print('%s:%d:%d: %s' % (path, 0, 0, humanize_error(routing, error))) - - -def main(): - """Validate runtime metadata""" - paths = sys.argv[1:] or sys.stdin.read().splitlines() - - collection_legacy_file = 'meta/routing.yml' - collection_runtime_file = 'meta/runtime.yml' - - # This is currently disabled, because if it is enabled this test can start failing - # at a random date. For this to be properly activated, we (a) need to be able to return - # codes for this test, and (b) make this error optional. - check_deprecation_dates = False - - for path in paths: - if path == collection_legacy_file: - print('%s:%d:%d: %s' % (path, 0, 0, ("Should be called '%s'" % collection_runtime_file))) - continue - - validate_metadata_file( - path, - is_ansible=path not in (collection_legacy_file, collection_runtime_file), - check_deprecation_dates=check_deprecation_dates) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/shebang.json b/test/lib/ansible_test/_data/sanity/code-smell/shebang.json deleted file mode 100644 index 5648429eb0..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/shebang.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "text": true, - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/shebang.py b/test/lib/ansible_test/_data/sanity/code-smell/shebang.py deleted file mode 100755 index 7cf3cf7200..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/shebang.py +++ /dev/null @@ -1,120 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import re -import stat -import sys - - -def main(): - standard_shebangs = set([ - b'#!/bin/bash -eu', - b'#!/bin/bash -eux', - b'#!/bin/sh', - b'#!/usr/bin/env bash', - b'#!/usr/bin/env fish', - b'#!/usr/bin/env pwsh', - b'#!/usr/bin/env python', - b'#!/usr/bin/make -f', - ]) - - integration_shebangs = set([ - b'#!/bin/sh', - b'#!/usr/bin/env bash', - b'#!/usr/bin/env python', - ]) - - module_shebangs = { - '': b'#!/usr/bin/python', - '.py': b'#!/usr/bin/python', - '.ps1': b'#!powershell', - } - - # see https://unicode.org/faq/utf_bom.html#bom1 - byte_order_marks = ( - (b'\x00\x00\xFE\xFF', 'UTF-32 (BE)'), - (b'\xFF\xFE\x00\x00', 'UTF-32 (LE)'), - (b'\xFE\xFF', 'UTF-16 (BE)'), - (b'\xFF\xFE', 'UTF-16 (LE)'), - (b'\xEF\xBB\xBF', 'UTF-8'), - ) - - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'rb') as path_fd: - shebang = path_fd.readline().strip() - mode = os.stat(path).st_mode - executable = (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & mode - - if not shebang or not shebang.startswith(b'#!'): - if executable: - print('%s:%d:%d: file without shebang should not be executable' % (path, 0, 0)) - - for mark, name in byte_order_marks: - if shebang.startswith(mark): - 
print('%s:%d:%d: file starts with a %s byte order mark' % (path, 0, 0, name)) - break - - continue - - is_module = False - is_integration = False - - dirname = os.path.dirname(path) - - if path.startswith('lib/ansible/modules/'): - is_module = True - elif re.search('^test/support/[^/]+/plugins/modules/', path): - is_module = True - elif re.search('^test/support/[^/]+/collections/ansible_collections/[^/]+/[^/]+/plugins/modules/', path): - is_module = True - elif path.startswith('test/lib/ansible_test/_data/'): - pass - elif path.startswith('lib/') or path.startswith('test/lib/'): - if executable: - print('%s:%d:%d: should not be executable' % (path, 0, 0)) - - if shebang: - print('%s:%d:%d: should not have a shebang' % (path, 0, 0)) - - continue - elif path.startswith('test/integration/targets/') or path.startswith('tests/integration/targets/'): - is_integration = True - - if dirname.endswith('/library') or '/plugins/modules' in dirname or dirname in ( - # non-standard module library directories - 'test/integration/targets/module_precedence/lib_no_extension', - 'test/integration/targets/module_precedence/lib_with_extension', - ): - is_module = True - elif path.startswith('plugins/modules/'): - is_module = True - - if is_module: - if executable: - print('%s:%d:%d: module should not be executable' % (path, 0, 0)) - - ext = os.path.splitext(path)[1] - expected_shebang = module_shebangs.get(ext) - expected_ext = ' or '.join(['"%s"' % k for k in module_shebangs]) - - if expected_shebang: - if shebang == expected_shebang: - continue - - print('%s:%d:%d: expected module shebang "%s" but found: %s' % (path, 1, 1, expected_shebang, shebang)) - else: - print('%s:%d:%d: expected module extension %s but found: %s' % (path, 0, 0, expected_ext, ext)) - else: - if is_integration: - allowed = integration_shebangs - else: - allowed = standard_shebangs - - if shebang not in allowed: - print('%s:%d:%d: unexpected non-module shebang: %s' % (path, 1, 1, shebang)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json b/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json deleted file mode 100644 index 6f13c86b30..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "include_directories": true, - "include_symlinks": true, - "output": "path-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py b/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py deleted file mode 100755 index 0585c6b1e5..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/symlinks.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import sys - - -def main(): - root_dir = os.getcwd() + os.path.sep - - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - if not os.path.islink(path.rstrip(os.path.sep)): - continue - - if not os.path.exists(path): - print('%s: broken symlinks are not allowed' % path) - continue - - if path.endswith(os.path.sep): - print('%s: symlinks to directories are not allowed' % path) - continue - - real_path = os.path.realpath(path) - - if not real_path.startswith(root_dir): - print('%s: symlinks outside content tree are not allowed: %s' % (path, os.path.relpath(real_path, os.path.dirname(path)))) - continue - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json 
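
The symlinks check above boils down to a realpath containment test: a link may resolve anywhere inside the content tree, but never outside it. The core of that test as a standalone sketch (demo paths hypothetical, assuming the working directory is the content tree root):

    import os

    root_dir = os.getcwd() + os.path.sep

    def is_outside_tree(path):
        # resolve the link target and require it to stay under the current tree
        real_path = os.path.realpath(path)
        return not real_path.startswith(root_dir)

    print(is_outside_tree('bin/ansible-test'))  # False for a link resolving inside the tree
    print(is_outside_tree('/etc/passwd'))       # True: outside the content tree
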
b/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json deleted file mode 100644 index 36103051b0..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "prefixes": [ - "lib/ansible/modules/", - "plugins/modules/" - ], - "extensions": [ - ".py" - ], - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py b/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py deleted file mode 100755 index 687136dcdb..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/use-argspec-type-path.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as path_fd: - for line, text in enumerate(path_fd.readlines()): - match = re.search(r'(expanduser)', text) - - if match: - print('%s:%d:%d: use argspec type="path" instead of type="str" to avoid use of `expanduser`' % ( - path, line + 1, match.start(1) + 1)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json b/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json deleted file mode 100644 index 776590b74d..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "extensions": [ - ".py" - ], - "output": "path-line-column-message" -} diff --git a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py b/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py deleted file mode 100755 index 49cb76c5e2..0000000000 --- a/test/lib/ansible_test/_data/sanity/code-smell/use-compat-six.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import re -import sys - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'r') as path_fd: - for line, text in enumerate(path_fd.readlines()): - match = re.search(r'((^\s*import\s+six\b)|(^\s*from\s+six\b))', text) - - if match: - print('%s:%d:%d: use `ansible.module_utils.six` instead of `six`' % ( - path, line + 1, match.start(1) + 1)) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/compile/compile.py b/test/lib/ansible_test/_data/sanity/compile/compile.py deleted file mode 100755 index 3f6fc96260..0000000000 --- a/test/lib/ansible_test/_data/sanity/compile/compile.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python -"""Python syntax checker with lint friendly output.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import sys - -ENCODING = 'utf-8' -ERRORS = 'replace' -Text = type(u'') - - -def main(): - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - with open(path, 'rb') as source_fd: - source = source_fd.read() - - try: - compile(source, path, 'exec', dont_inherit=True) - except SyntaxError as ex: - extype, message, lineno, offset = type(ex), ex.text, ex.lineno, ex.offset - except BaseException as ex: # pylint: disable=broad-except - extype, message, lineno, offset = type(ex), str(ex), 0, 0 - else: - continue - - result = "%s:%d:%d: %s: %s" % (path, lineno, offset, extype.__name__, safe_message(message)) - - if sys.version_info <= (3,): - 
result = result.encode(ENCODING, ERRORS) - - print(result) - - -def safe_message(value): - """Given an input value as text or bytes, return the first non-empty line as text, ensuring it can be round-tripped as UTF-8.""" - if isinstance(value, Text): - value = value.encode(ENCODING, ERRORS) - - value = value.decode(ENCODING, ERRORS) - value = value.strip().splitlines()[0].strip() - - return value - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/import/importer.py b/test/lib/ansible_test/_data/sanity/import/importer.py deleted file mode 100755 index f0659d9b4e..0000000000 --- a/test/lib/ansible_test/_data/sanity/import/importer.py +++ /dev/null @@ -1,541 +0,0 @@ -#!/usr/bin/env python -"""Import the given python module(s) and report error(s) encountered.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - - -def main(): - """ - Main program function used to isolate globals from imported code. - Changes to globals in imported modules on Python 2.x will overwrite our own globals. - """ - import ansible - import contextlib - import datetime - import json - import os - import re - import runpy - import subprocess - import sys - import traceback - import types - import warnings - - ansible_path = os.path.dirname(os.path.dirname(ansible.__file__)) - temp_path = os.environ['SANITY_TEMP_PATH'] + os.path.sep - external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') or sys.executable - collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME') - collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH') - import_type = os.environ.get('SANITY_IMPORTER_TYPE') - - try: - # noinspection PyCompatibility - from importlib import import_module - except ImportError: - def import_module(name): - __import__(name) - return sys.modules[name] - - try: - # noinspection PyCompatibility - from StringIO import StringIO - except ImportError: - from io import StringIO - - if collection_full_name: - # allow importing code from collections when testing a collection - from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native, text_type - from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder - from ansible.utils.collection_loader import _collection_finder - - yaml_to_json_path = os.path.join(os.path.dirname(__file__), 'yaml_to_json.py') - yaml_to_dict_cache = {} - - # unique ISO date marker matching the one present in yaml_to_json.py - iso_date_marker = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' - iso_date_re = re.compile('^%s([0-9]{4})-([0-9]{2})-([0-9]{2})$' % iso_date_marker) - - def parse_value(value): - """Custom value parser for JSON deserialization that recognizes our internal ISO date format.""" - if isinstance(value, text_type): - match = iso_date_re.search(value) - - if match: - value = datetime.date(int(match.group(1)), int(match.group(2)), int(match.group(3))) - - return value - - def object_hook(data): - """Object hook for custom ISO date deserialization from JSON.""" - return dict((key, parse_value(value)) for key, value in data.items()) - - def yaml_to_dict(yaml, content_id): - """ - Return a Python dict version of the provided YAML. - Conversion is done in a subprocess since the current Python interpreter does not have access to PyYAML. 
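
compile.py above rests on a single built-in: compile() reports syntax errors with a line number and offset without executing the file. A minimal sketch against a hypothetical broken source string:

    source = b'def broken(:\n    pass\n'  # hypothetical file contents with a syntax error

    try:
        compile(source, 'demo.py', 'exec', dont_inherit=True)
    except SyntaxError as ex:
        # mirrors the path:line:column report produced by compile.py
        print('demo.py:%d:%d: SyntaxError: %s' % (ex.lineno, ex.offset, ex.text.strip()))
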
- """ - if content_id in yaml_to_dict_cache: - return yaml_to_dict_cache[content_id] - - try: - cmd = [external_python, yaml_to_json_path] - proc = subprocess.Popen([to_bytes(c) for c in cmd], # pylint: disable=consider-using-with - stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout_bytes, stderr_bytes = proc.communicate(to_bytes(yaml)) - - if proc.returncode != 0: - raise Exception('command %s failed with return code %d: %s' % ([to_native(c) for c in cmd], proc.returncode, to_native(stderr_bytes))) - - data = yaml_to_dict_cache[content_id] = json.loads(to_text(stdout_bytes), object_hook=object_hook) - - return data - except Exception as ex: - raise Exception('internal importer error - failed to parse yaml: %s' % to_native(ex)) - - _collection_finder._meta_yml_to_dict = yaml_to_dict # pylint: disable=protected-access - - collection_loader = _AnsibleCollectionFinder(paths=[collection_root]) - # noinspection PyProtectedMember - collection_loader._install() # pylint: disable=protected-access - else: - # do not support collection loading when not testing a collection - collection_loader = None - - # remove all modules under the ansible package - list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__])) - - if import_type == 'module': - # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading - # this more accurately reflects the environment that AnsiballZ runs modules under - # it also avoids issues with imports in the ansible package that are not allowed - ansible_module = types.ModuleType(ansible.__name__) - ansible_module.__file__ = ansible.__file__ - ansible_module.__path__ = ansible.__path__ - ansible_module.__package__ = ansible.__package__ - - sys.modules[ansible.__name__] = ansible_module - - class ImporterAnsibleModuleException(Exception): - """Exception thrown during initialization of ImporterAnsibleModule.""" - - class ImporterAnsibleModule: - """Replacement for AnsibleModule to support import testing.""" - def __init__(self, *args, **kwargs): - raise ImporterAnsibleModuleException() - - class RestrictedModuleLoader: - """Python module loader that restricts inappropriate imports.""" - def __init__(self, path, name, restrict_to_module_paths): - self.path = path - self.name = name - self.loaded_modules = set() - self.restrict_to_module_paths = restrict_to_module_paths - - def find_module(self, fullname, path=None): - """Return self if the given fullname is restricted, otherwise return None. 
- :param fullname: str - :param path: str - :return: RestrictedModuleLoader | None - """ - if fullname in self.loaded_modules: - return None # ignore modules that are already being loaded - - if is_name_in_namepace(fullname, ['ansible']): - if not self.restrict_to_module_paths: - return None # for non-modules, everything in the ansible namespace is allowed - - if fullname in ('ansible.module_utils.basic',): - return self # intercept loading so we can modify the result - - if is_name_in_namepace(fullname, ['ansible.module_utils', self.name]): - return None # module_utils and module under test are always allowed - - if any(os.path.exists(candidate_path) for candidate_path in convert_ansible_name_to_absolute_paths(fullname)): - return self # restrict access to ansible files that exist - - return None # ansible file does not exist, do not restrict access - - if is_name_in_namepace(fullname, ['ansible_collections']): - if not collection_loader: - return self # restrict access to collections when we are not testing a collection - - if not self.restrict_to_module_paths: - return None # for non-modules, everything in the ansible namespace is allowed - - if is_name_in_namepace(fullname, ['ansible_collections...plugins.module_utils', self.name]): - return None # module_utils and module under test are always allowed - - if collection_loader.find_module(fullname, path): - return self # restrict access to collection files that exist - - return None # collection file does not exist, do not restrict access - - # not a namespace we care about - return None - - def load_module(self, fullname): - """Raise an ImportError. - :type fullname: str - """ - if fullname == 'ansible.module_utils.basic': - module = self.__load_module(fullname) - - # stop Ansible module execution during AnsibleModule instantiation - module.AnsibleModule = ImporterAnsibleModule - # no-op for _load_params since it may be called before instantiating AnsibleModule - module._load_params = lambda *args, **kwargs: {} # pylint: disable=protected-access - - return module - - raise ImportError('import of "%s" is not allowed in this context' % fullname) - - def __load_module(self, fullname): - """Load the requested module while avoiding infinite recursion. - :type fullname: str - :rtype: module - """ - self.loaded_modules.add(fullname) - return import_module(fullname) - - def run(restrict_to_module_paths): - """Main program function.""" - base_dir = os.getcwd() - messages = set() - - for path in sys.argv[1:] or sys.stdin.read().splitlines(): - name = convert_relative_path_to_name(path) - test_python_module(path, name, base_dir, messages, restrict_to_module_paths) - - if messages: - sys.exit(10) - - def test_python_module(path, name, base_dir, messages, restrict_to_module_paths): - """Test the given python module by importing it. 
- :type path: str - :type name: str - :type base_dir: str - :type messages: set[str] - :type restrict_to_module_paths: bool - """ - if name in sys.modules: - return # cannot be tested because it has already been loaded - - is_ansible_module = (path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/')) and os.path.basename(path) != '__init__.py' - run_main = is_ansible_module - - if path == 'lib/ansible/modules/async_wrapper.py': - # async_wrapper is a non-standard Ansible module (does not use AnsibleModule) so we cannot test the main function - run_main = False - - capture_normal = Capture() - capture_main = Capture() - - run_module_ok = False - - try: - with monitor_sys_modules(path, messages): - with restrict_imports(path, name, messages, restrict_to_module_paths): - with capture_output(capture_normal): - import_module(name) - - if run_main: - run_module_ok = is_ansible_module - - with monitor_sys_modules(path, messages): - with restrict_imports(path, name, messages, restrict_to_module_paths): - with capture_output(capture_main): - runpy.run_module(name, run_name='__main__', alter_sys=True) - except ImporterAnsibleModuleException: - # module instantiated AnsibleModule without raising an exception - if not run_module_ok: - if is_ansible_module: - report_message(path, 0, 0, 'module-guard', "AnsibleModule instantiation not guarded by `if __name__ == '__main__'`", messages) - else: - report_message(path, 0, 0, 'non-module', "AnsibleModule instantiated by import of non-module", messages) - except BaseException as ex: # pylint: disable=locally-disabled, broad-except - # intentionally catch all exceptions, including calls to sys.exit - exc_type, _exc, exc_tb = sys.exc_info() - message = str(ex) - results = list(reversed(traceback.extract_tb(exc_tb))) - line = 0 - offset = 0 - full_path = os.path.join(base_dir, path) - base_path = base_dir + os.path.sep - source = None - - # avoid line wraps in messages - message = re.sub(r'\n *', ': ', message) - - for result in results: - if result[0] == full_path: - # save the line number for the file under test - line = result[1] or 0 - - if not source and result[0].startswith(base_path) and not result[0].startswith(temp_path): - # save the first path and line number in the traceback which is in our source tree - source = (os.path.relpath(result[0], base_path), result[1] or 0, 0) - - if isinstance(ex, SyntaxError): - # SyntaxError has better information than the traceback - if ex.filename == full_path: # pylint: disable=locally-disabled, no-member - # syntax error was reported in the file under test - line = ex.lineno or 0 # pylint: disable=locally-disabled, no-member - offset = ex.offset or 0 # pylint: disable=locally-disabled, no-member - elif ex.filename.startswith(base_path) and not ex.filename.startswith(temp_path): # pylint: disable=locally-disabled, no-member - # syntax error was reported in our source tree - source = (os.path.relpath(ex.filename, base_path), ex.lineno or 0, ex.offset or 0) # pylint: disable=locally-disabled, no-member - - # remove the filename and line number from the message - # either it was extracted above, or it's not really useful information - message = re.sub(r' \(.*?, line [0-9]+\)$', '', message) - - if source and source[0] != path: - message += ' (at %s:%d:%d)' % (source[0], source[1], source[2]) - - report_message(path, line, offset, 'traceback', '%s: %s' % (exc_type.__name__, message), messages) - finally: - capture_report(path, capture_normal, messages) - capture_report(path, capture_main, messages) 
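
test_python_module above layers three context managers around each import; the output-capture layer is the simplest of them. A cut-down sketch of the Capture/capture_output pairing:

    import contextlib
    import sys
    from io import StringIO

    class Capture:
        """Captured stdout and stderr."""
        def __init__(self):
            self.stdout = StringIO()
            self.stderr = StringIO()

    @contextlib.contextmanager
    def capture_output(capture):
        old_stdout, old_stderr = sys.stdout, sys.stderr
        sys.stdout, sys.stderr = capture.stdout, capture.stderr
        try:
            yield
        finally:
            sys.stdout, sys.stderr = old_stdout, old_stderr

    capture = Capture()
    with capture_output(capture):
        print('noise from an imported module')

    # only the first captured line is reported, as in capture_report above
    print(capture.stdout.getvalue().strip().splitlines()[0].strip())
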
- - def is_name_in_namepace(name, namespaces): - """Returns True if the given name is one of the given namespaces, otherwise returns False.""" - name_parts = name.split('.') - - for namespace in namespaces: - namespace_parts = namespace.split('.') - length = min(len(name_parts), len(namespace_parts)) - - truncated_name = name_parts[0:length] - truncated_namespace = namespace_parts[0:length] - - # empty parts in the namespace are treated as wildcards - # to simplify the comparison, use those empty parts to indicate the positions in the name to be empty as well - for idx, part in enumerate(truncated_namespace): - if not part: - truncated_name[idx] = part - - # example: name=ansible, allowed_name=ansible.module_utils - # example: name=ansible.module_utils.system.ping, allowed_name=ansible.module_utils - if truncated_name == truncated_namespace: - return True - - return False - - def check_sys_modules(path, before, messages): - """Check for unwanted changes to sys.modules. - :type path: str - :type before: dict[str, module] - :type messages: set[str] - """ - after = sys.modules - removed = set(before.keys()) - set(after.keys()) - changed = set(key for key, value in before.items() if key in after and value != after[key]) - - # additions are checked by our custom PEP 302 loader, so we don't need to check them again here - - for module in sorted(removed): - report_message(path, 0, 0, 'unload', 'unloading of "%s" in sys.modules is not supported' % module, messages) - - for module in sorted(changed): - report_message(path, 0, 0, 'reload', 'reloading of "%s" in sys.modules is not supported' % module, messages) - - def convert_ansible_name_to_absolute_paths(name): - """Calculate the module path from the given name. - :type name: str - :rtype: list[str] - """ - return [ - os.path.join(ansible_path, name.replace('.', os.path.sep)), - os.path.join(ansible_path, name.replace('.', os.path.sep)) + '.py', - ] - - def convert_relative_path_to_name(path): - """Calculate the module name from the given path. - :type path: str - :rtype: str - """ - if path.endswith('/__init__.py'): - clean_path = os.path.dirname(path) - else: - clean_path = path - - clean_path = os.path.splitext(clean_path)[0] - - name = clean_path.replace(os.path.sep, '.') - - if collection_loader: - # when testing collections the relative paths (and names) being tested are within the collection under test - name = 'ansible_collections.%s.%s' % (collection_full_name, name) - else: - # when testing ansible all files being imported reside under the lib directory - name = name[len('lib/'):] - - return name - - class Capture: - """Captured output and/or exception.""" - def __init__(self): - self.stdout = StringIO() - self.stderr = StringIO() - - def capture_report(path, capture, messages): - """Report on captured output. - :type path: str - :type capture: Capture - :type messages: set[str] - """ - if capture.stdout.getvalue(): - first = capture.stdout.getvalue().strip().splitlines()[0].strip() - report_message(path, 0, 0, 'stdout', first, messages) - - if capture.stderr.getvalue(): - first = capture.stderr.getvalue().strip().splitlines()[0].strip() - report_message(path, 0, 0, 'stderr', first, messages) - - def report_message(path, line, column, code, message, messages): - """Report message if not already reported. 
- :type path: str - :type line: int - :type column: int - :type code: str - :type message: str - :type messages: set[str] - """ - message = '%s:%d:%d: %s: %s' % (path, line, column, code, message) - - if message not in messages: - messages.add(message) - print(message) - - @contextlib.contextmanager - def restrict_imports(path, name, messages, restrict_to_module_paths): - """Restrict available imports. - :type path: str - :type name: str - :type messages: set[str] - :type restrict_to_module_paths: bool - """ - restricted_loader = RestrictedModuleLoader(path, name, restrict_to_module_paths) - - # noinspection PyTypeChecker - sys.meta_path.insert(0, restricted_loader) - sys.path_importer_cache.clear() - - try: - yield - finally: - if import_type == 'plugin': - from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder - _AnsibleCollectionFinder._remove() # pylint: disable=protected-access - - if sys.meta_path[0] != restricted_loader: - report_message(path, 0, 0, 'metapath', 'changes to sys.meta_path[0] are not permitted', messages) - - while restricted_loader in sys.meta_path: - # noinspection PyTypeChecker - sys.meta_path.remove(restricted_loader) - - sys.path_importer_cache.clear() - - @contextlib.contextmanager - def monitor_sys_modules(path, messages): - """Monitor sys.modules for unwanted changes, reverting any additions made to our own namespaces.""" - snapshot = sys.modules.copy() - - try: - yield - finally: - check_sys_modules(path, snapshot, messages) - - for key in set(sys.modules.keys()) - set(snapshot.keys()): - if is_name_in_namepace(key, ('ansible', 'ansible_collections')): - del sys.modules[key] # only unload our own code since we know it's native Python - - @contextlib.contextmanager - def capture_output(capture): - """Capture sys.stdout and sys.stderr. - :type capture: Capture - """ - old_stdout = sys.stdout - old_stderr = sys.stderr - - sys.stdout = capture.stdout - sys.stderr = capture.stderr - - # clear all warnings registries to make all warnings available - for module in sys.modules.values(): - try: - # noinspection PyUnresolvedReferences - module.__warningregistry__.clear() - except AttributeError: - pass - - with warnings.catch_warnings(): - warnings.simplefilter('error') - - if sys.version_info[0] == 2: - warnings.filterwarnings( - "ignore", - "Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography," - " and will be removed in a future release.") - warnings.filterwarnings( - "ignore", - "Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography," - " and will be removed in the next release.") - - if sys.version_info[:2] == (3, 5): - warnings.filterwarnings( - "ignore", - "Python 3.5 support will be dropped in the next release ofcryptography. Please upgrade your Python.") - warnings.filterwarnings( - "ignore", - "Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.") - - if sys.version_info >= (3, 10): - # Temporary solution for Python 3.10 until find_spec is implemented in RestrictedModuleLoader. - # That implementation is dependent on find_spec being added to the controller's collection loader first. 
- # The warning text is: main.<locals>.RestrictedModuleLoader.find_spec() not found; falling back to find_module() - warnings.filterwarnings( - "ignore", - r"main\.<locals>\.RestrictedModuleLoader\.find_spec\(\) not found; falling back to find_module\(\)", - ) - # Temporary solution for Python 3.10 until exec_module is implemented in RestrictedModuleLoader. - # That implementation is dependent on exec_module being added to the controller's collection loader first. - # The warning text is: main.<locals>.RestrictedModuleLoader.exec_module() not found; falling back to load_module() - warnings.filterwarnings( - "ignore", - r"main\.<locals>\.RestrictedModuleLoader\.exec_module\(\) not found; falling back to load_module\(\)", - ) - - # Temporary solution for Python 3.10 until find_spec is implemented in the controller's collection loader. - warnings.filterwarnings( - "ignore", - r"_Ansible.*Finder\.find_spec\(\) not found; falling back to find_module\(\)", - ) - # Temporary solution for Python 3.10 until exec_module is implemented in the controller's collection loader. - warnings.filterwarnings( - "ignore", - r"_Ansible.*Loader\.exec_module\(\) not found; falling back to load_module\(\)", - ) - - # Temporary solution until there is a vendored copy of distutils.version in module_utils. - # Some of our dependencies such as packaging.tags also import distutils, which we have no control over - # The warning text is: The distutils package is deprecated and slated for removal in Python 3.12. - # Use setuptools or check PEP 632 for potential alternatives - warnings.filterwarnings( - "ignore", - r"The distutils package is deprecated and slated for removal in Python 3\.12\. .*", - ) - - try: - yield - finally: - sys.stdout = old_stdout - sys.stderr = old_stderr - - run(import_type == 'module') - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py b/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py deleted file mode 100644 index 09be9576d9..0000000000 --- a/test/lib/ansible_test/_data/sanity/import/yaml_to_json.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Read YAML from stdin and write JSON to stdout.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import datetime -import json -import sys - -from yaml import load - -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader - -# unique ISO date marker matching the one present in importer.py -ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' - - -def default(value): - if isinstance(value, datetime.date): - return '%s%s' % (ISO_DATE_MARKER, value.isoformat()) - - raise TypeError('cannot serialize type: %s' % type(value)) - - -json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout, default=default) diff --git a/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py b/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py deleted file mode 100644 index 74a45f009f..0000000000 --- a/test/lib/ansible_test/_data/sanity/integration-aliases/yaml_to_json.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Read YAML from stdin and write JSON to stdout.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json -import sys - -from yaml import load - -try: - from yaml import CSafeLoader as SafeLoader -except ImportError: - from yaml import SafeLoader - -json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout) diff --git 
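
The two yaml_to_json.py helpers above differ only in date handling: the import variant tags datetime.date values with a unique marker so that importer.py can reconstitute real dates after the JSON round trip. A sketch of both halves together:

    import datetime
    import json
    import re

    ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:'
    ISO_DATE_RE = re.compile('^%s([0-9]{4})-([0-9]{2})-([0-9]{2})$' % ISO_DATE_MARKER)

    def default(value):
        # serialization side, as in yaml_to_json.py
        if isinstance(value, datetime.date):
            return '%s%s' % (ISO_DATE_MARKER, value.isoformat())
        raise TypeError('cannot serialize type: %s' % type(value))

    def parse_value(value):
        # deserialization side, as in importer.py's object_hook
        match = ISO_DATE_RE.search(value)
        if match:
            return datetime.date(int(match.group(1)), int(match.group(2)), int(match.group(3)))
        return value

    encoded = json.dumps({'removal_date': datetime.date(2023, 1, 1)}, default=default)
    decoded = {key: parse_value(value) for key, value in json.loads(encoded).items()}
    print(decoded)  # {'removal_date': datetime.date(2023, 1, 1)}
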
a/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt b/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt deleted file mode 100644 index 659c7f59e7..0000000000 --- a/test/lib/ansible_test/_data/sanity/pep8/current-ignore.txt +++ /dev/null @@ -1,4 +0,0 @@ -E402 -W503 -W504 -E741 diff --git a/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1 b/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1 deleted file mode 100755 index 1ef2743acd..0000000000 --- a/test/lib/ansible_test/_data/sanity/pslint/pslint.ps1 +++ /dev/null @@ -1,43 +0,0 @@ -#!/usr/bin/env pwsh -#Requires -Version 6 -#Requires -Modules PSScriptAnalyzer, PSSA-PSCustomUseLiteralPath - -Set-StrictMode -Version 2.0 -$ErrorActionPreference = "Stop" -$WarningPreference = "Stop" - -# Until https://github.com/PowerShell/PSScriptAnalyzer/issues/1217 is fixed we need to import Pester if it's -# available. -if (Get-Module -Name Pester -ListAvailable -ErrorAction SilentlyContinue) { - Import-Module -Name Pester -} - -$LiteralPathRule = Import-Module -Name PSSA-PSCustomUseLiteralPath -PassThru -$LiteralPathRulePath = Join-Path -Path $LiteralPathRule.ModuleBase -ChildPath $LiteralPathRule.RootModule - -$PSSAParams = @{ - CustomRulePath = @($LiteralPathRulePath) - IncludeDefaultRules = $true - Setting = (Join-Path -Path $PSScriptRoot -ChildPath "settings.psd1") -} - -$Results = @() - -ForEach ($Path in $Args) { - $Retries = 3 - - Do { - Try { - $Results += Invoke-ScriptAnalyzer -Path $Path @PSSAParams 3> $null - $Retries = 0 - } - Catch { - If (--$Retries -le 0) { - Throw - } - } - } - Until ($Retries -le 0) -} - -ConvertTo-Json -InputObject $Results diff --git a/test/lib/ansible_test/_data/sanity/pslint/settings.psd1 b/test/lib/ansible_test/_data/sanity/pslint/settings.psd1 deleted file mode 100644 index 7646ec35e1..0000000000 --- a/test/lib/ansible_test/_data/sanity/pslint/settings.psd1 +++ /dev/null @@ -1,13 +0,0 @@ -@{ - ExcludeRules=@( - 'PSUseOutputTypeCorrectly', - 'PSUseShouldProcessForStateChangingFunctions', - # We send strings as plaintext so will always come across the 3 issues - 'PSAvoidUsingPlainTextForPassword', - 'PSAvoidUsingConvertToSecureStringWithPlainText', - 'PSAvoidUsingUserNameAndPassWordParams', - # We send the module as a base64 encoded string and a BOM will cause - # issues here - 'PSUseBOMForUnicodeEncodedFile' - ) -} diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg deleted file mode 100644 index 187758f409..0000000000 --- a/test/lib/ansible_test/_data/sanity/pylint/config/ansible-test.cfg +++ /dev/null @@ -1,54 +0,0 @@ -[MESSAGES CONTROL] - -disable= - consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - cyclic-import, # consistent results require running with --jobs 1 and testing all files - duplicate-code, # consistent results require running with --jobs 1 and testing all files - import-error, # inconsistent results which depend on the availability of imports - import-outside-toplevel, # common pattern in ansible related code - no-name-in-module, # inconsistent results which depend on the availability of imports - no-self-use, - raise-missing-from, # Python 2.x does not support raise from - super-with-arguments, # Python 2.x does not support super without arguments - too-few-public-methods, - too-many-ancestors, # inconsistent results between python 3.6 
and 3.7+ - too-many-arguments, - too-many-branches, - too-many-instance-attributes, - too-many-lines, - too-many-locals, - too-many-nested-blocks, - too-many-return-statements, - too-many-statements, - unused-import, # pylint does not understand PEP 484 type hints - -[BASIC] - -bad-names= - _, - bar, - baz, - foo, - tata, - toto, - tutu, - -good-names= - __metaclass__, - C, - ex, - i, - j, - k, - Run, - -class-attribute-rgx=[A-Za-z_][A-Za-z0-9_]{1,40}$ -attr-rgx=[a-z_][a-z0-9_]{1,40}$ -method-rgx=[a-z_][a-z0-9_]{1,40}$ -function-rgx=[a-z_][a-z0-9_]{1,40}$ - -[IMPORTS] - -preferred-modules = - distutils.version:ansible.module_utils.compat.version, diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg deleted file mode 100644 index 31c140ccbf..0000000000 --- a/test/lib/ansible_test/_data/sanity/pylint/config/collection.cfg +++ /dev/null @@ -1,143 +0,0 @@ -[MESSAGES CONTROL] - -disable= - abstract-method, - access-member-before-definition, - arguments-differ, - assignment-from-no-return, - assignment-from-none, - attribute-defined-outside-init, - bad-continuation, - bad-indentation, - bad-mcs-classmethod-argument, - broad-except, - c-extension-no-member, - cell-var-from-loop, - chained-comparison, - comparison-with-callable, - consider-iterating-dictionary, - consider-merging-isinstance, - consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - consider-using-dict-items, - consider-using-enumerate, - consider-using-get, - consider-using-in, - consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - consider-using-ternary, - consider-using-with, - cyclic-import, # consistent results require running with --jobs 1 and testing all files - deprecated-lambda, - deprecated-method, - deprecated-module, - duplicate-code, # consistent results require running with --jobs 1 and testing all files - eval-used, - exec-used, - expression-not-assigned, - fixme, - function-redefined, - global-statement, - global-variable-undefined, - import-error, # inconsistent results which depend on the availability of imports - import-outside-toplevel, # common pattern in ansible related code - import-self, - inconsistent-return-statements, - invalid-envvar-default, - invalid-name, - invalid-sequence-index, - keyword-arg-before-vararg, - len-as-condition, - line-too-long, - literal-comparison, - locally-disabled, - method-hidden, - misplaced-comparison-constant, - missing-docstring, - no-else-break, - no-else-continue, - no-else-raise, - no-else-return, - no-init, - no-member, - no-name-in-module, # inconsistent results which depend on the availability of imports - no-self-use, - no-value-for-parameter, - non-iterator-returned, - not-a-mapping, - not-an-iterable, - not-callable, - old-style-class, - pointless-statement, - pointless-string-statement, - possibly-unused-variable, - protected-access, - raise-missing-from, # Python 2.x does not support raise from - redefined-argument-from-local, - redefined-builtin, - redefined-outer-name, - redefined-variable-type, - reimported, - relative-beyond-top-level, # https://github.com/PyCQA/pylint/issues/2967 - signature-differs, - simplifiable-if-expression, - simplifiable-if-statement, - subprocess-popen-preexec-fn, - super-init-not-called, - super-with-arguments, # Python 2.x does not support super without arguments - superfluous-parens, - too-few-public-methods, - too-many-ancestors, # inconsistent results 
between python 3.6 and 3.7+ - too-many-arguments, - too-many-boolean-expressions, - too-many-branches, - too-many-function-args, - too-many-instance-attributes, - too-many-lines, - too-many-locals, - too-many-nested-blocks, - too-many-public-methods, - too-many-return-statements, - too-many-statements, - trailing-comma-tuple, - trailing-comma-tuple, - try-except-raise, - unbalanced-tuple-unpacking, - undefined-loop-variable, - unexpected-keyword-arg, - ungrouped-imports, - unidiomatic-typecheck, - unnecessary-pass, - unsubscriptable-object, - unsupported-assignment-operation, - unsupported-delete-operation, - unsupported-membership-test, - unused-argument, - unused-import, - unused-variable, - useless-object-inheritance, - useless-return, - useless-super-delegation, - wrong-import-order, - wrong-import-position, - -[BASIC] - -bad-names= - _, - bar, - baz, - foo, - tata, - toto, - tutu, - -good-names= - ex, - i, - j, - k, - Run, - -[TYPECHECK] - -ignored-modules= - _MovedItems, diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg deleted file mode 100644 index a1275aa981..0000000000 --- a/test/lib/ansible_test/_data/sanity/pylint/config/default.cfg +++ /dev/null @@ -1,148 +0,0 @@ -[MESSAGES CONTROL] - -disable= - import-outside-toplevel, # common pattern in ansible related code - abstract-method, - access-member-before-definition, - arguments-differ, - assignment-from-no-return, - assignment-from-none, - attribute-defined-outside-init, - bad-continuation, - bad-indentation, - bad-mcs-classmethod-argument, - broad-except, - c-extension-no-member, - cell-var-from-loop, - chained-comparison, - comparison-with-callable, - consider-iterating-dictionary, - consider-merging-isinstance, - consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - consider-using-dict-items, - consider-using-enumerate, - consider-using-get, - consider-using-in, - consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - consider-using-ternary, - consider-using-with, - cyclic-import, # consistent results require running with --jobs 1 and testing all files - deprecated-lambda, - deprecated-method, - deprecated-module, - duplicate-code, # consistent results require running with --jobs 1 and testing all files - eval-used, - exec-used, - expression-not-assigned, - fixme, - function-redefined, - global-statement, - global-variable-undefined, - import-error, # inconsistent results which depend on the availability of imports - import-self, - inconsistent-return-statements, - invalid-envvar-default, - invalid-name, - invalid-sequence-index, - keyword-arg-before-vararg, - len-as-condition, - line-too-long, - literal-comparison, - locally-disabled, - method-hidden, - misplaced-comparison-constant, - missing-docstring, - no-else-break, - no-else-continue, - no-else-raise, - no-else-return, - no-init, - no-member, - no-name-in-module, # inconsistent results which depend on the availability of imports - no-self-use, - no-value-for-parameter, - non-iterator-returned, - not-a-mapping, - not-an-iterable, - not-callable, - old-style-class, - pointless-statement, - pointless-string-statement, - possibly-unused-variable, - protected-access, - raise-missing-from, # Python 2.x does not support raise from - redefined-argument-from-local, - redefined-builtin, - redefined-outer-name, - redefined-variable-type, - reimported, - relative-import, - signature-differs, - 
simplifiable-if-expression, - simplifiable-if-statement, - subprocess-popen-preexec-fn, - super-init-not-called, - super-with-arguments, # Python 2.x does not support super without arguments - superfluous-parens, - too-few-public-methods, - too-many-ancestors, # inconsistent results between python 3.6 and 3.7+ - too-many-arguments, - too-many-boolean-expressions, - too-many-branches, - too-many-function-args, - too-many-instance-attributes, - too-many-lines, - too-many-locals, - too-many-nested-blocks, - too-many-public-methods, - too-many-return-statements, - too-many-statements, - trailing-comma-tuple, - try-except-raise, - unbalanced-tuple-unpacking, - undefined-loop-variable, - unexpected-keyword-arg, - ungrouped-imports, - unidiomatic-typecheck, - unnecessary-pass, - unsubscriptable-object, - unsupported-assignment-operation, - unsupported-delete-operation, - unsupported-membership-test, - unused-argument, - unused-import, - unused-variable, - useless-object-inheritance, - useless-return, - useless-super-delegation, - wrong-import-order, - wrong-import-position, - -[BASIC] - -bad-names= - _, - bar, - baz, - foo, - tata, - toto, - tutu, - -good-names= - ex, - i, - j, - k, - Run, - -[TYPECHECK] - -ignored-modules= - _MovedItems, - -[IMPORTS] - -preferred-modules = - distutils.version:ansible.module_utils.compat.version, diff --git a/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg b/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg deleted file mode 100644 index bcf9549fd7..0000000000 --- a/test/lib/ansible_test/_data/sanity/pylint/config/sanity.cfg +++ /dev/null @@ -1,55 +0,0 @@ -[MESSAGES CONTROL] - -disable= - consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support - cyclic-import, # consistent results require running with --jobs 1 and testing all files - duplicate-code, # consistent results require running with --jobs 1 and testing all files - import-error, # inconsistent results which depend on the availability of imports - import-outside-toplevel, # common pattern in ansible related code - missing-docstring, - no-name-in-module, # inconsistent results which depend on the availability of imports - raise-missing-from, # Python 2.x does not support raise from - super-with-arguments, # Python 2.x does not support super without arguments - too-few-public-methods, - too-many-ancestors, # inconsistent results between python 3.6 and 3.7+ - too-many-arguments, - too-many-branches, - too-many-instance-attributes, - too-many-lines, - too-many-locals, - too-many-nested-blocks, - too-many-return-statements, - too-many-statements, - unused-import, # pylint does not understand PEP 484 type hints - -[BASIC] - -bad-names= - _, - bar, - baz, - foo, - tata, - toto, - tutu, - -good-names= - __metaclass__, - C, - e, - ex, - f, - i, - j, - k, - Run, - -module-rgx=[a-z_][a-z0-9_-]{2,40}$ -method-rgx=[a-z_][a-z0-9_]{2,40}$ -function-rgx=[a-z_][a-z0-9_]{2,40}$ - -[IMPORTS] - -preferred-modules = - distutils.version:ansible.module_utils.compat.version, diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py deleted file mode 100644 index e39e5214bf..0000000000 --- a/test/lib/ansible_test/_data/sanity/pylint/plugins/deprecated.py +++ /dev/null @@ -1,258 +0,0 @@ -# (c) 2018, Matt Martz -# GNU General Public License v3.0+ (see COPYING or
https://www.gnu.org/licenses/gpl-3.0.txt) -# -*- coding: utf-8 -*- -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import datetime -import re - -import astroid - -from pylint.interfaces import IAstroidChecker -from pylint.checkers import BaseChecker -from pylint.checkers.utils import check_messages - -from ansible.module_utils.compat.version import LooseVersion -from ansible.module_utils.six import string_types -from ansible.release import __version__ as ansible_version_raw -from ansible.utils.version import SemanticVersion - -MSGS = { - 'E9501': ("Deprecated version (%r) found in call to Display.deprecated " - "or AnsibleModule.deprecate", - "ansible-deprecated-version", - "Used when a call to Display.deprecated specifies a version " - "less than or equal to the current version of Ansible", - {'minversion': (2, 6)}), - 'E9502': ("Display.deprecated call without a version or date", - "ansible-deprecated-no-version", - "Used when a call to Display.deprecated does not specify a " - "version or date", - {'minversion': (2, 6)}), - 'E9503': ("Invalid deprecated version (%r) found in call to " - "Display.deprecated or AnsibleModule.deprecate", - "ansible-invalid-deprecated-version", - "Used when a call to Display.deprecated specifies an invalid " - "Ansible version number", - {'minversion': (2, 6)}), - 'E9504': ("Deprecated version (%r) found in call to Display.deprecated " - "or AnsibleModule.deprecate", - "collection-deprecated-version", - "Used when a call to Display.deprecated specifies a collection " - "version less than or equal to the current version of this " - "collection", - {'minversion': (2, 6)}), - 'E9505': ("Invalid deprecated version (%r) found in call to " - "Display.deprecated or AnsibleModule.deprecate", - "collection-invalid-deprecated-version", - "Used when a call to Display.deprecated specifies an invalid " - "collection version number", - {'minversion': (2, 6)}), - 'E9506': ("No collection name found in call to Display.deprecated or " - "AnsibleModule.deprecate", - "ansible-deprecated-no-collection-name", - "The current collection name in format `namespace.name` must " - "be provided as collection_name when calling Display.deprecated " - "or AnsibleModule.deprecate (`ansible.builtin` for ansible-core)", - {'minversion': (2, 6)}), - 'E9507': ("Wrong collection name (%r) found in call to " - "Display.deprecated or AnsibleModule.deprecate", - "wrong-collection-deprecated", - "The name of the current collection must be passed to the " - "Display.deprecated resp. AnsibleModule.deprecate calls " - "(`ansible.builtin` for ansible-core)", - {'minversion': (2, 6)}), - 'E9508': ("Expired date (%r) found in call to Display.deprecated " - "or AnsibleModule.deprecate", - "ansible-deprecated-date", - "Used when a call to Display.deprecated specifies a date " - "before today", - {'minversion': (2, 6)}), - 'E9509': ("Invalid deprecated date (%r) found in call to " - "Display.deprecated or AnsibleModule.deprecate", - "ansible-invalid-deprecated-date", - "Used when a call to Display.deprecated specifies an invalid " - "date. 
It must be a string in format `YYYY-MM-DD` (ISO 8601)", - {'minversion': (2, 6)}), - 'E9510': ("Both version and date found in call to " - "Display.deprecated or AnsibleModule.deprecate", - "ansible-deprecated-both-version-and-date", - "Only one of version and date must be specified", - {'minversion': (2, 6)}), - 'E9511': ("Removal version (%r) must be a major release, not a minor or " - "patch release (see the specification at https://semver.org/)", - "removal-version-must-be-major", - "Used when a call to Display.deprecated or " - "AnsibleModule.deprecate for a collection specifies a version " - "which is not of the form x.0.0", - {'minversion': (2, 6)}), -} - - -ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3])) - - -def _get_expr_name(node): - """Function to get either ``attrname`` or ``name`` from ``node.func.expr`` - - Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated`` - """ - try: - return node.func.expr.attrname - except AttributeError: - # If this fails too, we'll let it raise, the caller should catch it - return node.func.expr.name - - -def parse_isodate(value): - msg = 'Expected ISO 8601 date string (YYYY-MM-DD)' - if not isinstance(value, string_types): - raise ValueError(msg) - # From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions, - # we have to do things manually. - if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value): - raise ValueError(msg) - try: - return datetime.datetime.strptime(value, '%Y-%m-%d').date() - except ValueError: - raise ValueError(msg) - - -class AnsibleDeprecatedChecker(BaseChecker): - """Checks for Display.deprecated calls to ensure that the ``version`` - has not passed or met the time for removal - """ - - __implements__ = (IAstroidChecker,) - name = 'deprecated' - msgs = MSGS - - options = ( - ('collection-name', { - 'default': None, - 'type': 'string', - 'metavar': '', - 'help': 'The collection\'s name used to check collection names in deprecations.', - }), - ('collection-version', { - 'default': None, - 'type': 'string', - 'metavar': '', - 'help': 'The collection\'s version number used to check deprecations.', - }), - ) - - def __init__(self, *args, **kwargs): - self.collection_version = None - self.collection_name = None - super(AnsibleDeprecatedChecker, self).__init__(*args, **kwargs) - - def set_option(self, optname, value, action=None, optdict=None): - super(AnsibleDeprecatedChecker, self).set_option(optname, value, action, optdict) - if optname == 'collection-version' and value is not None: - self.collection_version = SemanticVersion(self.config.collection_version) - if optname == 'collection-name' and value is not None: - self.collection_name = self.config.collection_name - - def _check_date(self, node, date): - if not isinstance(date, str): - self.add_message('ansible-invalid-deprecated-date', node=node, args=(date,)) - return - - try: - date_parsed = parse_isodate(date) - except ValueError: - self.add_message('ansible-invalid-deprecated-date', node=node, args=(date,)) - return - - if date_parsed < datetime.date.today(): - self.add_message('ansible-deprecated-date', node=node, args=(date,)) - - def _check_version(self, node, version, collection_name): - if not isinstance(version, (str, float)): - self.add_message('ansible-invalid-deprecated-version', node=node, args=(version,)) - return - - version_no = str(version) - - if collection_name == 'ansible.builtin': - # Ansible-base - try: - if not version_no: - raise ValueError('Version string should not be empty') - loose_version =
LooseVersion(str(version_no)) - if ANSIBLE_VERSION >= loose_version: - self.add_message('ansible-deprecated-version', node=node, args=(version,)) - except ValueError: - self.add_message('ansible-invalid-deprecated-version', node=node, args=(version,)) - elif collection_name: - # Collections - try: - if not version_no: - raise ValueError('Version string should not be empty') - semantic_version = SemanticVersion(version_no) - if collection_name == self.collection_name and self.collection_version is not None: - if self.collection_version >= semantic_version: - self.add_message('collection-deprecated-version', node=node, args=(version,)) - if semantic_version.major != 0 and (semantic_version.minor != 0 or semantic_version.patch != 0): - self.add_message('removal-version-must-be-major', node=node, args=(version,)) - except ValueError: - self.add_message('collection-invalid-deprecated-version', node=node, args=(version,)) - - @check_messages(*(MSGS.keys())) - def visit_call(self, node): - version = None - date = None - collection_name = None - try: - if (node.func.attrname == 'deprecated' and 'display' in _get_expr_name(node) or - node.func.attrname == 'deprecate' and _get_expr_name(node)): - if node.keywords: - for keyword in node.keywords: - if len(node.keywords) == 1 and keyword.arg is None: - # This is likely a **kwargs splat - return - if keyword.arg == 'version': - if isinstance(keyword.value.value, astroid.Name): - # This is likely a variable - return - version = keyword.value.value - if keyword.arg == 'date': - if isinstance(keyword.value.value, astroid.Name): - # This is likely a variable - return - date = keyword.value.value - if keyword.arg == 'collection_name': - if isinstance(keyword.value.value, astroid.Name): - # This is likely a variable - return - collection_name = keyword.value.value - if not version and not date: - try: - version = node.args[1].value - except IndexError: - self.add_message('ansible-deprecated-no-version', node=node) - return - if version and date: - self.add_message('ansible-deprecated-both-version-and-date', node=node) - - if collection_name: - this_collection = collection_name == (self.collection_name or 'ansible.builtin') - if not this_collection: - self.add_message('wrong-collection-deprecated', node=node, args=(collection_name,)) - elif self.collection_name is not None: - self.add_message('ansible-deprecated-no-collection-name', node=node) - - if date: - self._check_date(node, date) - elif version: - self._check_version(node, version, collection_name) - except AttributeError: - # Not the type of node we are interested in - pass - - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(AnsibleDeprecatedChecker(linter)) diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py deleted file mode 100644 index 1c22a08b97..0000000000 --- a/test/lib/ansible_test/_data/sanity/pylint/plugins/string_format.py +++ /dev/null @@ -1,84 +0,0 @@ -# (c) 2018, Matt Martz -# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) -# -*- coding: utf-8 -*- -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import astroid -from pylint.interfaces import IAstroidChecker -from pylint.checkers import BaseChecker -from pylint.checkers import utils -from pylint.checkers.utils import check_messages -try: - from pylint.checkers.utils import parse_format_method_string 
-except ImportError: - # noinspection PyUnresolvedReferences - from pylint.checkers.strings import parse_format_method_string - -MSGS = { - 'E9305': ("Format string contains automatic field numbering " - "specification", - "ansible-format-automatic-specification", - "Used when a PEP 3101 format string contains automatic " - "field numbering (e.g. '{}').", - {'minversion': (2, 6)}), - 'E9390': ("bytes object has no .format attribute", - "ansible-no-format-on-bytestring", - "Used when a bytestring was used as a PEP 3101 format string " - "as Python3 bytestrings do not have a .format attribute", - {'minversion': (3, 0)}), -} - - -class AnsibleStringFormatChecker(BaseChecker): - """Checks string formatting operations to ensure that the format string - is valid and the arguments match the format string. - """ - - __implements__ = (IAstroidChecker,) - name = 'string' - msgs = MSGS - - @check_messages(*(MSGS.keys())) - def visit_call(self, node): - func = utils.safe_infer(node.func) - if (isinstance(func, astroid.BoundMethod) - and isinstance(func.bound, astroid.Instance) - and func.bound.name in ('str', 'unicode', 'bytes')): - if func.name == 'format': - self._check_new_format(node, func) - - def _check_new_format(self, node, func): - """ Check the new string formatting """ - if (isinstance(node.func, astroid.Attribute) - and not isinstance(node.func.expr, astroid.Const)): - return - try: - strnode = next(func.bound.infer()) - except astroid.InferenceError: - return - if not isinstance(strnode, astroid.Const): - return - - if isinstance(strnode.value, bytes): - self.add_message('ansible-no-format-on-bytestring', node=node) - return - if not isinstance(strnode.value, str): - return - - if node.starargs or node.kwargs: - return - try: - num_args = parse_format_method_string(strnode.value)[1] - except utils.IncompleteFormatString: - return - - if num_args: - self.add_message('ansible-format-automatic-specification', - node=node) - return - - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(AnsibleStringFormatChecker(linter)) diff --git a/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py deleted file mode 100644 index 3d9877e6a3..0000000000 --- a/test/lib/ansible_test/_data/sanity/pylint/plugins/unwanted.py +++ /dev/null @@ -1,240 +0,0 @@ -"""A plugin for pylint to identify imports and functions which should not be used.""" -from __future__ import (absolute_import, division, print_function) - -__metaclass__ = type - -import os - -import astroid - -from pylint.checkers import BaseChecker -from pylint.interfaces import IAstroidChecker - -ANSIBLE_TEST_MODULES_PATH = os.environ['ANSIBLE_TEST_MODULES_PATH'] -ANSIBLE_TEST_MODULE_UTILS_PATH = os.environ['ANSIBLE_TEST_MODULE_UTILS_PATH'] - - -class UnwantedEntry: - """Defines an unwanted import.""" - def __init__(self, alternative, modules_only=False, names=None, ignore_paths=None): - """ - :type alternative: str - :type modules_only: bool - :type names: tuple[str] | None - :type ignore_paths: tuple[str] | None - """ - self.alternative = alternative - self.modules_only = modules_only - self.names = set(names) if names else set() - self.ignore_paths = ignore_paths - - def applies_to(self, path, name=None): - """ - :type path: str - :type name: str | None - :rtype: bool - """ - if self.names: - if not name: - return False - - if name not in self.names: - return False - - if self.ignore_paths and any(path.endswith(ignore_path) for 
ignore_path in self.ignore_paths): - return False - - if self.modules_only: - return is_module_path(path) - - return True - - -def is_module_path(path): - """ - :type path: str - :rtype: bool - """ - return path.startswith(ANSIBLE_TEST_MODULES_PATH) or path.startswith(ANSIBLE_TEST_MODULE_UTILS_PATH) - - -class AnsibleUnwantedChecker(BaseChecker): - """Checker for unwanted imports and functions.""" - __implements__ = (IAstroidChecker,) - - name = 'unwanted' - - BAD_IMPORT = 'ansible-bad-import' - BAD_IMPORT_FROM = 'ansible-bad-import-from' - BAD_FUNCTION = 'ansible-bad-function' - BAD_MODULE_IMPORT = 'ansible-bad-module-import' - - msgs = dict( - E5101=('Import %s instead of %s', - BAD_IMPORT, - 'Identifies imports which should not be used.'), - E5102=('Import %s from %s instead of %s', - BAD_IMPORT_FROM, - 'Identifies imports which should not be used.'), - E5103=('Call %s instead of %s', - BAD_FUNCTION, - 'Identifies functions which should not be used.'), - E5104=('Import external package or ansible.module_utils not %s', - BAD_MODULE_IMPORT, - 'Identifies imports which should not be used.'), - ) - - unwanted_imports = dict( - # Additional imports that we may want to start checking: - # boto=UnwantedEntry('boto3', modules_only=True), - # requests=UnwantedEntry('ansible.module_utils.urls', modules_only=True), - # urllib=UnwantedEntry('ansible.module_utils.urls', modules_only=True), - - # see https://docs.python.org/2/library/urllib2.html - urllib2=UnwantedEntry('ansible.module_utils.urls', - ignore_paths=( - '/lib/ansible/module_utils/urls.py', - )), - - # see https://docs.python.org/3/library/collections.abc.html - collections=UnwantedEntry('ansible.module_utils.common._collections_compat', - ignore_paths=( - '/lib/ansible/module_utils/common/_collections_compat.py', - ), - names=( - 'MappingView', - 'ItemsView', - 'KeysView', - 'ValuesView', - 'Mapping', 'MutableMapping', - 'Sequence', 'MutableSequence', - 'Set', 'MutableSet', - 'Container', - 'Hashable', - 'Sized', - 'Callable', - 'Iterable', - 'Iterator', - )), - ) - - unwanted_functions = { - # see https://docs.python.org/3/library/tempfile.html#tempfile.mktemp - 'tempfile.mktemp': UnwantedEntry('tempfile.mkstemp'), - - 'sys.exit': UnwantedEntry('exit_json or fail_json', - ignore_paths=( - '/lib/ansible/module_utils/basic.py', - '/lib/ansible/modules/async_wrapper.py', - ), - modules_only=True), - - 'builtins.print': UnwantedEntry('module.log or module.debug', - ignore_paths=( - '/lib/ansible/module_utils/basic.py', - ), - modules_only=True), - } - - def visit_import(self, node): - """ - :type node: astroid.node_classes.Import - """ - for name in node.names: - self._check_import(node, name[0]) - - def visit_importfrom(self, node): - """ - :type node: astroid.node_classes.ImportFrom - """ - self._check_importfrom(node, node.modname, node.names) - - def visit_attribute(self, node): - """ - :type node: astroid.node_classes.Attribute - """ - last_child = node.last_child() - - # this is faster than using type inference and will catch the most common cases - if not isinstance(last_child, astroid.node_classes.Name): - return - - module = last_child.name - - entry = self.unwanted_imports.get(module) - - if entry and entry.names: - if entry.applies_to(self.linter.current_file, node.attrname): - self.add_message(self.BAD_IMPORT_FROM, args=(node.attrname, entry.alternative, module), node=node) - - def visit_call(self, node): - """ - :type node: astroid.node_classes.Call - """ - try: - for i in node.func.inferred(): - func = None - - if 
isinstance(i, astroid.scoped_nodes.FunctionDef) and isinstance(i.parent, astroid.scoped_nodes.Module): - func = '%s.%s' % (i.parent.name, i.name) - - if not func: - continue - - entry = self.unwanted_functions.get(func) - - if entry and entry.applies_to(self.linter.current_file): - self.add_message(self.BAD_FUNCTION, args=(entry.alternative, func), node=node) - except astroid.exceptions.InferenceError: - pass - - def _check_import(self, node, modname): - """ - :type node: astroid.node_classes.Import - :type modname: str - """ - self._check_module_import(node, modname) - - entry = self.unwanted_imports.get(modname) - - if not entry: - return - - if entry.applies_to(self.linter.current_file): - self.add_message(self.BAD_IMPORT, args=(entry.alternative, modname), node=node) - - def _check_importfrom(self, node, modname, names): - """ - :type node: astroid.node_classes.ImportFrom - :type modname: str - :type names: list[str] - """ - self._check_module_import(node, modname) - - entry = self.unwanted_imports.get(modname) - - if not entry: - return - - for name in names: - if entry.applies_to(self.linter.current_file, name[0]): - self.add_message(self.BAD_IMPORT_FROM, args=(name[0], entry.alternative, modname), node=node) - - def _check_module_import(self, node, modname): - """ - :type node: astroid.node_classes.Import | astroid.node_classes.ImportFrom - :type modname: str - """ - if not is_module_path(self.linter.current_file): - return - - if modname == 'ansible.module_utils' or modname.startswith('ansible.module_utils.'): - return - - if modname == 'ansible' or modname.startswith('ansible.'): - self.add_message(self.BAD_MODULE_IMPORT, args=(modname,), node=node) - - -def register(linter): - """required method to auto register this checker """ - linter.register_checker(AnsibleUnwantedChecker(linter)) diff --git a/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt b/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt deleted file mode 100644 index 29588ddd86..0000000000 --- a/test/lib/ansible_test/_data/sanity/shellcheck/exclude.txt +++ /dev/null @@ -1,3 +0,0 @@ -SC1090 -SC1091 -SC2164 diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/main.py b/test/lib/ansible_test/_data/sanity/validate-modules/main.py deleted file mode 100755 index c1e2bdaaeb..0000000000 --- a/test/lib/ansible_test/_data/sanity/validate-modules/main.py +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -from validate_modules.main import main - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules b/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules deleted file mode 120000 index 11a5d8e18a..0000000000 --- a/test/lib/ansible_test/_data/sanity/validate-modules/validate-modules +++ /dev/null @@ -1 +0,0 @@ -main.py \ No newline at end of file diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py deleted file mode 100644 index d8ff2dc0d4..0000000000 --- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2015 Matt Martz -# Copyright (C) 2015 Rackspace US, Inc.
-# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type -__version__ = '0.0.1b' diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py deleted file mode 100644 index 9f4f5c474d..0000000000 --- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/main.py +++ /dev/null @@ -1,2425 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2015 Matt Martz -# Copyright (C) 2015 Rackspace US, Inc. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>.
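As a usage sketch for the deprecated checker removed above: the module code below is illustrative only (it is not part of this patch) and shows the kind of calls the E9501 and E9510 messages fire on, assuming the running ansible-core release is well past 2.9.

    from ansible.module_utils.basic import AnsibleModule

    module = AnsibleModule(argument_spec={})

    # E9501 ansible-deprecated-version: 2.9 is at or below the current ansible-core version
    module.deprecate('use new_name instead', version='2.9', collection_name='ansible.builtin')

    # E9510 ansible-deprecated-both-version-and-date: only one of version/date may be given
    module.deprecate('use new_name instead', version='2.99', date='2099-01-01', collection_name='ansible.builtin')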
-from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import abc -import argparse -import ast -import datetime -import json -import errno -import os -import re -import subprocess -import sys -import tempfile -import traceback - -from collections import OrderedDict -from contextlib import contextmanager -from ansible.module_utils.compat.version import StrictVersion, LooseVersion -from fnmatch import fnmatch - -import yaml - -from ansible import __version__ as ansible_version -from ansible.executor.module_common import REPLACER_WINDOWS -from ansible.module_utils.common._collections_compat import Mapping -from ansible.module_utils.common.parameters import DEFAULT_TYPE_VALIDATORS -from ansible.plugins.loader import fragment_loader -from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder -from ansible.utils.plugin_docs import REJECTLIST, add_collection_to_versions_and_dates, add_fragments, get_docstring -from ansible.utils.version import SemanticVersion - -from .module_args import AnsibleModuleImportError, AnsibleModuleNotInitialized, get_argument_spec - -from .schema import ansible_module_kwargs_schema, doc_schema, return_schema - -from .utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, is_empty, parse_yaml, parse_isodate -from voluptuous.humanize import humanize_error - -from ansible.module_utils.six import PY3, with_metaclass, string_types - -if PY3: - # Because there is no ast.TryExcept in Python 3 ast module - TRY_EXCEPT = ast.Try - # REPLACER_WINDOWS from ansible.executor.module_common is byte - # string but we need unicode for Python 3 - REPLACER_WINDOWS = REPLACER_WINDOWS.decode('utf-8') -else: - TRY_EXCEPT = ast.TryExcept - -REJECTLIST_DIRS = frozenset(('.git', 'test', '.github', '.idea')) -INDENT_REGEX = re.compile(r'([\t]*)') -TYPE_REGEX = re.compile(r'.*(if|or)(\s+[^"\']*|\s+)(? 1: - self.reporter.error( - path=self.object_path, - code='use-short-gplv3-license', - msg='Found old style GPLv3 license header: ' - 'https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html#copyright' - ) - - def _check_for_subprocess(self): - for child in self.ast.body: - if isinstance(child, ast.Import): - if child.names[0].name == 'subprocess': - for line_no, line in enumerate(self.text.splitlines()): - sp_match = SUBPROCESS_REGEX.search(line) - if sp_match: - self.reporter.error( - path=self.object_path, - code='use-run-command-not-popen', - msg=('subprocess.Popen call found. Should be module.run_command'), - line=(line_no + 1), - column=(sp_match.span()[0] + 1) - ) - - def _check_for_os_call(self): - if 'os.call' in self.text: - for line_no, line in enumerate(self.text.splitlines()): - os_call_match = OS_CALL_REGEX.search(line) - if os_call_match: - self.reporter.error( - path=self.object_path, - code='use-run-command-not-os-call', - msg=('os.call() call found. 
Should be module.run_command'), - line=(line_no + 1), - column=(os_call_match.span()[0] + 1) - ) - - def _find_rejectlist_imports(self): - for child in self.ast.body: - names = [] - if isinstance(child, ast.Import): - names.extend(child.names) - elif isinstance(child, TRY_EXCEPT): - bodies = child.body - for handler in child.handlers: - bodies.extend(handler.body) - for grandchild in bodies: - if isinstance(grandchild, ast.Import): - names.extend(grandchild.names) - for name in names: - # TODO: Add line/col - for rejectlist_import, options in REJECTLIST_IMPORTS.items(): - if re.search(rejectlist_import, name.name): - new_only = options['new_only'] - if self._is_new_module() and new_only: - self.reporter.error( - path=self.object_path, - **options['error'] - ) - elif not new_only: - self.reporter.error( - path=self.object_path, - **options['error'] - ) - - def _find_module_utils(self): - linenos = [] - found_basic = False - for child in self.ast.body: - if isinstance(child, (ast.Import, ast.ImportFrom)): - names = [] - try: - names.append(child.module) - if child.module.endswith('.basic'): - found_basic = True - except AttributeError: - pass - names.extend([n.name for n in child.names]) - - if [n for n in names if n.startswith('ansible.module_utils')]: - linenos.append(child.lineno) - - for name in child.names: - if ('module_utils' in getattr(child, 'module', '') and - isinstance(name, ast.alias) and - name.name == '*'): - msg = ( - 'module-utils-specific-import', - ('module_utils imports should import specific ' - 'components, not "*"') - ) - if self._is_new_module(): - self.reporter.error( - path=self.object_path, - code=msg[0], - msg=msg[1], - line=child.lineno - ) - else: - self.reporter.warning( - path=self.object_path, - code=msg[0], - msg=msg[1], - line=child.lineno - ) - - if (isinstance(name, ast.alias) and - name.name == 'basic'): - found_basic = True - - if not found_basic: - self.reporter.warning( - path=self.object_path, - code='missing-module-utils-basic-import', - msg='Did not find "ansible.module_utils.basic" import' - ) - - return linenos - - def _get_first_callable(self): - linenos = [] - for child in self.ast.body: - if isinstance(child, (ast.FunctionDef, ast.ClassDef)): - linenos.append(child.lineno) - - return min(linenos) - - def _find_has_import(self): - for child in self.ast.body: - found_try_except_import = False - found_has = False - if isinstance(child, TRY_EXCEPT): - bodies = child.body - for handler in child.handlers: - bodies.extend(handler.body) - for grandchild in bodies: - if isinstance(grandchild, ast.Import): - found_try_except_import = True - if isinstance(grandchild, ast.Assign): - for target in grandchild.targets: - if not isinstance(target, ast.Name): - continue - if target.id.lower().startswith('has_'): - found_has = True - if found_try_except_import and not found_has: - # TODO: Add line/col - self.reporter.warning( - path=self.object_path, - code='try-except-missing-has', - msg='Found Try/Except block without HAS_ assignment' - ) - - def _ensure_imports_below_docs(self, doc_info, first_callable): - try: - min_doc_line = min( - [doc_info[key]['lineno'] for key in doc_info if doc_info[key]['lineno']] - ) - except ValueError: - # We can't perform this validation, as there are no DOCs provided at all - return - - max_doc_line = max( - [doc_info[key]['end_lineno'] for key in doc_info if doc_info[key]['end_lineno']] - ) - - import_lines = [] - - for child in self.ast.body: - if isinstance(child, (ast.Import, ast.ImportFrom)): - if isinstance(child, 
ast.ImportFrom) and child.module == '__future__': - # allowed from __future__ imports - for future_import in child.names: - if future_import.name not in self.ACCEPTLIST_FUTURE_IMPORTS: - self.reporter.error( - path=self.object_path, - code='illegal-future-imports', - msg=('Only the following from __future__ imports are allowed: %s' - % ', '.join(self.ACCEPTLIST_FUTURE_IMPORTS)), - line=child.lineno - ) - break - else: # for-else. If we didn't find a problem and break out of the loop, then this is a legal import - continue - import_lines.append(child.lineno) - if child.lineno < min_doc_line: - self.reporter.error( - path=self.object_path, - code='import-before-documentation', - msg=('Import found before documentation variables. ' - 'All imports must appear below ' - 'DOCUMENTATION/EXAMPLES/RETURN.'), - line=child.lineno - ) - break - elif isinstance(child, TRY_EXCEPT): - bodies = child.body - for handler in child.handlers: - bodies.extend(handler.body) - for grandchild in bodies: - if isinstance(grandchild, (ast.Import, ast.ImportFrom)): - import_lines.append(grandchild.lineno) - if grandchild.lineno < min_doc_line: - self.reporter.error( - path=self.object_path, - code='import-before-documentation', - msg=('Import found before documentation ' - 'variables. All imports must appear below ' - 'DOCUMENTATION/EXAMPLES/RETURN.'), - line=child.lineno - ) - break - - for import_line in import_lines: - if not (max_doc_line < import_line < first_callable): - msg = ( - 'import-placement', - ('Imports should be directly below DOCUMENTATION/EXAMPLES/' - 'RETURN.') - ) - if self._is_new_module(): - self.reporter.error( - path=self.object_path, - code=msg[0], - msg=msg[1], - line=import_line - ) - else: - self.reporter.warning( - path=self.object_path, - code=msg[0], - msg=msg[1], - line=import_line - ) - - def _validate_ps_replacers(self): - # loop all (for/else + error) - # get module list for each - # check "shape" of each module name - - module_requires = r'(?im)^#\s*requires\s+\-module(?:s?)\s*(Ansible\.ModuleUtils\..+)' - csharp_requires = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*(Ansible\..+)' - found_requires = False - - for req_stmt in re.finditer(module_requires, self.text): - found_requires = True - # this will bomb on dictionary format - "don't do that" - module_list = [x.strip() for x in req_stmt.group(1).split(',')] - if len(module_list) > 1: - self.reporter.error( - path=self.object_path, - code='multiple-utils-per-requires', - msg='Ansible.ModuleUtils requirements do not support multiple modules per statement: "%s"' % req_stmt.group(0) - ) - continue - - module_name = module_list[0] - - if module_name.lower().endswith('.psm1'): - self.reporter.error( - path=self.object_path, - code='invalid-requires-extension', - msg='Module #Requires should not end in .psm1: "%s"' % module_name - ) - - for req_stmt in re.finditer(csharp_requires, self.text): - found_requires = True - # this will bomb on dictionary format - "don't do that" - module_list = [x.strip() for x in req_stmt.group(1).split(',')] - if len(module_list) > 1: - self.reporter.error( - path=self.object_path, - code='multiple-csharp-utils-per-requires', - msg='Ansible C# util requirements do not support multiple utils per statement: "%s"' % req_stmt.group(0) - ) - continue - - module_name = module_list[0] - - if module_name.lower().endswith('.cs'): - self.reporter.error( - path=self.object_path, - code='illegal-extension-cs', - msg='Module #AnsibleRequires -CSharpUtil should not end in .cs: "%s"' % module_name - ) - - # also accept
the legacy #POWERSHELL_COMMON replacer signal - if not found_requires and REPLACER_WINDOWS not in self.text: - self.reporter.error( - path=self.object_path, - code='missing-module-utils-import-csharp-requirements', - msg='No Ansible.ModuleUtils or C# Ansible util requirements/imports found' - ) - - def _find_ps_docs_py_file(self): - if self.object_name in self.PS_DOC_REJECTLIST: - return - py_path = self.path.replace('.ps1', '.py') - if not os.path.isfile(py_path): - self.reporter.error( - path=self.object_path, - code='missing-python-doc', - msg='Missing python documentation file' - ) - return py_path - - def _get_docs(self): - docs = { - 'DOCUMENTATION': { - 'value': None, - 'lineno': 0, - 'end_lineno': 0, - }, - 'EXAMPLES': { - 'value': None, - 'lineno': 0, - 'end_lineno': 0, - }, - 'RETURN': { - 'value': None, - 'lineno': 0, - 'end_lineno': 0, - }, - } - for child in self.ast.body: - if isinstance(child, ast.Assign): - for grandchild in child.targets: - if not isinstance(grandchild, ast.Name): - continue - - if grandchild.id == 'DOCUMENTATION': - docs['DOCUMENTATION']['value'] = child.value.s - docs['DOCUMENTATION']['lineno'] = child.lineno - docs['DOCUMENTATION']['end_lineno'] = ( - child.lineno + len(child.value.s.splitlines()) - ) - elif grandchild.id == 'EXAMPLES': - docs['EXAMPLES']['value'] = child.value.s - docs['EXAMPLES']['lineno'] = child.lineno - docs['EXAMPLES']['end_lineno'] = ( - child.lineno + len(child.value.s.splitlines()) - ) - elif grandchild.id == 'RETURN': - docs['RETURN']['value'] = child.value.s - docs['RETURN']['lineno'] = child.lineno - docs['RETURN']['end_lineno'] = ( - child.lineno + len(child.value.s.splitlines()) - ) - - return docs - - def _validate_docs_schema(self, doc, schema, name, error_code): - # TODO: Add line/col - errors = [] - try: - schema(doc) - except Exception as e: - for error in e.errors: - error.data = doc - errors.extend(e.errors) - - for error in errors: - path = [str(p) for p in error.path] - - local_error_code = getattr(error, 'ansible_error_code', error_code) - - if isinstance(error.data, dict): - error_message = humanize_error(error.data, error) - else: - error_message = error - - if path: - combined_path = '%s.%s' % (name, '.'.join(path)) - else: - combined_path = name - - self.reporter.error( - path=self.object_path, - code=local_error_code, - msg='%s: %s' % (combined_path, error_message) - ) - - def _validate_docs(self): - doc_info = self._get_docs() - doc = None - documentation_exists = False - examples_exist = False - returns_exist = False - # We have three ways of marking deprecated/removed files. 
Have to check each one - # individually and then make sure they all agree - filename_deprecated_or_removed = False - deprecated = False - removed = False - doc_deprecated = None # doc legally might not exist - routing_says_deprecated = False - - if self.object_name.startswith('_') and not os.path.islink(self.object_path): - filename_deprecated_or_removed = True - - # We are testing a collection - if self.routing: - routing_deprecation = self.routing.get('plugin_routing', {}).get('modules', {}).get(self.name, {}).get('deprecation', {}) - if routing_deprecation: - # meta/runtime.yml says this is deprecated - routing_says_deprecated = True - deprecated = True - - if not removed: - if not bool(doc_info['DOCUMENTATION']['value']): - self.reporter.error( - path=self.object_path, - code='missing-documentation', - msg='No DOCUMENTATION provided' - ) - else: - documentation_exists = True - doc, errors, traces = parse_yaml( - doc_info['DOCUMENTATION']['value'], - doc_info['DOCUMENTATION']['lineno'], - self.name, 'DOCUMENTATION' - ) - if doc: - add_collection_to_versions_and_dates(doc, self.collection_name, is_module=True) - for error in errors: - self.reporter.error( - path=self.object_path, - code='documentation-syntax-error', - **error - ) - for trace in traces: - self.reporter.trace( - path=self.object_path, - tracebk=trace - ) - if not errors and not traces: - missing_fragment = False - with CaptureStd(): - try: - get_docstring(self.path, fragment_loader, verbose=True, - collection_name=self.collection_name, is_module=True) - except AssertionError: - fragment = doc['extends_documentation_fragment'] - self.reporter.error( - path=self.object_path, - code='missing-doc-fragment', - msg='DOCUMENTATION fragment missing: %s' % fragment - ) - missing_fragment = True - except Exception as e: - self.reporter.trace( - path=self.object_path, - tracebk=traceback.format_exc() - ) - self.reporter.error( - path=self.object_path, - code='documentation-error', - msg='Unknown DOCUMENTATION error, see TRACE: %s' % e - ) - - if not missing_fragment: - add_fragments(doc, self.object_path, fragment_loader=fragment_loader, is_module=True) - - if 'options' in doc and doc['options'] is None: - self.reporter.error( - path=self.object_path, - code='invalid-documentation-options', - msg='DOCUMENTATION.options must be a dictionary/hash when used', - ) - - if 'deprecated' in doc and doc.get('deprecated'): - doc_deprecated = True - doc_deprecation = doc['deprecated'] - documentation_collection = doc_deprecation.get('removed_from_collection') - if documentation_collection != self.collection_name: - self.reporter.error( - path=self.object_path, - code='deprecation-wrong-collection', - msg='"DOCUMENTATION.deprecation.removed_from_collection" must be the current collection name: %r vs.
%r' % ( - documentation_collection, self.collection_name) - ) - else: - doc_deprecated = False - - if os.path.islink(self.object_path): - # This module has an alias, which we can tell as it's a symlink - # Rather than checking for `module: $filename` we need to check against the true filename - self._validate_docs_schema( - doc, - doc_schema( - os.readlink(self.object_path).split('.')[0], - for_collection=bool(self.collection), - deprecated_module=deprecated, - ), - 'DOCUMENTATION', - 'invalid-documentation', - ) - else: - # This is the normal case - self._validate_docs_schema( - doc, - doc_schema( - self.object_name.split('.')[0], - for_collection=bool(self.collection), - deprecated_module=deprecated, - ), - 'DOCUMENTATION', - 'invalid-documentation', - ) - - if not self.collection: - existing_doc = self._check_for_new_args(doc) - self._check_version_added(doc, existing_doc) - - if not bool(doc_info['EXAMPLES']['value']): - self.reporter.error( - path=self.object_path, - code='missing-examples', - msg='No EXAMPLES provided' - ) - else: - _doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'], - doc_info['EXAMPLES']['lineno'], - self.name, 'EXAMPLES', load_all=True, - ansible_loader=True) - for error in errors: - self.reporter.error( - path=self.object_path, - code='invalid-examples', - **error - ) - for trace in traces: - self.reporter.trace( - path=self.object_path, - tracebk=trace - ) - - if not bool(doc_info['RETURN']['value']): - if self._is_new_module(): - self.reporter.error( - path=self.object_path, - code='missing-return', - msg='No RETURN provided' - ) - else: - self.reporter.warning( - path=self.object_path, - code='missing-return-legacy', - msg='No RETURN provided' - ) - else: - data, errors, traces = parse_yaml(doc_info['RETURN']['value'], - doc_info['RETURN']['lineno'], - self.name, 'RETURN') - if data: - add_collection_to_versions_and_dates(data, self.collection_name, is_module=True, return_docs=True) - self._validate_docs_schema(data, return_schema(for_collection=bool(self.collection)), - 'RETURN', 'return-syntax-error') - - for error in errors: - self.reporter.error( - path=self.object_path, - code='return-syntax-error', - **error - ) - for trace in traces: - self.reporter.trace( - path=self.object_path, - tracebk=trace - ) - - # Check for mismatched deprecation - if not self.collection: - mismatched_deprecation = True - if not (filename_deprecated_or_removed or removed or deprecated or doc_deprecated): - mismatched_deprecation = False - else: - if (filename_deprecated_or_removed and doc_deprecated): - mismatched_deprecation = False - if (filename_deprecated_or_removed and removed and not (documentation_exists or examples_exist or returns_exist)): - mismatched_deprecation = False - - if mismatched_deprecation: - self.reporter.error( - path=self.object_path, - code='deprecation-mismatch', - msg='Module deprecation/removed must agree in documentation, by prepending filename with' - ' "_", and setting DOCUMENTATION.deprecated for deprecation or by removing all' - ' documentation for removed' - ) - else: - # We are testing a collection - if self.object_name.startswith('_'): - self.reporter.error( - path=self.object_path, - code='collections-no-underscore-on-deprecation', - msg='Deprecated content in collections MUST NOT start with "_", update meta/runtime.yml instead', - ) - - if not (doc_deprecated == routing_says_deprecated): - # DOCUMENTATION.deprecated and meta/runtime.yml disagree - self.reporter.error( - path=self.object_path, - code='deprecation-mismatch', - 
msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree.' - ) - elif routing_says_deprecated: - # Both DOCUMENTATION.deprecated and meta/runtime.yml agree that the module is deprecated. - # Make sure they give the same version or date. - routing_date = routing_deprecation.get('removal_date') - routing_version = routing_deprecation.get('removal_version') - # The versions and dates in the module documentation are auto-tagged, so remove the tag - # to make comparison possible and to avoid confusing the user. - documentation_date = doc_deprecation.get('removed_at_date') - documentation_version = doc_deprecation.get('removed_in') - if not compare_dates(routing_date, documentation_date): - self.reporter.error( - path=self.object_path, - code='deprecation-mismatch', - msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree on removal date: %r vs. %r' % ( - routing_date, documentation_date) - ) - if routing_version != documentation_version: - self.reporter.error( - path=self.object_path, - code='deprecation-mismatch', - msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree on removal version: %r vs. %r' % ( - routing_version, documentation_version) - ) - - # In the future we should error if ANSIBLE_METADATA exists in a collection - - return doc_info, doc - - def _check_version_added(self, doc, existing_doc): - version_added_raw = doc.get('version_added') - try: - collection_name = doc.get('version_added_collection') - version_added = self._create_strict_version( - str(version_added_raw or '0.0'), - collection_name=collection_name) - except ValueError as e: - version_added = version_added_raw or '0.0' - if self._is_new_module() or version_added != 'historical': - # already reported during schema validation, except: - if version_added == 'historical': - self.reporter.error( - path=self.object_path, - code='module-invalid-version-added', - msg='version_added is not a valid version number: %r. Error: %s' % (version_added, e) - ) - return - - if existing_doc and str(version_added_raw) != str(existing_doc.get('version_added')): - self.reporter.error( - path=self.object_path, - code='module-incorrect-version-added', - msg='version_added should be %r. Currently %r' % (existing_doc.get('version_added'), version_added_raw) - ) - - if not self._is_new_module(): - return - - should_be = '.'.join(ansible_version.split('.')[:2]) - strict_ansible_version = self._create_strict_version(should_be, collection_name='ansible.builtin') - - if (version_added < strict_ansible_version or - strict_ansible_version < version_added): - self.reporter.error( - path=self.object_path, - code='module-incorrect-version-added', - msg='version_added should be %r. 
Currently %r' % (should_be, version_added_raw) - ) - - def _validate_ansible_module_call(self, docs): - try: - spec, kwargs = get_argument_spec(self.path, self.collection) - except AnsibleModuleNotInitialized: - self.reporter.error( - path=self.object_path, - code='ansible-module-not-initialized', - msg="Execution of the module did not result in initialization of AnsibleModule", - ) - return - except AnsibleModuleImportError as e: - self.reporter.error( - path=self.object_path, - code='import-error', - msg="Exception attempting to import module for argument_spec introspection, '%s'" % e - ) - self.reporter.trace( - path=self.object_path, - tracebk=traceback.format_exc() - ) - return - - schema = ansible_module_kwargs_schema(self.object_name.split('.')[0], for_collection=bool(self.collection)) - self._validate_docs_schema(kwargs, schema, 'AnsibleModule', 'invalid-ansiblemodule-schema') - - self._validate_argument_spec(docs, spec, kwargs) - - def _validate_list_of_module_args(self, name, terms, spec, context): - if terms is None: - return - if not isinstance(terms, (list, tuple)): - # This is already reported by schema checking - return - for check in terms: - if not isinstance(check, (list, tuple)): - # This is already reported by schema checking - continue - bad_term = False - for term in check: - if not isinstance(term, string_types): - msg = name - if context: - msg += " found in %s" % " -> ".join(context) - msg += " must contain strings in the lists or tuples; found value %r" % (term, ) - self.reporter.error( - path=self.object_path, - code=name + '-type', - msg=msg, - ) - bad_term = True - if bad_term: - continue - if len(set(check)) != len(check): - msg = name - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has repeated terms" - self.reporter.error( - path=self.object_path, - code=name + '-collision', - msg=msg, - ) - if not set(check) <= set(spec): - msg = name - if context: - msg += " found in %s" % " -> ".join(context) - msg += " contains terms which are not part of argument_spec: %s" % ", ".join(sorted(set(check).difference(set(spec)))) - self.reporter.error( - path=self.object_path, - code=name + '-unknown', - msg=msg, - ) - - def _validate_required_if(self, terms, spec, context, module): - if terms is None: - return - if not isinstance(terms, (list, tuple)): - # This is already reported by schema checking - return - for check in terms: - if not isinstance(check, (list, tuple)) or len(check) not in [3, 4]: - # This is already reported by schema checking - continue - if len(check) == 4 and not isinstance(check[3], bool): - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " must have fourth value omitted or of type bool; got %r" % (check[3], ) - self.reporter.error( - path=self.object_path, - code='required_if-is_one_of-type', - msg=msg, - ) - requirements = check[2] - if not isinstance(requirements, (list, tuple)): - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " must have third value (requirements) being a list or tuple; got type %r" % (requirements, ) - self.reporter.error( - path=self.object_path, - code='required_if-requirements-type', - msg=msg, - ) - continue - bad_term = False - for term in requirements: - if not isinstance(term, string_types): - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " must have only strings in third value (requirements); got %r" % (term, ) - self.reporter.error( - path=self.object_path, -
code='required_if-requirements-type', - msg=msg, - ) - bad_term = True - if bad_term: - continue - if len(set(requirements)) != len(requirements): - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has repeated terms in requirements" - self.reporter.error( - path=self.object_path, - code='required_if-requirements-collision', - msg=msg, - ) - if not set(requirements) <= set(spec): - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " contains terms in requirements which are not part of argument_spec: %s" % ", ".join(sorted(set(requirements).difference(set(spec)))) - self.reporter.error( - path=self.object_path, - code='required_if-requirements-unknown', - msg=msg, - ) - key = check[0] - if key not in spec: - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " must have its key %s in argument_spec" % key - self.reporter.error( - path=self.object_path, - code='required_if-unknown-key', - msg=msg, - ) - continue - if key in requirements: - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " contains its key %s in requirements" % key - self.reporter.error( - path=self.object_path, - code='required_if-key-in-requirements', - msg=msg, - ) - value = check[1] - if value is not None: - _type = spec[key].get('type', 'str') - if callable(_type): - _type_checker = _type - else: - _type_checker = DEFAULT_TYPE_VALIDATORS.get(_type) - try: - with CaptureStd(): - dummy = _type_checker(value) - except (Exception, SystemExit): - msg = "required_if" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has value %r which does not fit to %s's parameter type %r" % (value, key, _type) - self.reporter.error( - path=self.object_path, - code='required_if-value-type', - msg=msg, - ) - - def _validate_required_by(self, terms, spec, context): - if terms is None: - return - if not isinstance(terms, Mapping): - # This is already reported by schema checking - return - for key, value in terms.items(): - if isinstance(value, string_types): - value = [value] - if not isinstance(value, (list, tuple)): - # This is already reported by schema checking - continue - for term in value: - if not isinstance(term, string_types): - # This is already reported by schema checking - continue - if len(set(value)) != len(value) or key in value: - msg = "required_by" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has repeated terms" - self.reporter.error( - path=self.object_path, - code='required_by-collision', - msg=msg, - ) - if not set(value) <= set(spec) or key not in spec: - msg = "required_by" - if context: - msg += " found in %s" % " -> ".join(context) - msg += " contains terms which are not part of argument_spec: %s" % ", ".join(sorted(set(value).difference(set(spec)))) - self.reporter.error( - path=self.object_path, - code='required_by-unknown', - msg=msg, - ) - - def _validate_argument_spec(self, docs, spec, kwargs, context=None, last_context_spec=None): - if not self.analyze_arg_spec: - return - - if docs is None: - docs = {} - - if context is None: - context = [] - - if last_context_spec is None: - last_context_spec = kwargs - - try: - if not context: - add_fragments(docs, self.object_path, fragment_loader=fragment_loader, is_module=True) - except Exception: - # Cannot merge fragments - return - - # Use this to access type checkers later - module = NoArgsAnsibleModule({}) - - 
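To make the argument_spec checks below concrete, here is an illustrative spec (the option names are invented for this sketch) together with the messages the _validate_list_of_module_args calls on the next lines would report for it:

    # invented example, not taken from any module under test
    argument_spec = dict(
        name=dict(type='str'),
        path=dict(type='path'),
    )
    mutually_exclusive = [
        ['name', 'path'],  # valid: both terms exist in argument_spec and are unique
        ['name', 'name'],  # mutually_exclusive-collision: repeated terms
        ['name', 'paht'],  # mutually_exclusive-unknown: 'paht' is not in argument_spec
    ]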
self._validate_list_of_module_args('mutually_exclusive', last_context_spec.get('mutually_exclusive'), spec, context) - self._validate_list_of_module_args('required_together', last_context_spec.get('required_together'), spec, context) - self._validate_list_of_module_args('required_one_of', last_context_spec.get('required_one_of'), spec, context) - self._validate_required_if(last_context_spec.get('required_if'), spec, context, module) - self._validate_required_by(last_context_spec.get('required_by'), spec, context) - - provider_args = set() - args_from_argspec = set() - deprecated_args_from_argspec = set() - doc_options = docs.get('options', {}) - if doc_options is None: - doc_options = {} - for arg, data in spec.items(): - restricted_argument_names = ('message', 'syslog_facility') - if arg.lower() in restricted_argument_names: - msg = "Argument '%s' in argument_spec " % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += "must not be one of %s as it is used " \ "internally by Ansible Core Engine" % (",".join(restricted_argument_names)) - self.reporter.error( - path=self.object_path, - code='invalid-argument-name', - msg=msg, - ) - continue - if 'aliases' in data: - for al in data['aliases']: - if al.lower() in restricted_argument_names: - msg = "Argument alias '%s' in argument_spec " % al - if context: - msg += " found in %s" % " -> ".join(context) - msg += "must not be one of %s as it is used " \ "internally by Ansible Core Engine" % (",".join(restricted_argument_names)) - self.reporter.error( - path=self.object_path, - code='invalid-argument-name', - msg=msg, - ) - continue - - # Could this be a place where secrets are leaked? - # If it is type: path we know it's not a secret key as it's a file path. - # If it is type: bool it is more likely a flag indicating that something is secret, than an actual secret. - if all(( - data.get('no_log') is None, is_potential_secret_option(arg), - data.get('type') not in ("path", "bool"), data.get('choices') is None, - )): - msg = "Argument '%s' in argument_spec could be a secret, though doesn't have `no_log` set" % arg - if context: - msg += " found in %s" % " -> ".join(context) - self.reporter.error( - path=self.object_path, - code='no-log-needed', - msg=msg, - ) - - if not isinstance(data, dict): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " must be a dictionary/hash when used" - self.reporter.error( - path=self.object_path, - code='invalid-argument-spec', - msg=msg, - ) - continue - - removed_at_date = data.get('removed_at_date', None) - if removed_at_date is not None: - try: - if parse_isodate(removed_at_date, allow_date=False) < datetime.date.today(): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has a removed_at_date '%s' before today" % removed_at_date - self.reporter.error( - path=self.object_path, - code='deprecated-date', - msg=msg, - ) - except ValueError: - # This should only happen when removed_at_date is not in ISO format. Since schema - # validation already reported this as an error, don't report it a second time.
- pass - - deprecated_aliases = data.get('deprecated_aliases', None) - if deprecated_aliases is not None: - for deprecated_alias in deprecated_aliases: - if 'name' in deprecated_alias and 'date' in deprecated_alias: - try: - date = deprecated_alias['date'] - if parse_isodate(date, allow_date=False) < datetime.date.today(): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has deprecated aliases '%s' with removal date '%s' before today" % ( - deprecated_alias['name'], deprecated_alias['date']) - self.reporter.error( - path=self.object_path, - code='deprecated-date', - msg=msg, - ) - except ValueError: - # This should only happen when deprecated_alias['date'] is not in ISO format. Since - # schema validation already reported this as an error, don't report it a second - # time. - pass - - has_version = False - if self.collection and self.collection_version is not None: - compare_version = self.collection_version - version_of_what = "this collection (%s)" % self.collection_version_str - code_prefix = 'collection' - has_version = True - elif not self.collection: - compare_version = LOOSE_ANSIBLE_VERSION - version_of_what = "Ansible (%s)" % ansible_version - code_prefix = 'ansible' - has_version = True - - removed_in_version = data.get('removed_in_version', None) - if removed_in_version is not None: - try: - collection_name = data.get('removed_from_collection') - removed_in = self._create_version(str(removed_in_version), collection_name=collection_name) - if has_version and collection_name == self.collection_name and compare_version >= removed_in: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has a deprecated removed_in_version %r," % removed_in_version - msg += " i.e. the version is less than or equal to the current version of %s" % version_of_what - self.reporter.error( - path=self.object_path, - code=code_prefix + '-deprecated-version', - msg=msg, - ) - except ValueError as e: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has an invalid removed_in_version number %r: %s" % (removed_in_version, e) - self.reporter.error( - path=self.object_path, - code='invalid-deprecated-version', - msg=msg, - ) - except TypeError: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has an invalid removed_in_version number %r: " % (removed_in_version, ) - msg += " error while comparing to version of %s" % version_of_what - self.reporter.error( - path=self.object_path, - code='invalid-deprecated-version', - msg=msg, - ) - - if deprecated_aliases is not None: - for deprecated_alias in deprecated_aliases: - if 'name' in deprecated_alias and 'version' in deprecated_alias: - try: - collection_name = deprecated_alias.get('collection_name') - version = self._create_version(str(deprecated_alias['version']), collection_name=collection_name) - if has_version and collection_name == self.collection_name and compare_version >= version: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has deprecated aliases '%s' with removal in version %r," % ( - deprecated_alias['name'], deprecated_alias['version']) - msg += " i.e. 
the version is less than or equal to the current version of %s" % version_of_what - self.reporter.error( - path=self.object_path, - code=code_prefix + '-deprecated-version', - msg=msg, - ) - except ValueError as e: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has deprecated aliases '%s' with invalid removal version %r: %s" % ( - deprecated_alias['name'], deprecated_alias['version'], e) - self.reporter.error( - path=self.object_path, - code='invalid-deprecated-version', - msg=msg, - ) - except TypeError: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has deprecated aliases '%s' with invalid removal version %r:" % ( - deprecated_alias['name'], deprecated_alias['version']) - msg += " error while comparing to version of %s" % version_of_what - self.reporter.error( - path=self.object_path, - code='invalid-deprecated-version', - msg=msg, - ) - - aliases = data.get('aliases', []) - if arg in aliases: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " is specified as its own alias" - self.reporter.error( - path=self.object_path, - code='parameter-alias-self', - msg=msg - ) - if len(aliases) > len(set(aliases)): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has at least one alias specified multiple times in aliases" - self.reporter.error( - path=self.object_path, - code='parameter-alias-repeated', - msg=msg - ) - if not context and arg == 'state': - bad_states = set(['list', 'info', 'get']) & set(data.get('choices', set())) - for bad_state in bad_states: - self.reporter.error( - path=self.object_path, - code='parameter-state-invalid-choice', - msg="Argument 'state' includes the value '%s' as a choice" % bad_state) - if not data.get('removed_in_version', None) and not data.get('removed_at_date', None): - args_from_argspec.add(arg) - args_from_argspec.update(aliases) - else: - deprecated_args_from_argspec.add(arg) - deprecated_args_from_argspec.update(aliases) - if arg == 'provider' and self.object_path.startswith('lib/ansible/modules/network/'): - if data.get('options') is not None and not isinstance(data.get('options'), Mapping): - self.reporter.error( - path=self.object_path, - code='invalid-argument-spec-options', - msg="Argument 'options' in argument_spec['provider'] must be a dictionary/hash when used", - ) - elif data.get('options'): - # Record provider options from network modules, for later comparison - for provider_arg, provider_data in data.get('options', {}).items(): - provider_args.add(provider_arg) - provider_args.update(provider_data.get('aliases', [])) - - if data.get('required') and data.get('default', object) != object: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " is marked as required but specifies a default. 
Arguments with a" \ - " default should not be marked as required" - self.reporter.error( - path=self.object_path, - code='no-default-for-required-parameter', - msg=msg - ) - - if arg in provider_args: - # Provider args are being removed from network module top level - # don't validate docs<->arg_spec checks below - continue - - _type = data.get('type', 'str') - if callable(_type): - _type_checker = _type - else: - _type_checker = DEFAULT_TYPE_VALIDATORS.get(_type) - - _elements = data.get('elements') - if (_type == 'list') and not _elements: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines type as list but elements is not defined" - self.reporter.error( - path=self.object_path, - code='parameter-list-no-elements', - msg=msg - ) - if _elements: - if not callable(_elements): - DEFAULT_TYPE_VALIDATORS.get(_elements) - if _type != 'list': - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines elements as %s but it is valid only when value of parameter type is list" % _elements - self.reporter.error( - path=self.object_path, - code='parameter-invalid-elements', - msg=msg - ) - - arg_default = None - if 'default' in data and not is_empty(data['default']): - try: - with CaptureStd(): - arg_default = _type_checker(data['default']) - except (Exception, SystemExit): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines default as (%r) but this is incompatible with parameter type %r" % (data['default'], _type) - self.reporter.error( - path=self.object_path, - code='incompatible-default-type', - msg=msg - ) - continue - - doc_options_args = [] - for alias in sorted(set([arg] + list(aliases))): - if alias in doc_options: - doc_options_args.append(alias) - if len(doc_options_args) == 0: - # Undocumented arguments will be handled later (search for undocumented-parameter) - doc_options_arg = {} - else: - doc_options_arg = doc_options[doc_options_args[0]] - if len(doc_options_args) > 1: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " with aliases %s is documented multiple times, namely as %s" % ( - ", ".join([("'%s'" % alias) for alias in aliases]), - ", ".join([("'%s'" % alias) for alias in doc_options_args]) - ) - self.reporter.error( - path=self.object_path, - code='parameter-documented-multiple-times', - msg=msg - ) - - try: - doc_default = None - if 'default' in doc_options_arg and not is_empty(doc_options_arg['default']): - with CaptureStd(): - doc_default = _type_checker(doc_options_arg['default']) - except (Exception, SystemExit): - msg = "Argument '%s' in documentation" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines default as (%r) but this is incompatible with parameter type %r" % (doc_options_arg.get('default'), _type) - self.reporter.error( - path=self.object_path, - code='doc-default-incompatible-type', - msg=msg - ) - continue - - if arg_default != doc_default: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines default as (%r) but documentation defines default as (%r)" % (arg_default, doc_default) - self.reporter.error( - path=self.object_path, - code='doc-default-does-not-match-spec', - msg=msg - ) - - doc_type = doc_options_arg.get('type') - if 'type' in data and data['type'] is not None: 
- if doc_type is None: - if not arg.startswith('_'): # hidden parameter, for example _raw_params - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines type as %r but documentation doesn't define type" % (data['type']) - self.reporter.error( - path=self.object_path, - code='parameter-type-not-in-doc', - msg=msg - ) - elif data['type'] != doc_type: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines type as %r but documentation defines type as %r" % (data['type'], doc_type) - self.reporter.error( - path=self.object_path, - code='doc-type-does-not-match-spec', - msg=msg - ) - else: - if doc_type is None: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " uses default type ('str') but documentation doesn't define type" - self.reporter.error( - path=self.object_path, - code='doc-missing-type', - msg=msg - ) - elif doc_type != 'str': - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " implies type as 'str' but documentation defines as %r" % doc_type - self.reporter.error( - path=self.object_path, - code='implied-parameter-type-mismatch', - msg=msg - ) - - doc_choices = [] - try: - for choice in doc_options_arg.get('choices', []): - try: - with CaptureStd(): - doc_choices.append(_type_checker(choice)) - except (Exception, SystemExit): - msg = "Argument '%s' in documentation" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines choices as (%r) but this is incompatible with argument type %r" % (choice, _type) - self.reporter.error( - path=self.object_path, - code='doc-choices-incompatible-type', - msg=msg - ) - raise StopIteration() - except StopIteration: - continue - - arg_choices = [] - try: - for choice in data.get('choices', []): - try: - with CaptureStd(): - arg_choices.append(_type_checker(choice)) - except (Exception, SystemExit): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines choices as (%r) but this is incompatible with argument type %r" % (choice, _type) - self.reporter.error( - path=self.object_path, - code='incompatible-choices', - msg=msg - ) - raise StopIteration() - except StopIteration: - continue - - if not compare_unordered_lists(arg_choices, doc_choices): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines choices as (%r) but documentation defines choices as (%r)" % (arg_choices, doc_choices) - self.reporter.error( - path=self.object_path, - code='doc-choices-do-not-match-spec', - msg=msg - ) - - doc_required = doc_options_arg.get('required', False) - data_required = data.get('required', False) - if (doc_required or data_required) and not (doc_required and data_required): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - if doc_required: - msg += " is not required, but is documented as being required" - else: - msg += " is required, but is not documented as being required" - self.reporter.error( - path=self.object_path, - code='doc-required-mismatch', - msg=msg - ) - - doc_elements = doc_options_arg.get('elements', None) - doc_type = doc_options_arg.get('type', 'str') - data_elements = data.get('elements', None) - if (doc_elements and not doc_type == 
'list'): - msg = "Argument '%s' " % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " defines parameter elements as %s but it is valid only when value of parameter type is list" % doc_elements - self.reporter.error( - path=self.object_path, - code='doc-elements-invalid', - msg=msg - ) - if (doc_elements or data_elements) and not (doc_elements == data_elements): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - if data_elements: - msg += " specifies elements as %s," % data_elements - else: - msg += " does not specify elements," - if doc_elements: - msg += "but elements is documented as being %s" % doc_elements - else: - msg += "but elements is not documented" - self.reporter.error( - path=self.object_path, - code='doc-elements-mismatch', - msg=msg - ) - - spec_suboptions = data.get('options') - doc_suboptions = doc_options_arg.get('suboptions', {}) - if spec_suboptions: - if not doc_suboptions: - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " has sub-options but documentation does not define it" - self.reporter.error( - path=self.object_path, - code='missing-suboption-docs', - msg=msg - ) - self._validate_argument_spec({'options': doc_suboptions}, spec_suboptions, kwargs, - context=context + [arg], last_context_spec=data) - - for arg in args_from_argspec: - if not str(arg).isidentifier(): - msg = "Argument '%s' in argument_spec" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " is not a valid python identifier" - self.reporter.error( - path=self.object_path, - code='parameter-invalid', - msg=msg - ) - - if docs: - args_from_docs = set() - for arg, data in doc_options.items(): - args_from_docs.add(arg) - args_from_docs.update(data.get('aliases', [])) - - args_missing_from_docs = args_from_argspec.difference(args_from_docs) - docs_missing_from_args = args_from_docs.difference(args_from_argspec | deprecated_args_from_argspec) - for arg in args_missing_from_docs: - if arg in provider_args: - # Provider args are being removed from network module top level - # So they are likely not documented on purpose - continue - msg = "Argument '%s'" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " is listed in the argument_spec, but not documented in the module documentation" - self.reporter.error( - path=self.object_path, - code='undocumented-parameter', - msg=msg - ) - for arg in docs_missing_from_args: - msg = "Argument '%s'" % arg - if context: - msg += " found in %s" % " -> ".join(context) - msg += " is listed in DOCUMENTATION.options, but not accepted by the module argument_spec" - self.reporter.error( - path=self.object_path, - code='nonexistent-parameter-documented', - msg=msg - ) - - def _check_for_new_args(self, doc): - if not self.base_branch or self._is_new_module(): - return - - with CaptureStd(): - try: - existing_doc, dummy_examples, dummy_return, existing_metadata = get_docstring( - self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name, is_module=True) - existing_options = existing_doc.get('options', {}) or {} - except AssertionError: - fragment = doc['extends_documentation_fragment'] - self.reporter.warning( - path=self.object_path, - code='missing-existing-doc-fragment', - msg='Pre-existing DOCUMENTATION fragment missing: %s' % fragment - ) - return - except Exception as e: - self.reporter.warning_trace( - path=self.object_path, - tracebk=e - ) - 
self.reporter.warning( - path=self.object_path, - code='unknown-doc-fragment', - msg=('Unknown pre-existing DOCUMENTATION error, see TRACE. Submodule refs may need updated') - ) - return - - try: - mod_collection_name = existing_doc.get('version_added_collection') - mod_version_added = self._create_strict_version( - str(existing_doc.get('version_added', '0.0')), - collection_name=mod_collection_name) - except ValueError: - mod_collection_name = self.collection_name - mod_version_added = self._create_strict_version('0.0') - - options = doc.get('options', {}) or {} - - should_be = '.'.join(ansible_version.split('.')[:2]) - strict_ansible_version = self._create_strict_version(should_be, collection_name='ansible.builtin') - - for option, details in options.items(): - try: - names = [option] + details.get('aliases', []) - except (TypeError, AttributeError): - # Reporting of this syntax error will be handled by schema validation. - continue - - if any(name in existing_options for name in names): - # The option already existed. Make sure version_added didn't change. - for name in names: - existing_collection_name = existing_options.get(name, {}).get('version_added_collection') - existing_version = existing_options.get(name, {}).get('version_added') - if existing_version: - break - current_collection_name = details.get('version_added_collection') - current_version = details.get('version_added') - if current_collection_name != existing_collection_name: - self.reporter.error( - path=self.object_path, - code='option-incorrect-version-added-collection', - msg=('version_added for existing option (%s) should ' - 'belong to collection %r. Currently belongs to %r' % - (option, current_collection_name, existing_collection_name)) - ) - elif str(current_version) != str(existing_version): - self.reporter.error( - path=self.object_path, - code='option-incorrect-version-added', - msg=('version_added for existing option (%s) should ' - 'be %r. Currently %r' % - (option, existing_version, current_version)) - ) - continue - - try: - collection_name = details.get('version_added_collection') - version_added = self._create_strict_version( - str(details.get('version_added', '0.0')), - collection_name=collection_name) - except ValueError as e: - # already reported during schema validation - continue - - if collection_name != self.collection_name: - continue - if (strict_ansible_version != mod_version_added and - (version_added < strict_ansible_version or - strict_ansible_version < version_added)): - self.reporter.error( - path=self.object_path, - code='option-incorrect-version-added', - msg=('version_added for new option (%s) should ' - 'be %r. 
Currently %r' % - (option, should_be, version_added)) - ) - - return existing_doc - - @staticmethod - def is_on_rejectlist(path): - base_name = os.path.basename(path) - file_name = os.path.splitext(base_name)[0] - - if file_name.startswith('_') and os.path.islink(path): - return True - - if not frozenset((base_name, file_name)).isdisjoint(ModuleValidator.REJECTLIST): - return True - - for pat in ModuleValidator.REJECTLIST_PATTERNS: - if fnmatch(base_name, pat): - return True - - return False - - def validate(self): - super(ModuleValidator, self).validate() - if not self._python_module() and not self._powershell_module(): - self.reporter.error( - path=self.object_path, - code='invalid-extension', - msg=('Official Ansible modules must have a .py ' - 'extension for python modules or a .ps1 ' - 'for powershell modules') - ) - self._python_module_override = True - - if self._python_module() and self.ast is None: - self.reporter.error( - path=self.object_path, - code='python-syntax-error', - msg='Python SyntaxError while parsing module' - ) - try: - compile(self.text, self.path, 'exec') - except Exception: - self.reporter.trace( - path=self.object_path, - tracebk=traceback.format_exc() - ) - return - - end_of_deprecation_should_be_removed_only = False - if self._python_module(): - doc_info, docs = self._validate_docs() - - # See if current version => deprecated.removed_in, ie, should be docs only - if docs and docs.get('deprecated', False): - - if 'removed_in' in docs['deprecated']: - removed_in = None - collection_name = docs['deprecated'].get('removed_from_collection') - version = docs['deprecated']['removed_in'] - if collection_name != self.collection_name: - self.reporter.error( - path=self.object_path, - code='invalid-module-deprecation-source', - msg=('The deprecation version for a module must be added in this collection') - ) - else: - try: - removed_in = self._create_strict_version(str(version), collection_name=collection_name) - except ValueError as e: - self.reporter.error( - path=self.object_path, - code='invalid-module-deprecation-version', - msg=('The deprecation version %r cannot be parsed: %s' % (version, e)) - ) - - if removed_in: - if not self.collection: - strict_ansible_version = self._create_strict_version( - '.'.join(ansible_version.split('.')[:2]), self.collection_name) - end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in - - if end_of_deprecation_should_be_removed_only: - self.reporter.error( - path=self.object_path, - code='ansible-deprecated-module', - msg='Module is marked for removal in version %s of Ansible when the current version is %s' % ( - version, ansible_version), - ) - elif self.collection_version: - strict_ansible_version = self.collection_version - end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in - - if end_of_deprecation_should_be_removed_only: - self.reporter.error( - path=self.object_path, - code='collection-deprecated-module', - msg='Module is marked for removal in version %s of this collection when the current version is %s' % ( - version, self.collection_version_str), - ) - - # handle deprecation by date - if 'removed_at_date' in docs['deprecated']: - try: - removed_at_date = docs['deprecated']['removed_at_date'] - if parse_isodate(removed_at_date, allow_date=True) < datetime.date.today(): - msg = "Module's deprecated.removed_at_date date '%s' is before today" % removed_at_date - self.reporter.error(path=self.object_path, code='deprecated-date', msg=msg) - except ValueError: - # This happens 
if the date cannot be parsed. This is already checked by the schema. - pass - - if self._python_module() and not self._just_docs() and not end_of_deprecation_should_be_removed_only: - self._validate_ansible_module_call(docs) - self._check_for_sys_exit() - self._find_rejectlist_imports() - self._find_module_utils() - self._find_has_import() - first_callable = self._get_first_callable() - self._ensure_imports_below_docs(doc_info, first_callable) - self._check_for_subprocess() - self._check_for_os_call() - - if self._powershell_module(): - if self.basename in self.PS_DOC_REJECTLIST: - return - - self._validate_ps_replacers() - docs_path = self._find_ps_docs_py_file() - - # We can only validate PowerShell arg spec if it is using the new Ansible.Basic.AnsibleModule util - pattern = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*Ansible\.Basic' - if re.search(pattern, self.text) and self.object_name not in self.PS_ARG_VALIDATE_REJECTLIST: - with ModuleValidator(docs_path, base_branch=self.base_branch, git_cache=self.git_cache) as docs_mv: - docs = docs_mv._validate_docs()[1] - self._validate_ansible_module_call(docs) - - self._check_gpl3_header() - if not self._just_docs() and not end_of_deprecation_should_be_removed_only: - self._check_interpreter(powershell=self._powershell_module()) - self._check_type_instead_of_isinstance( - powershell=self._powershell_module() - ) - - -class PythonPackageValidator(Validator): - REJECTLIST_FILES = frozenset(('__pycache__',)) - - def __init__(self, path, reporter=None): - super(PythonPackageValidator, self).__init__(reporter=reporter or Reporter()) - - self.path = path - self.basename = os.path.basename(path) - - @property - def object_name(self): - return self.basename - - @property - def object_path(self): - return self.path - - def validate(self): - super(PythonPackageValidator, self).validate() - - if self.basename in self.REJECTLIST_FILES: - return - - init_file = os.path.join(self.path, '__init__.py') - if not os.path.exists(init_file): - self.reporter.error( - path=self.object_path, - code='subdirectory-missing-init', - msg='Ansible module subdirectories must contain an __init__.py' - ) - - -def setup_collection_loader(): - collections_paths = os.environ.get('ANSIBLE_COLLECTIONS_PATH', '').split(os.pathsep) - _AnsibleCollectionFinder(collections_paths) - - -def re_compile(value): - """ - Argparse expects things to raise TypeError, re.compile raises an re.error - exception - - This function is a shorthand to convert the re.error exception to a - TypeError - """ - - try: - return re.compile(value) - except re.error as e: - raise TypeError(e) - - -def run(): - parser = argparse.ArgumentParser(prog="validate-modules") - parser.add_argument('modules', nargs='+', - help='Path to module or module directory') - parser.add_argument('-w', '--warnings', help='Show warnings', - action='store_true') - parser.add_argument('--exclude', help='RegEx exclusion pattern', - type=re_compile) - parser.add_argument('--arg-spec', help='Analyze module argument spec', - action='store_true', default=False) - parser.add_argument('--base-branch', default=None, - help='Used in determining if new options were added') - parser.add_argument('--format', choices=['json', 'plain'], default='plain', - help='Output format. Default: "%(default)s"') - parser.add_argument('--output', default='-', - help='Output location, use "-" for stdout. 
' - 'Default "%(default)s"') - parser.add_argument('--collection', - help='Specifies the path to the collection, when ' - 'validating files within a collection. Ensure ' - 'that ANSIBLE_COLLECTIONS_PATH is set so the ' - 'contents of the collection can be located') - parser.add_argument('--collection-version', - help='The collection\'s version number used to check ' - 'deprecations') - - args = parser.parse_args() - - args.modules = [m.rstrip('/') for m in args.modules] - - reporter = Reporter() - git_cache = GitCache(args.base_branch) - - check_dirs = set() - - routing = None - if args.collection: - setup_collection_loader() - routing_file = 'meta/runtime.yml' - # Load meta/runtime.yml if it exists, as it may contain deprecation information - if os.path.isfile(routing_file): - try: - with open(routing_file) as f: - routing = yaml.safe_load(f) - except yaml.error.MarkedYAMLError as ex: - print('%s:%d:%d: YAML load failed: %s' % (routing_file, ex.context_mark.line + 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex)))) - except Exception as ex: # pylint: disable=broad-except - print('%s:%d:%d: YAML load failed: %s' % (routing_file, 0, 0, re.sub(r'\s+', ' ', str(ex)))) - - for module in args.modules: - if os.path.isfile(module): - path = module - if args.exclude and args.exclude.search(path): - continue - if ModuleValidator.is_on_rejectlist(path): - continue - with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, - analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, - git_cache=git_cache, reporter=reporter, routing=routing) as mv1: - mv1.validate() - check_dirs.add(os.path.dirname(path)) - - for root, dirs, files in os.walk(module): - basedir = root[len(module) + 1:].split('/', 1)[0] - if basedir in REJECTLIST_DIRS: - continue - for dirname in dirs: - if root == module and dirname in REJECTLIST_DIRS: - continue - path = os.path.join(root, dirname) - if args.exclude and args.exclude.search(path): - continue - check_dirs.add(path) - - for filename in files: - path = os.path.join(root, filename) - if args.exclude and args.exclude.search(path): - continue - if ModuleValidator.is_on_rejectlist(path): - continue - with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, - analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, - git_cache=git_cache, reporter=reporter, routing=routing) as mv2: - mv2.validate() - - if not args.collection: - for path in sorted(check_dirs): - pv = PythonPackageValidator(path, reporter=reporter) - pv.validate() - - if args.format == 'plain': - sys.exit(reporter.plain(warnings=args.warnings, output=args.output)) - else: - sys.exit(reporter.json(warnings=args.warnings, output=args.output)) - - -class GitCache: - def __init__(self, base_branch): - self.base_branch = base_branch - - if self.base_branch: - self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, 'lib/ansible/modules/']) - else: - self.base_tree = [] - - try: - self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/']) - except GitError as ex: - if ex.status == 128: - # fallback when there is no .git directory - self.head_tree = self._get_module_files() - else: - raise - except OSError as ex: - if ex.errno == errno.ENOENT: - # fallback when git is not installed - self.head_tree = self._get_module_files() - else: - raise - - self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in ('.py', 
'.ps1'))
-
-        self.base_module_paths.pop('__init__.py', None)
-
-        self.head_aliased_modules = set()
-
-        for path in self.head_tree:
-            filename = os.path.basename(path)
-
-            if filename.startswith('_') and filename != '__init__.py':
-                if os.path.islink(path):
-                    self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))
-
-    @staticmethod
-    def _get_module_files():
-        module_files = []
-
-        for (dir_path, dir_names, file_names) in os.walk('lib/ansible/modules/'):
-            for file_name in file_names:
-                module_files.append(os.path.join(dir_path, file_name))
-
-        return module_files
-
-    @staticmethod
-    def _git(args):
-        cmd = ['git'] + args
-        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        stdout, stderr = p.communicate()
-        if p.returncode != 0:
-            raise GitError(stderr, p.returncode)
-        return stdout.decode('utf-8').splitlines()
-
-
-class GitError(Exception):
-    def __init__(self, message, status):
-        super(GitError, self).__init__(message)
-
-        self.status = status
-
-
-def main():
-    try:
-        run()
-    except KeyboardInterrupt:
-        pass
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py
deleted file mode 100644
index 8cd0e5e560..0000000000
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/module_args.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2016 Matt Martz <matt@sivel.net>
-# Copyright (C) 2016 Rackspace US, Inc.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
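Note on the GitCache class that closes main.py above: it shells out to git ls-tree and degrades gracefully, falling back to walking lib/ansible/modules/ when git exits with status 128 (no repository) or is not installed at all (ENOENT). A minimal standalone sketch of that fallback pattern, with illustrative names and not part of the patch:

import errno
import os
import subprocess

def list_tracked(path):
    """Return files under path from git, or from os.walk when git is unusable."""
    try:
        stdout = subprocess.check_output(['git', 'ls-tree', '-r', '--name-only', 'HEAD', path],
                                         stderr=subprocess.DEVNULL)
        return stdout.decode('utf-8').splitlines()
    except subprocess.CalledProcessError:
        pass  # e.g. exit status 128 when there is no .git directory
    except OSError as ex:
        if ex.errno != errno.ENOENT:
            raise  # only swallow "git not installed"
    return [os.path.join(root, name) for root, dummy, names in os.walk(path) for name in names]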
-from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import runpy -import inspect -import json -import os -import subprocess -import sys - -from contextlib import contextmanager - -from ansible.executor.powershell.module_manifest import PSModuleDepFinder -from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS, AnsibleModule -from ansible.module_utils.six import reraise -from ansible.module_utils._text import to_bytes, to_text - -from .utils import CaptureStd, find_executable, get_module_name_from_filename - - -ANSIBLE_MODULE_CONSTRUCTOR_ARGS = tuple(list(inspect.signature(AnsibleModule.__init__).parameters)[1:]) - - -class AnsibleModuleCallError(RuntimeError): - pass - - -class AnsibleModuleImportError(ImportError): - pass - - -class AnsibleModuleNotInitialized(Exception): - pass - - -class _FakeAnsibleModuleInit: - def __init__(self): - self.args = tuple() - self.kwargs = {} - self.called = False - - def __call__(self, *args, **kwargs): - if args and isinstance(args[0], AnsibleModule): - # Make sure, due to creative calling, that we didn't end up with - # ``self`` in ``args`` - self.args = args[1:] - else: - self.args = args - self.kwargs = kwargs - self.called = True - raise AnsibleModuleCallError('AnsibleModuleCallError') - - -def _fake_load_params(): - pass - - -@contextmanager -def setup_env(filename): - # Used to clean up imports later - pre_sys_modules = list(sys.modules.keys()) - - fake = _FakeAnsibleModuleInit() - module = __import__('ansible.module_utils.basic').module_utils.basic - _original_init = module.AnsibleModule.__init__ - _original_load_params = module._load_params - setattr(module.AnsibleModule, '__init__', fake) - setattr(module, '_load_params', _fake_load_params) - - try: - yield fake - finally: - setattr(module.AnsibleModule, '__init__', _original_init) - setattr(module, '_load_params', _original_load_params) - - # Clean up imports to prevent issues with mutable data being used in modules - for k in list(sys.modules.keys()): - # It's faster if we limit to items in ansible.module_utils - # But if this causes problems later, we should remove it - if k not in pre_sys_modules and k.startswith('ansible.module_utils.'): - del sys.modules[k] - - -def get_ps_argument_spec(filename, collection): - fqc_name = get_module_name_from_filename(filename, collection) - - pwsh = find_executable('pwsh') - if not pwsh: - raise FileNotFoundError('Required program for PowerShell arg spec inspection "pwsh" not found.') - - module_path = os.path.join(os.getcwd(), filename) - b_module_path = to_bytes(module_path, errors='surrogate_or_strict') - with open(b_module_path, mode='rb') as module_fd: - b_module_data = module_fd.read() - - ps_dep_finder = PSModuleDepFinder() - ps_dep_finder.scan_module(b_module_data, fqn=fqc_name) - - # For ps_argspec.ps1 to compile Ansible.Basic it also needs the AddType module_util. 
-    ps_dep_finder._add_module((b"Ansible.ModuleUtils.AddType", ".psm1", None), wrapper=False)
-
-    util_manifest = json.dumps({
-        'module_path': to_text(module_path, errors='surrogate_or_strict'),
-        'ansible_basic': ps_dep_finder.cs_utils_module["Ansible.Basic"]['path'],
-        'ps_utils': dict([(name, info['path']) for name, info in ps_dep_finder.ps_modules.items()]),
-    })
-
-    script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1')
-    proc = subprocess.Popen([script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE,
-                            shell=False)
-    stdout, stderr = proc.communicate()
-
-    if proc.returncode != 0:
-        raise AnsibleModuleImportError("STDOUT:\n%s\nSTDERR:\n%s" % (stdout.decode('utf-8'), stderr.decode('utf-8')))
-
-    kwargs = json.loads(stdout)
-
-    # the validate-modules code expects the options spec to be under the argument_spec key not options as set in PS
-    kwargs['argument_spec'] = kwargs.pop('options', {})
-
-    return kwargs['argument_spec'], kwargs
-
-
-def get_py_argument_spec(filename, collection):
-    name = get_module_name_from_filename(filename, collection)
-
-    with setup_env(filename) as fake:
-        try:
-            with CaptureStd():
-                runpy.run_module(name, run_name='__main__', alter_sys=True)
-        except AnsibleModuleCallError:
-            pass
-        except BaseException as e:
-            # we want to catch all exceptions here, including sys.exit
-            reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2])
-
-    if not fake.called:
-        raise AnsibleModuleNotInitialized()
-
-    try:
-        # Convert positional arguments to kwargs to make sure that all parameters are actually checked
-        for arg, arg_name in zip(fake.args, ANSIBLE_MODULE_CONSTRUCTOR_ARGS):
-            fake.kwargs[arg_name] = arg
-        # for ping kwargs == {'argument_spec':{'data':{'type':'str','default':'pong'}}, 'supports_check_mode':True}
-        argument_spec = fake.kwargs.get('argument_spec') or {}
-        # If add_file_common_args is truish, add options from FILE_COMMON_ARGUMENTS when not present.
-        # This is the only modification to argument_spec done by AnsibleModule itself, and which is
-        # not caught by setup_env's AnsibleModule replacement
-        if fake.kwargs.get('add_file_common_args'):
-            for k, v in FILE_COMMON_ARGUMENTS.items():
-                if k not in argument_spec:
-                    argument_spec[k] = v
-        return argument_spec, fake.kwargs
-    except (TypeError, IndexError):
-        return {}, {}
-
-
-def get_argument_spec(filename, collection):
-    if filename.endswith('.py'):
-        return get_py_argument_spec(filename, collection)
-    else:
-        return get_ps_argument_spec(filename, collection)
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1 b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1
deleted file mode 100755
index 5ceb9d50b7..0000000000
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/ps_argspec.ps1
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env pwsh
-#Requires -Version 6
-
-Set-StrictMode -Version 2.0
-$ErrorActionPreference = "Stop"
-$WarningPreference = "Stop"
-
-Function Resolve-CircularReference {
-    <#
-    .SYNOPSIS
-    Removes known types that cause a circular reference in their json serialization.
-
-    .PARAMETER Hash
-    The hash to scan for circular references
-    #>
-    [CmdletBinding()]
-    param (
-        [Parameter(Mandatory=$true)]
-        [System.Collections.IDictionary]
-        $Hash
-    )
-
-    foreach ($key in [String[]]$Hash.Keys) {
-        $value = $Hash[$key]
-        if ($value -is [System.Collections.IDictionary]) {
-            Resolve-CircularReference -Hash $value
-        } elseif ($value -is [Array] -or $value -is [System.Collections.IList]) {
-            $values = @(foreach ($v in $value) {
-                if ($v -is [System.Collections.IDictionary]) {
-                    Resolve-CircularReference -Hash $v
-                }
-                ,$v
-            })
-            $Hash[$key] = $values
-        } elseif ($value -is [DateTime]) {
-            $Hash[$key] = $value.ToString("yyyy-MM-dd")
-        } elseif ($value -is [delegate]) {
-            # Type can be set to a delegate function which defines its own type. For the documentation we just
-            # reflect that as raw
-            if ($key -eq 'type') {
-                $Hash[$key] = 'raw'
-            } else {
-                $Hash[$key] = $value.ToString()  # Shouldn't ever happen but just in case.
-            }
-        }
-    }
-}
-
-$manifest = ConvertFrom-Json -InputObject $args[0] -AsHashtable
-if (-not $manifest.Contains('module_path') -or -not $manifest.module_path) {
-    Write-Error -Message "No module specified."
-    exit 1
-}
-$module_path = $manifest.module_path
-
-# Check if the path is relative and get the full path to the module
-if (-not ([System.IO.Path]::IsPathRooted($module_path))) {
-    $module_path = $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($module_path)
-}
-
-if (-not (Test-Path -LiteralPath $module_path -PathType Leaf)) {
-    Write-Error -Message "The module at '$module_path' does not exist."
-    exit 1
-}
-
-$module_code = Get-Content -LiteralPath $module_path -Raw
-
-$powershell = [PowerShell]::Create()
-$powershell.Runspace.SessionStateProxy.SetVariable("ErrorActionPreference", "Stop")
-
-# Load the PowerShell module utils as the module may be using them to refer to shared module options. Currently we
-# can only load the PowerShell utils due to cross platform compatibility issues.
-if ($manifest.Contains('ps_utils')) {
-    foreach ($util_info in $manifest.ps_utils.GetEnumerator()) {
-        $util_name = $util_info.Key
-        $util_path = $util_info.Value
-
-        if (-not (Test-Path -LiteralPath $util_path -PathType Leaf)) {
-            # Failed to find the util path, just silently ignore for now and hope for the best.
-            continue
-        }
-
-        $util_sb = [ScriptBlock]::Create((Get-Content -LiteralPath $util_path -Raw))
-        $powershell.AddCommand('New-Module').AddParameters(@{
-            Name = $util_name
-            ScriptBlock = $util_sb
-        }) > $null
-        $powershell.AddCommand('Import-Module').AddParameter('WarningAction', 'SilentlyContinue') > $null
-        $powershell.AddCommand('Out-Null').AddStatement() > $null
-
-        # Also import it into the current runspace in case ps_argspec.ps1 needs to use it.
-        $null = New-Module -Name $util_name -ScriptBlock $util_sb | Import-Module -WarningAction SilentlyContinue
-    }
-}
-
-Add-CSharpType -References @(Get-Content -LiteralPath $manifest.ansible_basic -Raw)
-[Ansible.Basic.AnsibleModule]::_DebugArgSpec = $true
-
-$powershell.AddScript($module_code) > $null
-$powershell.Invoke() > $null
-
-if ($powershell.HadErrors) {
-    $powershell.Streams.Error
-    exit 1
-}
-
-$arg_spec = $powershell.Runspace.SessionStateProxy.GetVariable('ansibleTestArgSpec')
-Resolve-CircularReference -Hash $arg_spec
-
-ConvertTo-Json -InputObject $arg_spec -Compress -Depth 99
diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py
deleted file mode 100644
index 07034530c1..0000000000
--- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/schema.py
+++ /dev/null
@@ -1,587 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright: (c) 2015, Matt Martz <matt@sivel.net>
-# Copyright: (c) 2015, Rackspace US, Inc.
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import re
-
-from ansible.module_utils.compat.version import StrictVersion
-from functools import partial
-
-from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid
-from ansible.module_utils.six import string_types
-from ansible.module_utils.common.collections import is_iterable
-from ansible.utils.version import SemanticVersion
-from ansible.release import __version__
-
-from .utils import parse_isodate
-
-list_string_types = list(string_types)
-tuple_string_types = tuple(string_types)
-any_string_types = Any(*string_types)
-
-# Valid DOCUMENTATION.author lines
-# Based on Ansibulbot's extract_github_id()
-#   author: First Last (@name) [optional anything]
-#   "Ansible Core Team" - Used by the Bot
-#   "Michael DeHaan" - nop
-#   "OpenStack Ansible SIG" - OpenStack does not use GitHub
-#   "Name (!UNKNOWN)" - For the few untraceable authors
-author_line = re.compile(r'^\w.*(\(@([\w-]+)\)|!UNKNOWN)(?![\w.])|^Ansible Core Team$|^Michael DeHaan$|^OpenStack Ansible SIG$')
-
-
-def _add_ansible_error_code(exception, error_code):
-    setattr(exception, 'ansible_error_code', error_code)
-    return exception
-
-
-def isodate(v, error_code=None):
-    try:
-        parse_isodate(v, allow_date=True)
-    except ValueError as e:
-        raise _add_ansible_error_code(Invalid(str(e)), error_code or 'ansible-invalid-date')
-    return v
-
-
-COLLECTION_NAME_RE = re.compile(r'^([^.]+(\.[^.]+)+)$')
-
-
-def collection_name(v, error_code=None):
-    if not isinstance(v, string_types):
-        raise _add_ansible_error_code(
-            Invalid('Collection name must be a string'), error_code or 'collection-invalid-name')
-    m = COLLECTION_NAME_RE.match(v)
-    if not m:
-        raise _add_ansible_error_code(
-            Invalid('Collection name must be of format `<namespace>.<name>`'), error_code or 'collection-invalid-name')
-    return v
-
-
-def deprecation_versions():
-    """Create a list of valid versions for deprecation entries, current+4"""
-    major, minor = [int(version) for version in __version__.split('.')[0:2]]
-    return Any(*['{0}.{1}'.format(major, minor + increment) for increment in range(0, 5)])
-
-
-def version(for_collection=False):
-    if for_collection:
-        # We do not accept floats for versions in collections
-        return Any(*string_types)
-    return Any(float, *string_types)
-
-
-def date(error_code=None):
-    return 
Any(isodate, error_code=error_code) - - -def is_callable(v): - if not callable(v): - raise ValueInvalid('not a valid value') - return v - - -def sequence_of_sequences(min=None, max=None): - return All( - Any( - None, - [Any(list, tuple)], - tuple([Any(list, tuple)]), - ), - Any( - None, - [Length(min=min, max=max)], - tuple([Length(min=min, max=max)]), - ), - ) - - -seealso_schema = Schema( - [ - Any( - { - Required('module'): Any(*string_types), - 'description': Any(*string_types), - }, - { - Required('ref'): Any(*string_types), - Required('description'): Any(*string_types), - }, - { - Required('name'): Any(*string_types), - Required('link'): Any(*string_types), - Required('description'): Any(*string_types), - }, - ), - ] -) - - -argument_spec_types = ['bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', - 'sid', 'str'] - - -argument_spec_modifiers = { - 'mutually_exclusive': sequence_of_sequences(min=2), - 'required_together': sequence_of_sequences(min=2), - 'required_one_of': sequence_of_sequences(min=2), - 'required_if': sequence_of_sequences(min=3, max=4), - 'required_by': Schema({str: Any(list_string_types, tuple_string_types, *string_types)}), -} - - -def no_required_with_default(v): - if v.get('default') and v.get('required'): - raise Invalid('required=True cannot be supplied with a default') - return v - - -def elements_with_list(v): - if v.get('elements') and v.get('type') != 'list': - raise Invalid('type must be list to use elements') - return v - - -def options_with_apply_defaults(v): - if v.get('apply_defaults') and not v.get('options'): - raise Invalid('apply_defaults=True requires options to be set') - return v - - -def check_removal_version(v, version_field, collection_name_field, error_code='invalid-removal-version'): - version = v.get(version_field) - collection_name = v.get(collection_name_field) - if not isinstance(version, string_types) or not isinstance(collection_name, string_types): - # If they are not strings, schema validation will have already complained. 
- return v - if collection_name == 'ansible.builtin': - try: - parsed_version = StrictVersion() - parsed_version.parse(version) - except ValueError as exc: - raise _add_ansible_error_code( - Invalid('%s (%r) is not a valid ansible-core version: %s' % (version_field, version, exc)), - error_code=error_code) - return v - try: - parsed_version = SemanticVersion() - parsed_version.parse(version) - if parsed_version.major != 0 and (parsed_version.minor != 0 or parsed_version.patch != 0): - raise _add_ansible_error_code( - Invalid('%s (%r) must be a major release, not a minor or patch release (see specification at ' - 'https://semver.org/)' % (version_field, version)), - error_code='removal-version-must-be-major') - except ValueError as exc: - raise _add_ansible_error_code( - Invalid('%s (%r) is not a valid collection version (see specification at https://semver.org/): ' - '%s' % (version_field, version, exc)), - error_code=error_code) - return v - - -def option_deprecation(v): - if v.get('removed_in_version') or v.get('removed_at_date'): - if v.get('removed_in_version') and v.get('removed_at_date'): - raise _add_ansible_error_code( - Invalid('Only one of removed_in_version and removed_at_date must be specified'), - error_code='deprecation-either-date-or-version') - if not v.get('removed_from_collection'): - raise _add_ansible_error_code( - Invalid('If removed_in_version or removed_at_date is specified, ' - 'removed_from_collection must be specified as well'), - error_code='deprecation-collection-missing') - check_removal_version(v, - version_field='removed_in_version', - collection_name_field='removed_from_collection', - error_code='invalid-removal-version') - return - if v.get('removed_from_collection'): - raise Invalid('removed_from_collection cannot be specified without either ' - 'removed_in_version or removed_at_date') - - -def argument_spec_schema(for_collection): - any_string_types = Any(*string_types) - schema = { - any_string_types: { - 'type': Any(is_callable, *argument_spec_types), - 'elements': Any(*argument_spec_types), - 'default': object, - 'fallback': Any( - (is_callable, list_string_types), - [is_callable, list_string_types], - ), - 'choices': Any([object], (object,)), - 'required': bool, - 'no_log': bool, - 'aliases': Any(list_string_types, tuple(list_string_types)), - 'apply_defaults': bool, - 'removed_in_version': version(for_collection), - 'removed_at_date': date(), - 'removed_from_collection': collection_name, - 'options': Self, - 'deprecated_aliases': Any([All( - Any( - { - Required('name'): Any(*string_types), - Required('date'): date(), - Required('collection_name'): collection_name, - }, - { - Required('name'): Any(*string_types), - Required('version'): version(for_collection), - Required('collection_name'): collection_name, - }, - ), - partial(check_removal_version, - version_field='version', - collection_name_field='collection_name', - error_code='invalid-removal-version') - )]), - } - } - schema[any_string_types].update(argument_spec_modifiers) - schemas = All( - schema, - Schema({any_string_types: no_required_with_default}), - Schema({any_string_types: elements_with_list}), - Schema({any_string_types: options_with_apply_defaults}), - Schema({any_string_types: option_deprecation}), - ) - return Schema(schemas) - - -def ansible_module_kwargs_schema(module_name, for_collection): - schema = { - 'argument_spec': argument_spec_schema(for_collection), - 'bypass_checks': bool, - 'no_log': bool, - 'check_invalid_arguments': Any(None, bool), - 'add_file_common_args': bool, - 
'supports_check_mode': bool, - } - if module_name.endswith(('_info', '_facts')): - del schema['supports_check_mode'] - schema[Required('supports_check_mode')] = True - schema.update(argument_spec_modifiers) - return Schema(schema) - - -json_value = Schema(Any( - None, - int, - float, - [Self], - *(list({str_type: Self} for str_type in string_types) + list(string_types)) -)) - - -def version_added(v, error_code='version-added-invalid', accept_historical=False): - if 'version_added' in v: - version_added = v.get('version_added') - if isinstance(version_added, string_types): - # If it is not a string, schema validation will have already complained - # - or we have a float and we are in ansible/ansible, in which case we're - # also happy. - if v.get('version_added_collection') == 'ansible.builtin': - if version_added == 'historical' and accept_historical: - return v - try: - version = StrictVersion() - version.parse(version_added) - except ValueError as exc: - raise _add_ansible_error_code( - Invalid('version_added (%r) is not a valid ansible-core version: ' - '%s' % (version_added, exc)), - error_code=error_code) - else: - try: - version = SemanticVersion() - version.parse(version_added) - if version.major != 0 and version.patch != 0: - raise _add_ansible_error_code( - Invalid('version_added (%r) must be a major or minor release, ' - 'not a patch release (see specification at ' - 'https://semver.org/)' % (version_added, )), - error_code='version-added-must-be-major-or-minor') - except ValueError as exc: - raise _add_ansible_error_code( - Invalid('version_added (%r) is not a valid collection version ' - '(see specification at https://semver.org/): ' - '%s' % (version_added, exc)), - error_code=error_code) - elif 'version_added_collection' in v: - # Must have been manual intervention, since version_added_collection is only - # added automatically when version_added is present - raise Invalid('version_added_collection cannot be specified without version_added') - return v - - -def list_dict_option_schema(for_collection): - suboption_schema = Schema( - { - Required('description'): Any(list_string_types, *string_types), - 'required': bool, - 'choices': list, - 'aliases': Any(list_string_types), - 'version_added': version(for_collection), - 'version_added_collection': collection_name, - 'default': json_value, - # Note: Types are strings, not literal bools, such as True or False - 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), - # in case of type='list' elements define type of individual item in list - 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), - # Recursive suboptions - 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)), - }, - extra=PREVENT_EXTRA - ) - - # This generates list of dicts with keys from string_types and suboption_schema value - # for example in Python 3: {str: suboption_schema} - list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types] - - option_schema = Schema( - { - Required('description'): Any(list_string_types, *string_types), - 'required': bool, - 'choices': list, - 'aliases': Any(list_string_types), - 'version_added': version(for_collection), - 'version_added_collection': collection_name, - 'default': json_value, - 'suboptions': Any(None, *list_dict_suboption_schema), - # Note: Types are strings, not literal bools, such as True or False - 'type': Any(None, 
'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), - # in case of type='list' elements define type of individual item in list - 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), - }, - extra=PREVENT_EXTRA - ) - - option_version_added = Schema( - All({ - 'suboptions': Any(None, *[{str_type: Self} for str_type in string_types]), - }, partial(version_added, error_code='option-invalid-version-added')), - extra=ALLOW_EXTRA - ) - - # This generates list of dicts with keys from string_types and option_schema value - # for example in Python 3: {str: option_schema} - return [{str_type: All(option_schema, option_version_added)} for str_type in string_types] - - -def return_contains(v): - schema = Schema( - { - Required('contains'): Any(dict, list, *string_types) - }, - extra=ALLOW_EXTRA - ) - if v.get('type') == 'complex': - return schema(v) - return v - - -def return_schema(for_collection): - return_contains_schema = Any( - All( - Schema( - { - Required('description'): Any(list_string_types, *string_types), - 'returned': Any(*string_types), # only returned on top level - Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'), - 'version_added': version(for_collection), - 'version_added_collection': collection_name, - 'sample': json_value, - 'example': json_value, - 'contains': Any(None, *list({str_type: Self} for str_type in string_types)), - # in case of type='list' elements define type of individual item in list - 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), - } - ), - Schema(return_contains), - Schema(partial(version_added, error_code='option-invalid-version-added')), - ), - Schema(type(None)), - ) - - # This generates list of dicts with keys from string_types and return_contains_schema value - # for example in Python 3: {str: return_contains_schema} - list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types] - - return Any( - All( - Schema( - { - any_string_types: { - Required('description'): Any(list_string_types, *string_types), - Required('returned'): Any(*string_types), - Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'), - 'version_added': version(for_collection), - 'version_added_collection': collection_name, - 'sample': json_value, - 'example': json_value, - 'contains': Any(None, *list_dict_return_contains_schema), - # in case of type='list' elements define type of individual item in list - 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), - } - } - ), - Schema({any_string_types: return_contains}), - Schema({any_string_types: partial(version_added, error_code='option-invalid-version-added')}), - ), - Schema(type(None)), - ) - - -def deprecation_schema(for_collection): - main_fields = { - Required('why'): Any(*string_types), - Required('alternative'): Any(*string_types), - Required('removed_from_collection'): collection_name, - 'removed': Any(True), - } - - date_schema = { - Required('removed_at_date'): date(), - } - date_schema.update(main_fields) - - if for_collection: - version_schema = { - Required('removed_in'): version(for_collection), - } - else: - version_schema = { - Required('removed_in'): deprecation_versions(), - } - version_schema.update(main_fields) - - result = Any( - 
Schema(version_schema, extra=PREVENT_EXTRA), - Schema(date_schema, extra=PREVENT_EXTRA), - ) - - if for_collection: - result = All( - result, - partial(check_removal_version, - version_field='removed_in', - collection_name_field='removed_from_collection', - error_code='invalid-removal-version')) - - return result - - -def author(value): - if value is None: - return value # let schema checks handle - - if not is_iterable(value): - value = [value] - - for line in value: - if not isinstance(line, string_types): - continue # let schema checks handle - m = author_line.search(line) - if not m: - raise Invalid("Invalid author") - - return value - - -def doc_schema(module_name, for_collection=False, deprecated_module=False): - - if module_name.startswith('_'): - module_name = module_name[1:] - deprecated_module = True - doc_schema_dict = { - Required('module'): module_name, - Required('short_description'): Any(*string_types), - Required('description'): Any(list_string_types, *string_types), - Required('author'): All(Any(None, list_string_types, *string_types), author), - 'notes': Any(None, list_string_types), - 'seealso': Any(None, seealso_schema), - 'requirements': list_string_types, - 'todo': Any(None, list_string_types, *string_types), - 'options': Any(None, *list_dict_option_schema(for_collection)), - 'extends_documentation_fragment': Any(list_string_types, *string_types), - 'version_added_collection': collection_name, - } - - if for_collection: - # Optional - doc_schema_dict['version_added'] = version(for_collection=True) - else: - doc_schema_dict[Required('version_added')] = version(for_collection=False) - - if deprecated_module: - deprecation_required_scheme = { - Required('deprecated'): Any(deprecation_schema(for_collection=for_collection)), - } - - doc_schema_dict.update(deprecation_required_scheme) - - def add_default_attributes(more=None): - schema = { - 'description': any_string_types, - 'support': any_string_types, - 'version_added_collection': any_string_types, - 'version_added': any_string_types, - } - if more: - schema.update(more) - return schema - - doc_schema_dict['attributes'] = Schema( - All( - Schema({ - any_string_types: { - Required('description'): any_string_types, - Required('support'): Any('full', 'partial', 'none'), - 'version_added_collection': collection_name, - 'version_added': version(for_collection=for_collection), - }, - }, extra=ALLOW_EXTRA), - partial(version_added, error_code='attribute-invalid-version-added', accept_historical=False), - Schema({ - any_string_types: add_default_attributes(), - 'action_group': add_default_attributes({ - Required('membership'): list_string_types, - }), - 'forced_action_plugin': add_default_attributes({ - Required('action_plugin'): any_string_types, - }), - 'proprietary': add_default_attributes({ - Required('platforms'): list_string_types, - }), - }, extra=PREVENT_EXTRA), - ) - ) - return Schema( - All( - Schema( - doc_schema_dict, - extra=PREVENT_EXTRA - ), - partial(version_added, error_code='module-invalid-version-added', accept_historical=not for_collection), - ) - ) - - -# Things to add soon -#################### -# 1) Recursively validate `type: complex` fields -# This will improve documentation, though require fair amount of module tidyup - -# Possible Future Enhancements -############################## - -# 1) Don't allow empty options for choices, aliases, etc -# 2) If type: bool ensure choices isn't set - perhaps use Exclusive -# 3) both version_added should be quoted floats - -# Tool that takes JSON and generates RETURN 
skeleton (needs to support complex structures) diff --git a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py deleted file mode 100644 index ac46f6669f..0000000000 --- a/test/lib/ansible_test/_data/sanity/validate-modules/validate_modules/utils.py +++ /dev/null @@ -1,225 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2015 Matt Martz -# Copyright (C) 2015 Rackspace US, Inc. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see <https://www.gnu.org/licenses/>. -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import ast -import datetime -import os -import re -import sys - -from io import BytesIO, TextIOWrapper - -import yaml -import yaml.reader - -from ansible.module_utils._text import to_text -from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.common.yaml import SafeLoader -from ansible.module_utils.six import string_types -from ansible.parsing.yaml.loader import AnsibleLoader - - -class AnsibleTextIOWrapper(TextIOWrapper): - def write(self, s): - super(AnsibleTextIOWrapper, self).write(to_text(s, self.encoding, errors='replace')) - - -def find_executable(executable, cwd=None, path=None): - """Finds the full path to the executable specified""" - match = None - real_cwd = os.getcwd() - - if not cwd: - cwd = real_cwd - - if os.path.dirname(executable): - target = os.path.join(cwd, executable) - if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK): - match = executable - else: - path = os.environ.get('PATH', os.path.defpath) - - path_dirs = path.split(os.path.pathsep) - seen_dirs = set() - - for path_dir in path_dirs: - if path_dir in seen_dirs: - continue - - seen_dirs.add(path_dir) - - if os.path.abspath(path_dir) == real_cwd: - path_dir = cwd - - candidate = os.path.join(path_dir, executable) - - if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK): - match = candidate - break - - return match - - -def find_globals(g, tree): - """Uses AST to find globals in an ast tree""" - for child in tree: - if hasattr(child, 'body') and isinstance(child.body, list): - find_globals(g, child.body) - elif isinstance(child, (ast.FunctionDef, ast.ClassDef)): - g.add(child.name) - continue - elif isinstance(child, ast.Assign): - try: - g.add(child.targets[0].id) - except (IndexError, AttributeError): - pass - elif isinstance(child, ast.Import): - g.add(child.names[0].name) - elif isinstance(child, ast.ImportFrom): - for name in child.names: - g_name = name.asname or name.name - if g_name == '*': - continue - g.add(g_name) - - -class CaptureStd(): - """Context manager to handle capturing stderr and stdout""" - - def __enter__(self): - self.sys_stdout = sys.stdout - self.sys_stderr = sys.stderr - sys.stdout = self.stdout = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stdout.encoding) - sys.stderr = self.stderr = AnsibleTextIOWrapper(BytesIO(),
encoding=self.sys_stderr.encoding) - return self - - def __exit__(self, exc_type, exc_value, traceback): - sys.stdout = self.sys_stdout - sys.stderr = self.sys_stderr - - def get(self): - """Return ``(stdout, stderr)``""" - - return self.stdout.buffer.getvalue(), self.stderr.buffer.getvalue() - - -def get_module_name_from_filename(filename, collection): - # Calculate the module's name so that relative imports work correctly - if collection: - # collection is a relative path, example: ansible_collections/my_namespace/my_collection - # filename is a relative path, example: plugins/modules/my_module.py - path = os.path.join(collection, filename) - else: - # filename is a relative path, example: lib/ansible/modules/system/ping.py - path = os.path.relpath(filename, 'lib') - - name = os.path.splitext(path)[0].replace(os.path.sep, '.') - - return name - - -def parse_yaml(value, lineno, module, name, load_all=False, ansible_loader=False): - traces = [] - errors = [] - data = None - - if load_all: - yaml_load = yaml.load_all - else: - yaml_load = yaml.load - - if ansible_loader: - loader = AnsibleLoader - else: - loader = SafeLoader - - try: - data = yaml_load(value, Loader=loader) - if load_all: - data = list(data) - except yaml.MarkedYAMLError as e: - e.problem_mark.line += lineno - 1 - e.problem_mark.name = '%s.%s' % (module, name) - errors.append({ - 'msg': '%s is not valid YAML' % name, - 'line': e.problem_mark.line + 1, - 'column': e.problem_mark.column + 1 - }) - traces.append(e) - except yaml.reader.ReaderError as e: - traces.append(e) - # TODO: Better line/column detection - errors.append({ - 'msg': ('%s is not valid YAML. Character ' - '0x%x at position %d.' % (name, e.character, e.position)), - 'line': lineno - }) - except yaml.YAMLError as e: - traces.append(e) - errors.append({ - 'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e), - 'line': lineno - }) - - return data, errors, traces - - -def is_empty(value): - """Evaluate null like values excluding False""" - if value is False: - return False - return not bool(value) - - -def compare_unordered_lists(a, b): - """Safe list comparisons - - Supports: - - unordered lists - - unhashable elements - """ - return len(a) == len(b) and all(x in b for x in a) - - -class NoArgsAnsibleModule(AnsibleModule): - """AnsibleModule that does not actually load params. This is used to get access to the - methods within AnsibleModule without having to fake a bunch of data - """ - def _load_params(self): - self.params = {'_ansible_selinux_special_fs': [], '_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False, '_ansible_check_mode': False} - - -def parse_isodate(v, allow_date): - if allow_date: - if isinstance(v, datetime.date): - return v - msg = 'Expected ISO 8601 date string (YYYY-MM-DD) or YAML date' - else: - msg = 'Expected ISO 8601 date string (YYYY-MM-DD)' - if not isinstance(v, string_types): - raise ValueError(msg) - # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions, - # we have to do things manually. 
- if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', v): - raise ValueError(msg) - try: - return datetime.datetime.strptime(v, '%Y-%m-%d').date() - except ValueError: - raise ValueError(msg) diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml b/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml deleted file mode 100644 index 45d8b7adcf..0000000000 --- a/test/lib/ansible_test/_data/sanity/yamllint/config/default.yml +++ /dev/null @@ -1,19 +0,0 @@ -extends: default - -rules: - braces: {max-spaces-inside: 1, level: error} - brackets: {max-spaces-inside: 1, level: error} - colons: {max-spaces-after: -1, level: error} - commas: {max-spaces-after: -1, level: error} - comments: disable - comments-indentation: disable - document-start: disable - empty-lines: {max: 3, level: error} - hyphens: {level: error} - indentation: disable - key-duplicates: enable - line-length: disable - new-line-at-end-of-file: disable - new-lines: {type: unix} - trailing-spaces: disable - truthy: disable diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml b/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml deleted file mode 100644 index da7e604999..0000000000 --- a/test/lib/ansible_test/_data/sanity/yamllint/config/modules.yml +++ /dev/null @@ -1,19 +0,0 @@ -extends: default - -rules: - braces: disable - brackets: disable - colons: disable - commas: disable - comments: disable - comments-indentation: disable - document-start: disable - empty-lines: disable - hyphens: disable - indentation: disable - key-duplicates: enable - line-length: disable - new-line-at-end-of-file: disable - new-lines: {type: unix} - trailing-spaces: disable - truthy: disable diff --git a/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml b/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml deleted file mode 100644 index 6d41813787..0000000000 --- a/test/lib/ansible_test/_data/sanity/yamllint/config/plugins.yml +++ /dev/null @@ -1,19 +0,0 @@ -extends: default - -rules: - braces: disable - brackets: disable - colons: disable - commas: disable - comments: disable - comments-indentation: disable - document-start: disable - empty-lines: disable - hyphens: disable - indentation: disable - key-duplicates: disable - line-length: disable - new-line-at-end-of-file: disable - new-lines: {type: unix} - trailing-spaces: disable - truthy: disable diff --git a/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py deleted file mode 100644 index b9fc73e59d..0000000000 --- a/test/lib/ansible_test/_data/sanity/yamllint/yamllinter.py +++ /dev/null @@ -1,274 +0,0 @@ -#!/usr/bin/env python -"""Wrapper around yamllint that supports YAML embedded in Ansible modules.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import ast -import json -import os -import re -import sys - -import yaml -from yaml.resolver import Resolver -from yaml.constructor import SafeConstructor -from yaml.error import MarkedYAMLError -from _yaml import CParser # pylint: disable=no-name-in-module - -from yamllint import linter -from yamllint.config import YamlLintConfig - - -def main(): - """Main program body.""" - paths = sys.argv[1:] or sys.stdin.read().splitlines() - - checker = YamlChecker() - checker.check(paths) - checker.report() - - -class TestConstructor(SafeConstructor): - """Yaml Safe Constructor that knows about Ansible tags""" - - def construct_yaml_unsafe(self, node): - try: - 
constructor = getattr(node, 'id', 'object') - if constructor is not None: - constructor = getattr(self, 'construct_%s' % constructor) - except AttributeError: - constructor = self.construct_object - - value = constructor(node) - - return value - - -TestConstructor.add_constructor( - u'!unsafe', - TestConstructor.construct_yaml_unsafe) - - -TestConstructor.add_constructor( - u'!vault', - TestConstructor.construct_yaml_str) - - -TestConstructor.add_constructor( - u'!vault-encrypted', - TestConstructor.construct_yaml_str) - - -class TestLoader(CParser, TestConstructor, Resolver): - def __init__(self, stream): - CParser.__init__(self, stream) - TestConstructor.__init__(self) - Resolver.__init__(self) - - -class YamlChecker: - """Wrapper around yamllint that supports YAML embedded in Ansible modules.""" - def __init__(self): - self.messages = [] - - def report(self): - """Print yamllint report to stdout.""" - report = dict( - messages=self.messages, - ) - - print(json.dumps(report, indent=4, sort_keys=True)) - - def check(self, paths): - """ - :type paths: t.List[str] - """ - config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config') - - yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml')) - module_conf = YamlLintConfig(file=os.path.join(config_path, 'modules.yml')) - plugin_conf = YamlLintConfig(file=os.path.join(config_path, 'plugins.yml')) - - for path in paths: - extension = os.path.splitext(path)[1] - - with open(path) as f: - contents = f.read() - - if extension in ('.yml', '.yaml'): - self.check_yaml(yaml_conf, path, contents) - elif extension == '.py': - if path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/'): - conf = module_conf - else: - conf = plugin_conf - - self.check_module(conf, path, contents) - else: - raise Exception('unsupported extension: %s' % extension) - - def check_yaml(self, conf, path, contents): - """ - :type conf: YamlLintConfig - :type path: str - :type contents: str - """ - self.check_parsable(path, contents) - self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)] - - def check_module(self, conf, path, contents): - """ - :type conf: YamlLintConfig - :type path: str - :type contents: str - """ - docs = self.get_module_docs(path, contents) - - for key, value in docs.items(): - yaml_data = value['yaml'] - lineno = value['lineno'] - fmt = value['fmt'] - - if fmt != 'yaml': - continue - - if yaml_data.startswith('\n'): - yaml_data = yaml_data[1:] - lineno += 1 - - self.check_parsable(path, yaml_data, lineno) - - messages = list(linter.run(yaml_data, conf, path)) - - self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages] - - def check_parsable(self, path, contents, lineno=1): - """ - :type path: str - :type contents: str - :type lineno: int - """ - try: - yaml.load(contents, Loader=TestLoader) - except MarkedYAMLError as e: - self.messages += [{'code': 'unparsable-with-libyaml', - 'message': '%s - %s' % (e.args[0], e.args[2]), - 'path': path, - 'line': e.problem_mark.line + lineno, - 'column': e.problem_mark.column + 1, - 'level': 'error', - }] - - @staticmethod - def result_to_message(result, path, line_offset=0, prefix=''): - """ - :type result: any - :type path: str - :type line_offset: int - :type prefix: str - :rtype: dict[str, any] - """ - if prefix: - prefix = '%s: ' % prefix - - return dict( - code=result.rule or result.level, - message=prefix + result.desc, - path=path, - line=result.line + line_offset, - 
column=result.column, - level=result.level, - ) - - def get_module_docs(self, path, contents): - """ - :type path: str - :type contents: str - :rtype: dict[str, any] - """ - module_doc_types = [ - 'DOCUMENTATION', - 'EXAMPLES', - 'RETURN', - ] - - docs = {} - - fmt_re = re.compile(r'^# fmt:\s+(\S+)') - - def check_assignment(statement, doc_types=None): - """Check the given statement for a documentation assignment.""" - for target in statement.targets: - if not isinstance(target, ast.Name): - continue - - if doc_types and target.id not in doc_types: - continue - - fmt_match = fmt_re.match(statement.value.s.lstrip()) - fmt = 'yaml' - if fmt_match: - fmt = fmt_match.group(1) - - docs[target.id] = dict( - yaml=statement.value.s, - lineno=statement.lineno, - end_lineno=statement.lineno + len(statement.value.s.splitlines()), - fmt=fmt.lower(), - ) - - module_ast = self.parse_module(path, contents) - - if not module_ast: - return {} - - is_plugin = path.startswith('lib/ansible/modules/') or path.startswith('lib/ansible/plugins/') or path.startswith('plugins/') - is_doc_fragment = path.startswith('lib/ansible/plugins/doc_fragments/') or path.startswith('plugins/doc_fragments/') - - if is_plugin and not is_doc_fragment: - for body_statement in module_ast.body: - if isinstance(body_statement, ast.Assign): - check_assignment(body_statement, module_doc_types) - elif is_doc_fragment: - for body_statement in module_ast.body: - if isinstance(body_statement, ast.ClassDef): - for class_statement in body_statement.body: - if isinstance(class_statement, ast.Assign): - check_assignment(class_statement) - else: - raise Exception('unsupported path: %s' % path) - - return docs - - def parse_module(self, path, contents): - """ - :type path: str - :type contents: str - :rtype: ast.Module | None - """ - try: - return ast.parse(contents) - except SyntaxError as ex: - self.messages.append(dict( - code='python-syntax-error', - message=str(ex), - path=path, - line=ex.lineno, - column=ex.offset, - level='error', - )) - except Exception as ex: # pylint: disable=broad-except - self.messages.append(dict( - code='python-parse-error', - message=str(ex), - path=path, - line=0, - column=0, - level='error', - )) - - return None - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 b/test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 deleted file mode 100644 index 7e039bb415..0000000000 --- a/test/lib/ansible_test/_data/setup/ConfigureRemotingForAnsible.ps1 +++ /dev/null @@ -1,453 +0,0 @@ -#Requires -Version 3.0 - -# Configure a Windows host for remote management with Ansible -# ----------------------------------------------------------- -# -# This script checks the current WinRM (PS Remoting) configuration and makes -# the necessary changes to allow Ansible to connect, authenticate and -# execute PowerShell commands. -# -# All events are logged to the Windows EventLog, useful for unattended runs. -# -# Use option -Verbose in order to see the verbose output messages. -# -# Use option -CertValidityDays to specify how long this certificate is valid -# starting from today. So you would specify -CertValidityDays 3650 to get -# a 10-year valid certificate. -# -# Use option -ForceNewSSLCert if the system has been SysPreped and a new -# SSL Certificate must be forced on the WinRM Listener when re-running this -# script. This is necessary when a new SID and CN name is created. -# -# Use option -EnableCredSSP to enable CredSSP as an authentication option. 
-# -# Use option -DisableBasicAuth to disable basic authentication. -# -# Use option -SkipNetworkProfileCheck to skip the network profile check. -# Without specifying this the script will only run if the device's interfaces -# are in DOMAIN or PRIVATE zones. Provide this switch if you want to enable -# WinRM on a device with an interface in PUBLIC zone. -# -# Use option -SubjectName to specify the CN name of the certificate. This -# defaults to the system's hostname and generally should not be specified. - -# Written by Trond Hindenes -# Updated by Chris Church -# Updated by Michael Crilly -# Updated by Anton Ouzounov -# Updated by Nicolas Simond -# Updated by Dag Wieërs -# Updated by Jordan Borean -# Updated by Erwan Quélin -# Updated by David Norman -# -# Version 1.0 - 2014-07-06 -# Version 1.1 - 2014-11-11 -# Version 1.2 - 2015-05-15 -# Version 1.3 - 2016-04-04 -# Version 1.4 - 2017-01-05 -# Version 1.5 - 2017-02-09 -# Version 1.6 - 2017-04-18 -# Version 1.7 - 2017-11-23 -# Version 1.8 - 2018-02-23 -# Version 1.9 - 2018-09-21 - -# Support -Verbose option -[CmdletBinding()] - -Param ( - [string]$SubjectName = $env:COMPUTERNAME, - [int]$CertValidityDays = 1095, - [switch]$SkipNetworkProfileCheck, - $CreateSelfSignedCert = $true, - [switch]$ForceNewSSLCert, - [switch]$GlobalHttpFirewallAccess, - [switch]$DisableBasicAuth = $false, - [switch]$EnableCredSSP -) - -Function Write-Log -{ - $Message = $args[0] - Write-EventLog -LogName Application -Source $EventSource -EntryType Information -EventId 1 -Message $Message -} - -Function Write-VerboseLog -{ - $Message = $args[0] - Write-Verbose $Message - Write-Log $Message -} - -Function Write-HostLog -{ - $Message = $args[0] - Write-Output $Message - Write-Log $Message -} - -Function New-LegacySelfSignedCert -{ - Param ( - [string]$SubjectName, - [int]$ValidDays = 1095 - ) - - $hostnonFQDN = $env:computerName - $hostFQDN = [System.Net.Dns]::GetHostByName(($env:computerName)).Hostname - $SignatureAlgorithm = "SHA256" - - $name = New-Object -COM "X509Enrollment.CX500DistinguishedName.1" - $name.Encode("CN=$SubjectName", 0) - - $key = New-Object -COM "X509Enrollment.CX509PrivateKey.1" - $key.ProviderName = "Microsoft Enhanced RSA and AES Cryptographic Provider" - $key.KeySpec = 1 - $key.Length = 4096 - $key.SecurityDescriptor = "D:PAI(A;;0xd01f01ff;;;SY)(A;;0xd01f01ff;;;BA)(A;;0x80120089;;;NS)" - $key.MachineContext = 1 - $key.Create() - - $serverauthoid = New-Object -COM "X509Enrollment.CObjectId.1" - $serverauthoid.InitializeFromValue("1.3.6.1.5.5.7.3.1") - $ekuoids = New-Object -COM "X509Enrollment.CObjectIds.1" - $ekuoids.Add($serverauthoid) - $ekuext = New-Object -COM "X509Enrollment.CX509ExtensionEnhancedKeyUsage.1" - $ekuext.InitializeEncode($ekuoids) - - $cert = New-Object -COM "X509Enrollment.CX509CertificateRequestCertificate.1" - $cert.InitializeFromPrivateKey(2, $key, "") - $cert.Subject = $name - $cert.Issuer = $cert.Subject - $cert.NotBefore = (Get-Date).AddDays(-1) - $cert.NotAfter = $cert.NotBefore.AddDays($ValidDays) - - $SigOID = New-Object -ComObject X509Enrollment.CObjectId - $SigOID.InitializeFromValue(([Security.Cryptography.Oid]$SignatureAlgorithm).Value) - - [string[]] $AlternativeName += $hostnonFQDN - $AlternativeName += $hostFQDN - $IAlternativeNames = New-Object -ComObject X509Enrollment.CAlternativeNames - - foreach ($AN in $AlternativeName) - { - $AltName = New-Object -ComObject X509Enrollment.CAlternativeName - $AltName.InitializeFromString(0x3,$AN) - $IAlternativeNames.Add($AltName) - } - - $SubjectAlternativeName = 
New-Object -ComObject X509Enrollment.CX509ExtensionAlternativeNames - $SubjectAlternativeName.InitializeEncode($IAlternativeNames) - - [String[]]$KeyUsage = ("DigitalSignature", "KeyEncipherment") - $KeyUsageObj = New-Object -ComObject X509Enrollment.CX509ExtensionKeyUsage - $KeyUsageObj.InitializeEncode([int][Security.Cryptography.X509Certificates.X509KeyUsageFlags]($KeyUsage)) - $KeyUsageObj.Critical = $true - - $cert.X509Extensions.Add($KeyUsageObj) - $cert.X509Extensions.Add($ekuext) - $cert.SignatureInformation.HashAlgorithm = $SigOID - $CERT.X509Extensions.Add($SubjectAlternativeName) - $cert.Encode() - - $enrollment = New-Object -COM "X509Enrollment.CX509Enrollment.1" - $enrollment.InitializeFromRequest($cert) - $certdata = $enrollment.CreateRequest(0) - $enrollment.InstallResponse(2, $certdata, 0, "") - - # extract/return the thumbprint from the generated cert - $parsed_cert = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2 - $parsed_cert.Import([System.Text.Encoding]::UTF8.GetBytes($certdata)) - - return $parsed_cert.Thumbprint -} - -Function Enable-GlobalHttpFirewallAccess -{ - Write-Verbose "Forcing global HTTP firewall access" - # this is a fairly naive implementation; could be more sophisticated about rule matching/collapsing - $fw = New-Object -ComObject HNetCfg.FWPolicy2 - - # try to find/enable the default rule first - $add_rule = $false - $matching_rules = $fw.Rules | Where-Object { $_.Name -eq "Windows Remote Management (HTTP-In)" } - $rule = $null - If ($matching_rules) { - If ($matching_rules -isnot [Array]) { - Write-Verbose "Editing existing single HTTP firewall rule" - $rule = $matching_rules - } - Else { - # try to find one with the All or Public profile first - Write-Verbose "Found multiple existing HTTP firewall rules..." - $rule = $matching_rules | ForEach-Object { $_.Profiles -band 4 }[0] - - If (-not $rule -or $rule -is [Array]) { - Write-Verbose "Editing an arbitrary single HTTP firewall rule (multiple existed)" - # oh well, just pick the first one - $rule = $matching_rules[0] - } - } - } - - If (-not $rule) { - Write-Verbose "Creating a new HTTP firewall rule" - $rule = New-Object -ComObject HNetCfg.FWRule - $rule.Name = "Windows Remote Management (HTTP-In)" - $rule.Description = "Inbound rule for Windows Remote Management via WS-Management. [TCP 5985]" - $add_rule = $true - } - - $rule.Profiles = 0x7FFFFFFF - $rule.Protocol = 6 - $rule.LocalPorts = 5985 - $rule.RemotePorts = "*" - $rule.LocalAddresses = "*" - $rule.RemoteAddresses = "*" - $rule.Enabled = $true - $rule.Direction = 1 - $rule.Action = 1 - $rule.Grouping = "Windows Remote Management" - - If ($add_rule) { - $fw.Rules.Add($rule) - } - - Write-Verbose "HTTP firewall rule $($rule.Name) updated" -} - -# Setup error handling. -Trap -{ - $_ - Exit 1 -} -$ErrorActionPreference = "Stop" - -# Get the ID and security principal of the current user account -$myWindowsID=[System.Security.Principal.WindowsIdentity]::GetCurrent() -$myWindowsPrincipal=new-object System.Security.Principal.WindowsPrincipal($myWindowsID) - -# Get the security principal for the Administrator role -$adminRole=[System.Security.Principal.WindowsBuiltInRole]::Administrator - -# Check to see if we are currently running "as Administrator" -if (-Not $myWindowsPrincipal.IsInRole($adminRole)) -{ - Write-Output "ERROR: You need elevated Administrator privileges in order to run this script." - Write-Output " Start Windows PowerShell by using the Run as Administrator option." 
- Exit 2 -} - -$EventSource = $MyInvocation.MyCommand.Name -If (-Not $EventSource) -{ - $EventSource = "Powershell CLI" -} - -If ([System.Diagnostics.EventLog]::Exists('Application') -eq $False -or [System.Diagnostics.EventLog]::SourceExists($EventSource) -eq $False) -{ - New-EventLog -LogName Application -Source $EventSource -} - -# Detect PowerShell version. -If ($PSVersionTable.PSVersion.Major -lt 3) -{ - Write-Log "PowerShell version 3 or higher is required." - Throw "PowerShell version 3 or higher is required." -} - -# Find and start the WinRM service. -Write-Verbose "Verifying WinRM service." -If (!(Get-Service "WinRM")) -{ - Write-Log "Unable to find the WinRM service." - Throw "Unable to find the WinRM service." -} -ElseIf ((Get-Service "WinRM").Status -ne "Running") -{ - Write-Verbose "Setting WinRM service to start automatically on boot." - Set-Service -Name "WinRM" -StartupType Automatic - Write-Log "Set WinRM service to start automatically on boot." - Write-Verbose "Starting WinRM service." - Start-Service -Name "WinRM" -ErrorAction Stop - Write-Log "Started WinRM service." - -} - -# WinRM should be running; check that we have a PS session config. -If (!(Get-PSSessionConfiguration -Verbose:$false) -or (!(Get-ChildItem WSMan:\localhost\Listener))) -{ - If ($SkipNetworkProfileCheck) { - Write-Verbose "Enabling PS Remoting without checking Network profile." - Enable-PSRemoting -SkipNetworkProfileCheck -Force -ErrorAction Stop - Write-Log "Enabled PS Remoting without checking Network profile." - } - Else { - Write-Verbose "Enabling PS Remoting." - Enable-PSRemoting -Force -ErrorAction Stop - Write-Log "Enabled PS Remoting." - } -} -Else -{ - Write-Verbose "PS Remoting is already enabled." -} - -# Ensure LocalAccountTokenFilterPolicy is set to 1 -# https://github.com/ansible/ansible/issues/42978 -$token_path = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System" -$token_prop_name = "LocalAccountTokenFilterPolicy" -$token_key = Get-Item -Path $token_path -$token_value = $token_key.GetValue($token_prop_name, $null) -if ($token_value -ne 1) { - Write-Verbose "Setting LocalAccountTOkenFilterPolicy to 1" - if ($null -ne $token_value) { - Remove-ItemProperty -Path $token_path -Name $token_prop_name - } - New-ItemProperty -Path $token_path -Name $token_prop_name -Value 1 -PropertyType DWORD > $null -} - -# Make sure there is a SSL listener. -$listeners = Get-ChildItem WSMan:\localhost\Listener -If (!($listeners | Where-Object {$_.Keys -like "TRANSPORT=HTTPS"})) -{ - # We cannot use New-SelfSignedCertificate on 2012R2 and earlier - $thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays - Write-HostLog "Self-signed SSL certificate generated; thumbprint: $thumbprint" - - # Create the hashtables of settings to be used. - $valueset = @{ - Hostname = $SubjectName - CertificateThumbprint = $thumbprint - } - - $selectorset = @{ - Transport = "HTTPS" - Address = "*" - } - - Write-Verbose "Enabling SSL listener." - New-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset -ValueSet $valueset - Write-Log "Enabled SSL listener." -} -Else -{ - Write-Verbose "SSL listener is already active." 
- - # Force a new SSL cert on Listener if the $ForceNewSSLCert - If ($ForceNewSSLCert) - { - - # We cannot use New-SelfSignedCertificate on 2012R2 and earlier - $thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays - Write-HostLog "Self-signed SSL certificate generated; thumbprint: $thumbprint" - - $valueset = @{ - CertificateThumbprint = $thumbprint - Hostname = $SubjectName - } - - # Delete the listener for SSL - $selectorset = @{ - Address = "*" - Transport = "HTTPS" - } - Remove-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset - - # Add new Listener with new SSL cert - New-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset -ValueSet $valueset - } -} - -# Check for basic authentication. -$basicAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object {$_.Name -eq "Basic"} - -If ($DisableBasicAuth) -{ - If (($basicAuthSetting.Value) -eq $true) - { - Write-Verbose "Disabling basic auth support." - Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $false - Write-Log "Disabled basic auth support." - } - Else - { - Write-Verbose "Basic auth is already disabled." - } -} -Else -{ - If (($basicAuthSetting.Value) -eq $false) - { - Write-Verbose "Enabling basic auth support." - Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $true - Write-Log "Enabled basic auth support." - } - Else - { - Write-Verbose "Basic auth is already enabled." - } -} - -# If EnableCredSSP if set to true -If ($EnableCredSSP) -{ - # Check for CredSSP authentication - $credsspAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object {$_.Name -eq "CredSSP"} - If (($credsspAuthSetting.Value) -eq $false) - { - Write-Verbose "Enabling CredSSP auth support." - Enable-WSManCredSSP -role server -Force - Write-Log "Enabled CredSSP auth support." - } -} - -If ($GlobalHttpFirewallAccess) { - Enable-GlobalHttpFirewallAccess -} - -# Configure firewall to allow WinRM HTTPS connections. -$fwtest1 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS" -$fwtest2 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS" profile=any -If ($fwtest1.count -lt 5) -{ - Write-Verbose "Adding firewall rule to allow WinRM HTTPS." - netsh advfirewall firewall add rule profile=any name="Allow WinRM HTTPS" dir=in localport=5986 protocol=TCP action=allow - Write-Log "Added firewall rule to allow WinRM HTTPS." -} -ElseIf (($fwtest1.count -ge 5) -and ($fwtest2.count -lt 5)) -{ - Write-Verbose "Updating firewall rule to allow WinRM HTTPS for any profile." - netsh advfirewall firewall set rule name="Allow WinRM HTTPS" new profile=any - Write-Log "Updated firewall rule to allow WinRM HTTPS for any profile." -} -Else -{ - Write-Verbose "Firewall rule already exists to allow WinRM HTTPS." -} - -# Test a remoting connection to localhost, which should work. 
-$httpResult = Invoke-Command -ComputerName "localhost" -ScriptBlock {$env:COMPUTERNAME} -ErrorVariable httpError -ErrorAction SilentlyContinue -$httpsOptions = New-PSSessionOption -SkipCACheck -SkipCNCheck -SkipRevocationCheck - -$httpsResult = New-PSSession -UseSSL -ComputerName "localhost" -SessionOption $httpsOptions -ErrorVariable httpsError -ErrorAction SilentlyContinue - -If ($httpResult -and $httpsResult) -{ - Write-Verbose "HTTP: Enabled | HTTPS: Enabled" -} -ElseIf ($httpsResult -and !$httpResult) -{ - Write-Verbose "HTTP: Disabled | HTTPS: Enabled" -} -ElseIf ($httpResult -and !$httpsResult) -{ - Write-Verbose "HTTP: Enabled | HTTPS: Disabled" -} -Else -{ - Write-Log "Unable to establish an HTTP or HTTPS remoting session." - Throw "Unable to establish an HTTP or HTTPS remoting session." -} -Write-VerboseLog "PS Remoting has been successfully configured for Ansible." diff --git a/test/lib/ansible_test/_data/setup/docker.sh b/test/lib/ansible_test/_data/setup/docker.sh deleted file mode 100644 index ea60e1a6f3..0000000000 --- a/test/lib/ansible_test/_data/setup/docker.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/sh - -set -eu - -# Required for newer mysql-server packages to install/upgrade on Ubuntu 16.04. -rm -f /usr/sbin/policy-rc.d - -# Improve prompts on remote host for interactive use. -# `cat << EOF > ~/.bashrc` flakes sometimes since /tmp may not be ready yet in -# the container. So don't do that -echo "alias ls='ls --color=auto'" > ~/.bashrc -echo "export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '" >> ~/.bashrc -echo "cd ~/ansible/" >> ~/.bashrc diff --git a/test/lib/ansible_test/_data/setup/remote.sh b/test/lib/ansible_test/_data/setup/remote.sh deleted file mode 100644 index 9348ac6f9f..0000000000 --- a/test/lib/ansible_test/_data/setup/remote.sh +++ /dev/null @@ -1,185 +0,0 @@ -#!/bin/sh - -set -eu - -platform=#{platform} -platform_version=#{platform_version} -python_version=#{python_version} - -python_interpreter="python${python_version}" - -cd ~/ - -install_pip () { - if ! "${python_interpreter}" -m pip.__main__ --version --disable-pip-version-check 2>/dev/null; then - case "${python_version}" in - *) - pip_bootstrap_url="https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-20.3.4.py" - ;; - esac - curl --silent --show-error "${pip_bootstrap_url}" -o /tmp/get-pip.py - "${python_interpreter}" /tmp/get-pip.py --disable-pip-version-check --quiet - rm /tmp/get-pip.py - fi -} - -if [ "${platform}" = "freebsd" ]; then - py_version="$(echo "${python_version}" | tr -d '.')" - - if [ "${py_version}" = "27" ]; then - # on Python 2.7 our only option is to use virtualenv - virtualenv_pkg="py27-virtualenv" - else - # on Python 3.x we'll use the built-in venv instead - virtualenv_pkg="" - fi - - # Declare platform/python version combinations which do not have supporting OS packages available. - # For these combinations ansible-test will use pip to install the requirements instead. - case "${platform_version}/${python_version}" in - "11.4/3.8") - have_os_packages="" - ;; - "12.2/3.8") - have_os_packages="" - ;; - *) - have_os_packages="yes" - ;; - esac - - # PyYAML is never installed with an OS package since it does not include libyaml support. - # Instead, ansible-test will always install it using pip. 
- if [ "${have_os_packages}" ]; then - jinja2_pkg="py${py_version}-Jinja2" - cryptography_pkg="py${py_version}-cryptography" - else - jinja2_pkg="" - cryptography_pkg="" - fi - - while true; do - # shellcheck disable=SC2086 - env ASSUME_ALWAYS_YES=YES pkg bootstrap && \ - pkg install -q -y \ - bash \ - curl \ - gtar \ - libyaml \ - "python${py_version}" \ - ${jinja2_pkg} \ - ${cryptography_pkg} \ - ${virtualenv_pkg} \ - sudo \ - && break - echo "Failed to install packages. Sleeping before trying again..." - sleep 10 - done - - install_pip - - if ! grep '^PermitRootLogin yes$' /etc/ssh/sshd_config > /dev/null; then - sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config - service sshd restart - fi -elif [ "${platform}" = "rhel" ]; then - if grep '8\.' /etc/redhat-release; then - py_version="$(echo "${python_version}" | tr -d '.')" - - if [ "${py_version}" = "36" ]; then - py_pkg_prefix="python3" - else - py_pkg_prefix="python${py_version}" - fi - - while true; do - yum module install -q -y "python${py_version}" && \ - yum install -q -y \ - gcc \ - "${py_pkg_prefix}-devel" \ - "${py_pkg_prefix}-jinja2" \ - "${py_pkg_prefix}-cryptography" \ - iptables \ - && break - echo "Failed to install packages. Sleeping before trying again..." - sleep 10 - done - else - while true; do - yum install -q -y \ - gcc \ - python-devel \ - python-virtualenv \ - python2-cryptography \ - && break - echo "Failed to install packages. Sleeping before trying again..." - sleep 10 - done - - install_pip - fi - - # pin packaging and pyparsing to match the downstream vendored versions - "${python_interpreter}" -m pip install packaging==20.4 pyparsing==2.4.7 --disable-pip-version-check -elif [ "${platform}" = "centos" ]; then - while true; do - yum install -q -y \ - gcc \ - python-devel \ - python-virtualenv \ - python2-cryptography \ - libffi-devel \ - openssl-devel \ - && break - echo "Failed to install packages. Sleeping before trying again..." - sleep 10 - done - - install_pip -elif [ "${platform}" = "osx" ]; then - while true; do - pip install --disable-pip-version-check --quiet \ - 'virtualenv==16.7.10' \ - && break - echo "Failed to install packages. Sleeping before trying again..." - sleep 10 - done -elif [ "${platform}" = "aix" ]; then - chfs -a size=1G / - chfs -a size=4G /usr - chfs -a size=1G /var - chfs -a size=1G /tmp - chfs -a size=2G /opt - while true; do - yum install -q -y \ - gcc \ - libffi-devel \ - python-jinja2 \ - python-cryptography \ - python-pip && \ - pip install --disable-pip-version-check --quiet \ - 'virtualenv==16.7.10' \ - && break - echo "Failed to install packages. Sleeping before trying again..." - sleep 10 - done -fi - -# Improve prompts on remote host for interactive use. -# shellcheck disable=SC1117 -cat << EOF > ~/.bashrc -if ls --color > /dev/null 2>&1; then - alias ls='ls --color' -elif ls -G > /dev/null 2>&1; then - alias ls='ls -G' -fi -export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ ' -EOF - -# Make sure ~/ansible/ is the starting directory for interactive shells. 
-if [ "${platform}" = "osx" ]; then - echo "cd ~/ansible/" >> ~/.bashrc -elif [ "${platform}" = "macos" ] ; then - echo "export BASH_SILENCE_DEPRECATION_WARNING=1" >> ~/.bashrc - echo "cd ~/ansible/" >> ~/.bashrc -fi diff --git a/test/lib/ansible_test/_data/setup/ssh-keys.sh b/test/lib/ansible_test/_data/setup/ssh-keys.sh deleted file mode 100644 index 7846f3fef0..0000000000 --- a/test/lib/ansible_test/_data/setup/ssh-keys.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -# Configure SSH keys. - -ssh_public_key=#{ssh_public_key} -ssh_private_key=#{ssh_private_key} -ssh_key_type=#{ssh_key_type} - -ssh_path="${HOME}/.ssh" -private_key_path="${ssh_path}/id_${ssh_key_type}" - -if [ ! -f "${private_key_path}" ]; then - # write public/private ssh key pair - public_key_path="${private_key_path}.pub" - - # shellcheck disable=SC2174 - mkdir -m 0700 -p "${ssh_path}" - touch "${public_key_path}" "${private_key_path}" - chmod 0600 "${public_key_path}" "${private_key_path}" - echo "${ssh_public_key}" > "${public_key_path}" - echo "${ssh_private_key}" > "${private_key_path}" - - # add public key to authorized_keys - authoried_keys_path="${HOME}/.ssh/authorized_keys" - - # the existing file is overwritten to avoid conflicts (ex: RHEL on EC2 blocks root login) - cat "${public_key_path}" > "${authoried_keys_path}" - chmod 0600 "${authoried_keys_path}" - - # add localhost's server keys to known_hosts - known_hosts_path="${HOME}/.ssh/known_hosts" - - for key in /etc/ssh/ssh_host_*_key.pub; do - echo "localhost $(cat "${key}")" >> "${known_hosts_path}" - done -fi diff --git a/test/lib/ansible_test/_data/sslcheck.py b/test/lib/ansible_test/_data/sslcheck.py deleted file mode 100755 index 37b8227936..0000000000 --- a/test/lib/ansible_test/_data/sslcheck.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python -"""Show openssl version.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json - -# noinspection PyBroadException -try: - from ssl import OPENSSL_VERSION_INFO - VERSION = list(OPENSSL_VERSION_INFO[:3]) -except Exception: # pylint: disable=broad-except - VERSION = None - - -def main(): - """Main program entry point.""" - print(json.dumps(dict( - version=VERSION, - ))) - - -if __name__ == '__main__': - main() diff --git a/test/lib/ansible_test/_data/versions.py b/test/lib/ansible_test/_data/versions.py deleted file mode 100755 index 4babef0162..0000000000 --- a/test/lib/ansible_test/_data/versions.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python -"""Show python and pip versions.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import os -import sys -import warnings - -warnings.simplefilter('ignore') # avoid python version deprecation warnings when using newer pip dependencies - -try: - import pip -except ImportError: - pip = None - -print(sys.version) - -if pip: - print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__))) diff --git a/test/lib/ansible_test/_data/virtualenvcheck.py b/test/lib/ansible_test/_data/virtualenvcheck.py deleted file mode 100755 index 0c8f768034..0000000000 --- a/test/lib/ansible_test/_data/virtualenvcheck.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -"""Detect the real python interpreter when running in a virtual environment created by the 'virtualenv' module.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json - -try: - from sys import real_prefix -except ImportError: - real_prefix = None - 
-print(json.dumps(dict( - real_prefix=real_prefix, -))) diff --git a/test/lib/ansible_test/_data/yamlcheck.py b/test/lib/ansible_test/_data/yamlcheck.py deleted file mode 100755 index 591842f4ad..0000000000 --- a/test/lib/ansible_test/_data/yamlcheck.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -"""Show python and pip versions.""" -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type - -import json - -try: - import yaml -except ImportError: - yaml = None - -try: - from yaml import CLoader -except ImportError: - CLoader = None - -print(json.dumps(dict( - yaml=bool(yaml), - cloader=bool(CLoader), -))) diff --git a/test/lib/ansible_test/_internal/ansible_util.py b/test/lib/ansible_test/_internal/ansible_util.py index 6570b896dd..614e8aacb7 100644 --- a/test/lib/ansible_test/_internal/ansible_util.py +++ b/test/lib/ansible_test/_internal/ansible_util.py @@ -24,6 +24,8 @@ from .util import ( ANSIBLE_TEST_DATA_ROOT, ANSIBLE_BIN_PATH, ANSIBLE_SOURCE_ROOT, + ANSIBLE_TEST_TARGET_ROOT, + ANSIBLE_TEST_TOOLS_ROOT, get_ansible_version, ) @@ -97,7 +99,7 @@ def ansible_environment(args, color=True, ansible_config=None): # ansible-connection only requires the injector for code coverage # the correct python interpreter is already selected using the sys.executable used to invoke ansible ansible.update(dict( - ANSIBLE_CONNECTION_PATH=os.path.join(ANSIBLE_TEST_DATA_ROOT, 'injector', 'ansible-connection'), + ANSIBLE_CONNECTION_PATH=os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'injector', 'ansible-connection'), )) if isinstance(args, PosixIntegrationConfig): @@ -244,7 +246,7 @@ def check_pyyaml(args, version, required=True, quiet=False): pass python = find_python(version) - stdout, _dummy = run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'yamlcheck.py')], + stdout, _dummy = run_command(args, [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'yamlcheck.py')], capture=True, always=True) result = json.loads(stdout) @@ -284,7 +286,7 @@ def get_collection_detail(args, python): # type: (EnvironmentConfig, str) -> Co collection = data_context().content.collection directory = os.path.join(collection.root, collection.directory) - stdout = run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'collection_detail.py'), directory], capture=True, always=True)[0] + stdout = run_command(args, [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'collection_detail.py'), directory], capture=True, always=True)[0] result = json.loads(stdout) error = result.get('error') diff --git a/test/lib/ansible_test/_internal/classification/__init__.py b/test/lib/ansible_test/_internal/classification/__init__.py index 69c594ae29..4a3daef701 100644 --- a/test/lib/ansible_test/_internal/classification/__init__.py +++ b/test/lib/ansible_test/_internal/classification/__init__.py @@ -804,30 +804,12 @@ class PathMapper: 'integration': 'ansible-test', # run ansible-test self tests } - if path.startswith('test/lib/ansible_test/_data/sanity/'): - return { - 'sanity': 'all', # test infrastructure, run all sanity checks - 'integration': 'ansible-test', # run ansible-test self tests - } - if path.startswith('test/lib/ansible_test/_internal/commands/units/'): return { 'units': 'all', # test infrastructure, run all unit tests 'integration': 'ansible-test', # run ansible-test self tests } - if path.startswith('test/lib/ansible_test/_data/units/'): - return { - 'units': 'all', # test infrastructure, run all unit tests - 'integration': 'ansible-test', # run ansible-test self tests - } - - if 
path.startswith('test/lib/ansible_test/_data/pytest/'): - return { - 'units': 'all', # test infrastructure, run all unit tests - 'integration': 'ansible-test', # run ansible-test self tests - } - if path.startswith('test/lib/ansible_test/_data/requirements/'): if name in ( 'integration', @@ -854,6 +836,18 @@ class PathMapper: 'integration': cloud_target, } + if path.startswith('test/lib/ansible_test/_util/controller/sanity/') or path.startswith('test/lib/ansible_test/_util/target/sanity/'): + return { + 'sanity': 'all', # test infrastructure, run all sanity checks + 'integration': 'ansible-test', # run ansible-test self tests + } + + if path.startswith('test/lib/ansible_test/_util/target/pytest/'): + return { + 'units': 'all', # test infrastructure, run all unit tests + 'integration': 'ansible-test', # run ansible-test self tests + } + if path.startswith('test/lib/'): return all_tests(self.args) # test infrastructure, run all tests diff --git a/test/lib/ansible_test/_internal/classification/python.py b/test/lib/ansible_test/_internal/classification/python.py index 0ce0367561..1e3505db5a 100644 --- a/test/lib/ansible_test/_internal/classification/python.py +++ b/test/lib/ansible_test/_internal/classification/python.py @@ -269,7 +269,8 @@ class ModuleUtilFinder(ast.NodeVisitor): path_map = ( ('^hacking/build_library/build_ansible/', 'build_ansible/'), ('^lib/ansible/', 'ansible/'), - ('^test/lib/ansible_test/_data/sanity/validate-modules/', 'validate_modules/'), + ('^test/lib/ansible_test/_util/controller/sanity/validate-modules/', 'validate_modules/'), + ('^test/lib/ansible_test/_util/target/legacy_collection_loader/', 'legacy_collection_loader/'), ('^test/units/', 'test/units/'), ('^test/lib/ansible_test/_internal/', 'ansible_test/_internal/'), ('^test/integration/targets/.*/ansible_collections/(?P[^/]*)/(?P[^/]*)/', r'ansible_collections/\g/\g/'), diff --git a/test/lib/ansible_test/_internal/commands/coverage/combine.py b/test/lib/ansible_test/_internal/commands/coverage/combine.py index f4d664608b..caa4fa92e7 100644 --- a/test/lib/ansible_test/_internal/commands/coverage/combine.py +++ b/test/lib/ansible_test/_internal/commands/coverage/combine.py @@ -17,7 +17,7 @@ from ...io import ( ) from ...util import ( - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TOOLS_ROOT, display, ApplicationError, ) @@ -181,7 +181,7 @@ def _command_coverage_combine_powershell(args): coverage_files = get_powershell_coverage_files() def _default_stub_value(source_paths): - cmd = ['pwsh', os.path.join(ANSIBLE_TEST_DATA_ROOT, 'coverage_stub.ps1')] + cmd = ['pwsh', os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'coverage_stub.ps1')] cmd.extend(source_paths) stubs = json.loads(run_command(args, cmd, capture=True, always=True)[0]) diff --git a/test/lib/ansible_test/_internal/commands/integration/__init__.py b/test/lib/ansible_test/_internal/commands/integration/__init__.py index 75a504bc1a..f6e2721b60 100644 --- a/test/lib/ansible_test/_internal/commands/integration/__init__.py +++ b/test/lib/ansible_test/_internal/commands/integration/__init__.py @@ -72,7 +72,7 @@ from ...util import ( remove_tree, find_executable, raw_command, - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TOOLS_ROOT, SUPPORTED_PYTHON_VERSIONS, get_hash, ) @@ -1132,7 +1132,7 @@ class EnvironmentDescription: versions += SUPPORTED_PYTHON_VERSIONS versions += list(set(v.split('.', 1)[0] for v in SUPPORTED_PYTHON_VERSIONS)) - version_check = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'versions.py') + version_check = os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'versions.py') 
python_paths = dict((v, find_executable('python%s' % v, required=False)) for v in sorted(versions)) pip_paths = dict((v, find_executable('pip%s' % v, required=False)) for v in sorted(versions)) program_versions = dict((v, self.get_version([python_paths[v], version_check], warnings)) for v in sorted(python_paths) if python_paths[v]) diff --git a/test/lib/ansible_test/_internal/commands/sanity/__init__.py b/test/lib/ansible_test/_internal/commands/sanity/__init__.py index 58c838e862..45170ff113 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/__init__.py +++ b/test/lib/ansible_test/_internal/commands/sanity/__init__.py @@ -22,7 +22,8 @@ from ...util import ( load_plugins, parse_to_list_of_dict, ABC, - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_CONTROLLER_ROOT, + ANSIBLE_TEST_TARGET_ROOT, is_binary_file, read_lines_without_comments, get_available_python_versions, @@ -80,7 +81,8 @@ from ...content_config import ( ) COMMAND = 'sanity' -SANITY_ROOT = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'sanity') +SANITY_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'sanity') +TARGET_SANITY_ROOT = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'sanity') def command_sanity(args): diff --git a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py index 185f22d1fa..934da9f60c 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py +++ b/test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py @@ -28,7 +28,7 @@ from ...payload import ( from ...util import ( ANSIBLE_BIN_PATH, - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TARGET_ROOT, ) @@ -57,7 +57,7 @@ class BinSymlinksTest(SanityVersionNeutral): bin_names = os.listdir(bin_root) bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names) - injector_root = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'injector') + injector_root = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'injector') injector_names = os.listdir(injector_root) errors = [] # type: t.List[t.Tuple[str, str]] diff --git a/test/lib/ansible_test/_internal/commands/sanity/compile.py b/test/lib/ansible_test/_internal/commands/sanity/compile.py index 3577c60a33..9f92401efc 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/compile.py +++ b/test/lib/ansible_test/_internal/commands/sanity/compile.py @@ -12,7 +12,7 @@ from . import ( SanityFailure, SanitySuccess, SanityTargets, - SANITY_ROOT, + TARGET_SANITY_ROOT, ) from ...target import ( @@ -53,7 +53,7 @@ class CompileTest(SanityMultipleVersion): paths = [target.path for target in targets.include] - cmd = [find_python(python_version), os.path.join(SANITY_ROOT, 'compile', 'compile.py')] + cmd = [find_python(python_version), os.path.join(TARGET_SANITY_ROOT, 'compile', 'compile.py')] data = '\n'.join(paths) diff --git a/test/lib/ansible_test/_internal/commands/sanity/import.py b/test/lib/ansible_test/_internal/commands/sanity/import.py index d3a884555c..9dfd4f3407 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/import.py +++ b/test/lib/ansible_test/_internal/commands/sanity/import.py @@ -12,7 +12,7 @@ from . 
import ( SanityFailure, SanitySuccess, SanitySkipped, - SANITY_ROOT, + TARGET_SANITY_ROOT, ) from ...target import ( @@ -138,8 +138,8 @@ class ImportTest(SanityMultipleVersion): importer_path = os.path.join(virtual_environment_bin, 'importer.py') yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py') if not args.explain: - os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path) - os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path) + os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'importer.py')), importer_path) + os.symlink(os.path.abspath(os.path.join(TARGET_SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path) # activate the virtual environment env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH']) diff --git a/test/lib/ansible_test/_internal/commands/sanity/pylint.py b/test/lib/ansible_test/_internal/commands/sanity/pylint.py index 64983a4d49..e69d58b762 100644 --- a/test/lib/ansible_test/_internal/commands/sanity/pylint.py +++ b/test/lib/ansible_test/_internal/commands/sanity/pylint.py @@ -136,10 +136,11 @@ class PylintTest(SanitySingleVersion): if data_context().content.collection: add_context(remaining_paths, 'collection', lambda p: True) else: - add_context(remaining_paths, 'validate-modules', filter_path('test/lib/ansible_test/_data/sanity/validate-modules/')) + add_context(remaining_paths, 'validate-modules', filter_path('test/lib/ansible_test/_util/controller/sanity/validate-modules/')) add_context(remaining_paths, 'validate-modules-unit', filter_path('test/lib/ansible_test/tests/validate-modules-unit/')) - add_context(remaining_paths, 'sanity', filter_path('test/lib/ansible_test/_data/sanity/')) - add_context(remaining_paths, 'legacy-collection-loader', filter_path('test/lib/ansible_test/_data/legacy_collection_loader/')) + add_context(remaining_paths, 'sanity', filter_path('test/lib/ansible_test/_util/controller/sanity/')) + add_context(remaining_paths, 'sanity', filter_path('test/lib/ansible_test/_util/target/sanity/')) + add_context(remaining_paths, 'legacy-collection-loader', filter_path('test/lib/ansible_test/_util/target/legacy_collection_loader/')) add_context(remaining_paths, 'ansible-test', filter_path('test/lib/')) add_context(remaining_paths, 'test', filter_path('test/')) add_context(remaining_paths, 'hacking', filter_path('hacking/')) diff --git a/test/lib/ansible_test/_internal/commands/units/__init__.py b/test/lib/ansible_test/_internal/commands/units/__init__.py index 51aa804fa0..c6d520acf0 100644 --- a/test/lib/ansible_test/_internal/commands/units/__init__.py +++ b/test/lib/ansible_test/_internal/commands/units/__init__.py @@ -21,6 +21,7 @@ from ...util import ( CONTROLLER_PYTHON_VERSIONS, REMOTE_ONLY_PYTHON_VERSIONS, ANSIBLE_LIB_ROOT, + ANSIBLE_TEST_TARGET_ROOT, ) from ...util_common import ( @@ -203,7 +204,7 @@ def command_units(args): plugins.append('ansible_pytest_collections') if plugins: - env['PYTHONPATH'] += ':%s' % os.path.join(ANSIBLE_TEST_DATA_ROOT, 'pytest/plugins') + env['PYTHONPATH'] += ':%s' % os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'pytest/plugins') env['PYTEST_PLUGINS'] = ','.join(plugins) if args.collect_only: @@ -262,7 +263,7 @@ def get_units_ansible_python_path(args, test_context): # type: (UnitsConfig, st # legacy collection loader required by all python versions not supported by the controller write_text_file(os.path.join(ansible_test_path, '__init__.py'), '', True) 
write_text_file(os.path.join(ansible_test_path, '_internal', '__init__.py'), '', True) - os.symlink(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'legacy_collection_loader'), os.path.join(ansible_test_path, '_internal', 'legacy_collection_loader')) + os.symlink(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'legacy_collection_loader'), os.path.join(ansible_test_path, '_internal', 'legacy_collection_loader')) elif test_context == TestContext.modules: # only non-collection ansible module tests should have access to ansible built-in modules os.symlink(os.path.join(ANSIBLE_LIB_ROOT, 'modules'), os.path.join(ansible_path, 'modules')) diff --git a/test/lib/ansible_test/_internal/core_ci.py b/test/lib/ansible_test/_internal/core_ci.py index a48fdbe0e1..aad7d1da24 100644 --- a/test/lib/ansible_test/_internal/core_ci.py +++ b/test/lib/ansible_test/_internal/core_ci.py @@ -28,7 +28,7 @@ from .io import ( from .util import ( ApplicationError, display, - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TARGET_ROOT, ) from .util_common import ( @@ -325,7 +325,7 @@ class AnsibleCoreCI: display.info('Initializing new %s/%s instance %s.' % (self.platform, self.version, self.instance_id), verbosity=1) if self.platform == 'windows': - winrm_config = read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'ConfigureRemotingForAnsible.ps1')) + winrm_config = read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'ConfigureRemotingForAnsible.ps1')) else: winrm_config = None diff --git a/test/lib/ansible_test/_internal/delegation.py b/test/lib/ansible_test/_internal/delegation.py index 2ee176a1ba..53b52e1ffa 100644 --- a/test/lib/ansible_test/_internal/delegation.py +++ b/test/lib/ansible_test/_internal/delegation.py @@ -49,7 +49,7 @@ from .util import ( common_environment, display, ANSIBLE_BIN_PATH, - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TARGET_ROOT, ANSIBLE_LIB_ROOT, ANSIBLE_TEST_ROOT, tempdir, @@ -297,7 +297,7 @@ def delegate_docker(args, exclude, require): test_id = docker_run(args, test_image, options=test_options) - setup_sh = read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'docker.sh')) + setup_sh = read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'docker.sh')) ssh_keys_sh = get_ssh_key_setup(SshKey(args)) diff --git a/test/lib/ansible_test/_internal/executor.py b/test/lib/ansible_test/_internal/executor.py index aeebc9749f..245589327f 100644 --- a/test/lib/ansible_test/_internal/executor.py +++ b/test/lib/ansible_test/_internal/executor.py @@ -26,6 +26,7 @@ from .util import ( find_python, cmd_quote, ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TOOLS_ROOT, str_to_version, version_to_str, ) @@ -99,7 +100,7 @@ def get_openssl_version(args, python, python_version): # type: (EnvironmentConf if not python_version.startswith('2.'): # OpenSSL version checking only works on Python 3.x. # This should be the most accurate, since it is the Python we will be using. - version = json.loads(run_command(args, [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'sslcheck.py')], capture=True, always=True)[0])['version'] + version = json.loads(run_command(args, [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'sslcheck.py')], capture=True, always=True)[0])['version'] if version: display.info('Detected OpenSSL version %s under Python %s.' 
% (version_to_str(version), python_version), verbosity=1) diff --git a/test/lib/ansible_test/_internal/manage_ci.py b/test/lib/ansible_test/_internal/manage_ci.py index 70da398642..f8d659d8e0 100644 --- a/test/lib/ansible_test/_internal/manage_ci.py +++ b/test/lib/ansible_test/_internal/manage_ci.py @@ -19,7 +19,7 @@ from .util import ( Display, cmd_quote, display, - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TARGET_ROOT, ) from .util_common import ( @@ -276,7 +276,7 @@ class ManagePosixCI: """Configure remote host for testing. :type python_version: str """ - template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'remote.sh'))) + template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'remote.sh'))) setup_sh = template.substitute( platform=self.core_ci.platform, platform_version=self.core_ci.version, @@ -401,7 +401,7 @@ class ManagePosixCI: def get_ssh_key_setup(ssh_key): # type: (SshKey) -> str """Generate and return a script to configure SSH keys on a host.""" - template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'setup', 'ssh-keys.sh'))) + template = ShellScriptTemplate(read_text_file(os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'setup', 'ssh-keys.sh'))) ssh_keys_sh = template.substitute( ssh_public_key=ssh_key.pub_contents, diff --git a/test/lib/ansible_test/_internal/payload.py b/test/lib/ansible_test/_internal/payload.py index 87d6ad8134..1e6971e60b 100644 --- a/test/lib/ansible_test/_internal/payload.py +++ b/test/lib/ansible_test/_internal/payload.py @@ -47,7 +47,7 @@ ANSIBLE_BIN_SYMLINK_MAP = { 'ansible-inventory': 'ansible', 'ansible-playbook': 'ansible', 'ansible-pull': 'ansible', - 'ansible-test': '../test/lib/ansible_test/_data/cli/ansible_test_cli_stub.py', + 'ansible-test': '../test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py', 'ansible-vault': 'ansible', } diff --git a/test/lib/ansible_test/_internal/util.py b/test/lib/ansible_test/_internal/util.py index db3daaa4ca..ebb9a68f31 100644 --- a/test/lib/ansible_test/_internal/util.py +++ b/test/lib/ansible_test/_internal/util.py @@ -97,8 +97,14 @@ if not os.path.exists(ANSIBLE_LIB_ROOT): ANSIBLE_SOURCE_ROOT = ANSIBLE_ROOT ANSIBLE_TEST_DATA_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_data') +ANSIBLE_TEST_UTIL_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_util') ANSIBLE_TEST_CONFIG_ROOT = os.path.join(ANSIBLE_TEST_ROOT, 'config') +ANSIBLE_TEST_CONTROLLER_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'controller') +ANSIBLE_TEST_TARGET_ROOT = os.path.join(ANSIBLE_TEST_UTIL_ROOT, 'target') + +ANSIBLE_TEST_TOOLS_ROOT = os.path.join(ANSIBLE_TEST_CONTROLLER_ROOT, 'tools') + # Modes are set to allow all users the same level of access. # This permits files to be used in tests that change users. # The only exception is write access to directories for the user creating them. 
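For orientation, the new roots introduced above nest as follows (paths shown relative to test/lib/ansible_test; a summary derived from the os.path.join calls in this hunk, not part of the patch):

    ANSIBLE_TEST_UTIL_ROOT        # _util
    ANSIBLE_TEST_CONTROLLER_ROOT  # _util/controller
    ANSIBLE_TEST_TARGET_ROOT      # _util/target
    ANSIBLE_TEST_TOOLS_ROOT       # _util/controller/tools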
@@ -265,7 +271,7 @@ def generate_pip_command(python): :type python: str :rtype: list[str] """ - return [python, os.path.join(ANSIBLE_TEST_DATA_ROOT, 'quiet_pip.py')] + return [python, os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'quiet_pip.py')] def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None, diff --git a/test/lib/ansible_test/_internal/util_common.py b/test/lib/ansible_test/_internal/util_common.py index eac1152234..50f804ce0f 100644 --- a/test/lib/ansible_test/_internal/util_common.py +++ b/test/lib/ansible_test/_internal/util_common.py @@ -29,6 +29,7 @@ from .util import ( raw_command, read_lines_without_comments, ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TARGET_ROOT, ApplicationError, cmd_quote, SubprocessError, @@ -416,7 +417,7 @@ def intercept_command(args, cmd, target_name, env, capture=False, data=None, cwd cmd = list(cmd) version = python_version or args.python_version interpreter = virtualenv or find_python(version) - inject_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'injector') + inject_path = os.path.join(ANSIBLE_TEST_TARGET_ROOT, 'injector') if not virtualenv: # injection of python into the path is required when not activating a virtualenv diff --git a/test/lib/ansible_test/_internal/venv.py b/test/lib/ansible_test/_internal/venv.py index 49235f8662..181c01ba95 100644 --- a/test/lib/ansible_test/_internal/venv.py +++ b/test/lib/ansible_test/_internal/venv.py @@ -16,7 +16,7 @@ from .util import ( find_python, SubprocessError, get_available_python_versions, - ANSIBLE_TEST_DATA_ROOT, + ANSIBLE_TEST_TOOLS_ROOT, display, remove_tree, ) @@ -128,7 +128,7 @@ def get_python_real_prefix(args, path): # type: (EnvironmentConfig, str) -> t.O """ Return the real prefix of the specified interpreter or None if the interpreter is not a virtual environment created by 'virtualenv'. 
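For example, a virtualenv created from a system /usr/bin/python typically reports a real prefix of '/usr', while any other interpreter yields None.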
""" - cmd = [path, os.path.join(os.path.join(ANSIBLE_TEST_DATA_ROOT, 'virtualenvcheck.py'))] + cmd = [path, os.path.join(os.path.join(ANSIBLE_TEST_TOOLS_ROOT, 'virtualenvcheck.py'))] check_result = json.loads(run_command(args, cmd, capture=True, always=True)[0]) real_prefix = check_result['real_prefix'] return real_prefix diff --git a/test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py b/test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py new file mode 100755 index 0000000000..d12b6334ef --- /dev/null +++ b/test/lib/ansible_test/_util/controller/cli/ansible_test_cli_stub.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# PYTHON_ARGCOMPLETE_OK +"""Command line entry point for ansible-test.""" + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + """Main program entry point.""" + ansible_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + source_root = os.path.join(ansible_root, 'test', 'lib') + + if os.path.exists(os.path.join(source_root, 'ansible_test', '_internal', 'cli.py')): + # running from source, use that version of ansible-test instead of any version that may already be installed + sys.path.insert(0, source_root) + + # noinspection PyProtectedMember + from ansible_test._internal.cli import main as cli_main + + cli_main() + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.json new file mode 100644 index 0000000000..12bbe0d11e --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.json @@ -0,0 +1,13 @@ +{ + "all_targets": true, + "prefixes": [ + "lib/ansible/modules/", + "lib/ansible/plugins/action/", + "plugins/modules/", + "plugins/action/" + ], + "extensions": [ + ".py" + ], + "output": "path-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py new file mode 100755 index 0000000000..65142e0033 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/action-plugin-docs.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +"""Test to verify action plugins have an associated module to provide documentation.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + """Main entry point.""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + module_names = set() + + module_prefixes = { + 'lib/ansible/modules/': True, + 'plugins/modules/': False, + } + + action_prefixes = { + 'lib/ansible/plugins/action/': True, + 'plugins/action/': False, + } + + for path in paths: + full_name = get_full_name(path, module_prefixes) + + if full_name: + module_names.add(full_name) + + for path in paths: + full_name = get_full_name(path, action_prefixes) + + if full_name and full_name not in module_names: + print('%s: action plugin has no matching module to provide documentation' % path) + + +def get_full_name(path, prefixes): + """Return the full name of the plugin at the given path by matching against the given path prefixes, or None if no match is found.""" + for prefix, flat in prefixes.items(): + if path.startswith(prefix): + relative_path = os.path.relpath(path, prefix) + + if flat: + full_name = os.path.basename(relative_path) + else: + full_name = 
relative_path + + full_name = os.path.splitext(full_name)[0] + + name = os.path.basename(full_name) + + if name == '__init__': + return None + + if name.startswith('_'): + name = name[1:] + + full_name = os.path.join(os.path.dirname(full_name), name).replace(os.path.sep, '.') + + return full_name + + return None + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.json new file mode 100644 index 0000000000..7d19f101f2 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.json @@ -0,0 +1,8 @@ +{ + "intercept": true, + "prefixes": [ + "changelogs/config.yaml", + "changelogs/fragments/" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py new file mode 100755 index 0000000000..2ccfb24f23 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys +import subprocess + + +def main(): + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + allowed_extensions = ('.yml', '.yaml') + config_path = 'changelogs/config.yaml' + + # config must be detected independent of the file list since the file list only contains files under test (changed) + has_config = os.path.exists(config_path) + paths_to_check = [] + for path in paths: + if path == config_path: + continue + + if path.startswith('changelogs/fragments/.'): + if path in ('changelogs/fragments/.keep', 'changelogs/fragments/.gitkeep'): + continue + + print('%s:%d:%d: file must not be a dotfile' % (path, 0, 0)) + continue + + ext = os.path.splitext(path)[1] + + if ext not in allowed_extensions: + print('%s:%d:%d: extension must be one of: %s' % (path, 0, 0, ', '.join(allowed_extensions))) + + paths_to_check.append(path) + + if not has_config: + print('changelogs/config.yaml:0:0: config file does not exist') + return + + if not paths_to_check: + return + + cmd = [sys.executable, '-m', 'antsibull_changelog', 'lint'] + paths_to_check + + # The sphinx module is a soft dependency for rstcheck, which is used by the changelog linter. + # If sphinx is found it will be loaded by rstcheck, which can affect the results of the test. + # To maintain consistency across environments, loading of sphinx is blocked, since any version (or no version) of sphinx may be present. 
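+    # The 'changelog' directory holds a stub sphinx.py that raises ImportError on import.
+    # Prepending it to PYTHONPATH shadows any installed sphinx, so rstcheck cannot load it.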
+ env = os.environ.copy() + env.update(PYTHONPATH='%s:%s' % (os.path.join(os.path.dirname(__file__), 'changelog'), env['PYTHONPATH'])) + + subprocess.call(cmd, env=env) # ignore the return code, rely on the output instead + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py new file mode 100644 index 0000000000..000c29e4e9 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/changelog/sphinx.py @@ -0,0 +1,5 @@ +"""Block the sphinx module from being loaded.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +raise ImportError('The sphinx module has been prevented from loading to maintain consistent test results.') diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.json new file mode 100644 index 0000000000..9835f9b6c8 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.json @@ -0,0 +1,14 @@ +{ + "prefixes": [ + "lib/ansible/modules/", + "lib/ansible/module_utils/", + "plugins/modules/", + "plugins/module_utils/", + "test/units/", + "tests/unit/" + ], + "files": [ + "__init__.py" + ], + "output": "path-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py new file mode 100755 index 0000000000..8bcd7f9ed9 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/empty-init.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + if os.path.getsize(path) > 0: + print('%s: empty __init__.py required' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.json new file mode 100644 index 0000000000..4ebce32c8c --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "py2_compat": true, + "output": "path-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py new file mode 100755 index 0000000000..81081eed7b --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/future-import-boilerplate.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + lines = path_fd.read().splitlines() + + missing = True + if not lines: + # Files are allowed to be empty of everything including boilerplate + missing = False + + for text in lines: + if text in (b'from __future__ import (absolute_import, division, print_function)', + b'from __future__ import absolute_import, division, print_function'): + missing = False + break + + if missing: + with open(path) as file: + contents = file.read() + + # 
noinspection PyBroadException + try: + node = ast.parse(contents) + + # files consisting of only assignments have no need for future import boilerplate + # the only exception would be division during assignment, but we'll overlook that for simplicity + # the most likely case is that of a documentation only python file + if all(isinstance(statement, ast.Assign) for statement in node.body): + missing = False + except Exception: # pylint: disable=broad-except + pass # the compile sanity test will report this error + + if missing: + print('%s: missing: from __future__ import (absolute_import, division, print_function)' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.json new file mode 100644 index 0000000000..db5c3c9809 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.json @@ -0,0 +1,4 @@ +{ + "text": true, + "output": "path-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py new file mode 100755 index 0000000000..1e4212d1b8 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/line-endings.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + contents = path_fd.read() + + if b'\r' in contents: + print('%s: use "\\n" for line endings instead of "\\r\\n"' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.json new file mode 100644 index 0000000000..4ebce32c8c --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "py2_compat": true, + "output": "path-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py new file mode 100755 index 0000000000..28d06f363b --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/metaclass-boilerplate.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + lines = path_fd.read().splitlines() + + missing = True + if not lines: + # Files are allowed to be empty of everything including boilerplate + missing = False + + for text in lines: + if text == b'__metaclass__ = type': + missing = False + break + + if missing: + with open(path) as file: + contents = file.read() + + # noinspection PyBroadException + try: + node = ast.parse(contents) + + # files consisting of only assignments have no need for metaclass boilerplate + # the most likely case is that of a documentation only python file + if all(isinstance(statement, ast.Assign) for statement in node.body): + missing = False + except Exception: # pylint: disable=broad-except + pass # the compile sanity test will report this error + + 
if missing: + print('%s: missing: __metaclass__ = type' % path) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.json new file mode 100644 index 0000000000..ccee80a2f1 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.json @@ -0,0 +1,10 @@ +{ + "extensions": [ + ".py" + ], + "prefixes": [ + "lib/ansible/", + "plugins/" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py new file mode 100755 index 0000000000..78561d966e --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-assert.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + +ASSERT_RE = re.compile(r'^\s*assert[^a-z0-9_:]') + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as f: + for i, line in enumerate(f.readlines()): + matches = ASSERT_RE.findall(line) + + if matches: + lineno = i + 1 + colno = line.index('assert') + 1 + print('%s:%d:%d: raise AssertionError instead of: %s' % (path, lineno, colno, matches[0][colno - 1:])) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.json new file mode 100644 index 0000000000..88858aeb61 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py new file mode 100755 index 0000000000..a35650efad --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-basestring.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(isinstance.*basestring)', text) + + if match: + print('%s:%d:%d: do not use `isinstance(s, basestring)`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.json new file mode 100644 index 0000000000..88858aeb61 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py new file mode 100755 index 0000000000..e28b24f4a9 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-dict-iteritems.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import 
(absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(?<! six)\.(iteritems)', text) + + if match: + print('%s:%d:%d: use `dict.items` or `ansible.module_utils.six.iteritems` instead of `dict.iteritems`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() +ILLEGAL_CHARS = [ + b'<', + b'>', + b':', + b'"', + b'/', + b'\\', + b'|', + b'?', + b'*' +] + [struct.pack("b", i) for i in range(32)] + +ILLEGAL_NAMES = [ + "CON", + "PRN", + "AUX", + "NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", +] + +ILLEGAL_END_CHARS = [ + '.', + ' ', +] + + +def check_path(path, is_dir=False): + type_name = 'directory' if is_dir else 'file' + file_name = os.path.basename(path.rstrip(os.path.sep)) + name = os.path.splitext(file_name)[0] + + if name.upper() in ILLEGAL_NAMES: + print("%s: illegal %s name %s" % (path, type_name, name.upper())) + + if file_name[-1] in ILLEGAL_END_CHARS: + print("%s: illegal %s name end-char '%s'" % (path, type_name, file_name[-1])) + + bfile = to_bytes(file_name, encoding='utf-8') + for char in ILLEGAL_CHARS: + if char in bfile: + bpath = to_bytes(path, encoding='utf-8') + print("%s: illegal char '%s' in %s name" % (bpath, char, type_name)) + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + check_path(path, is_dir=path.endswith(os.path.sep)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.json new file mode 100644 index 0000000000..ccee80a2f1 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.json @@ -0,0 +1,10 @@ +{ + "extensions": [ + ".py" + ], + "prefixes": [ + "lib/ansible/", + "plugins/" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py new file mode 100755 index 0000000000..74a36ecc58 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-main-display.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + +MAIN_DISPLAY_IMPORT = 'from __main__ import display' + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as f: + for i, line in enumerate(f.readlines()): + if MAIN_DISPLAY_IMPORT in line: + lineno = i + 1 + colno = line.index(MAIN_DISPLAY_IMPORT) + 1 + print('%s:%d:%d: Display is a singleton, just import and instantiate' % (path, lineno, colno)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.json new file mode 100644 index 0000000000..54d9fff587 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.json @@ -0,0 +1,5 @@ +{ + "text": true, + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py new file mode 100755 index 0000000000..e44005a55f --- /dev/null +++
b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-smart-quotes.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + for line, text in enumerate(path_fd.readlines()): + try: + text = text.decode('utf-8') + except UnicodeDecodeError as ex: + print('%s:%d:%d: UnicodeDecodeError: %s' % (path, line + 1, ex.start + 1, ex)) + continue + + match = re.search(u'([‘’“”])', text) + + if match: + print('%s:%d:%d: use ASCII quotes `\'` and `"` instead of Unicode quotes' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.json new file mode 100644 index 0000000000..88858aeb61 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py new file mode 100755 index 0000000000..e2201ab106 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/no-unicode-literals.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(unicode_literals)', text) + + if match: + print('%s:%d:%d: do not use `unicode_literals`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json new file mode 100644 index 0000000000..88858aeb61 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.json @@ -0,0 +1,7 @@ +{ + "extensions": [ + ".py" + ], + "ignore_self": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py new file mode 100755 index 0000000000..b2de1ba85d --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/replace-urlopen.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'^(?:[^#]*?)(urlopen)', text) + + if match: + print('%s:%d:%d: use `ansible.module_utils.urls.open_url` instead of `urlopen`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.json 
b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.json new file mode 100644 index 0000000000..44003ec0c9 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.json @@ -0,0 +1,11 @@ +{ + "prefixes": [ + "lib/ansible/config/ansible_builtin_runtime.yml", + "meta/routing.yml", + "meta/runtime.yml" + ], + "extensions": [ + ".yml" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py new file mode 100755 index 0000000000..7db04ced7b --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/runtime-metadata.py @@ -0,0 +1,280 @@ +#!/usr/bin/env python +"""Schema validation of ansible-core's ansible_builtin_runtime.yml and collection's meta/runtime.yml""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import datetime +import os +import re +import sys +import warnings + +from functools import partial + +import yaml + +from voluptuous import All, Any, MultipleInvalid, PREVENT_EXTRA +from voluptuous import Required, Schema, Invalid +from voluptuous.humanize import humanize_error + +from ansible.module_utils.compat.version import StrictVersion, LooseVersion +from ansible.module_utils.six import string_types +from ansible.utils.version import SemanticVersion + + +def isodate(value, check_deprecation_date=False, is_tombstone=False): + """Validate a datetime.date or ISO 8601 date string.""" + # datetime.date objects come from YAML dates, these are ok + if isinstance(value, datetime.date): + removal_date = value + else: + # make sure we have a string + msg = 'Expected ISO 8601 date string (YYYY-MM-DD), or YAML date' + if not isinstance(value, string_types): + raise Invalid(msg) + # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions, + # we have to do things manually. + if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value): + raise Invalid(msg) + try: + removal_date = datetime.datetime.strptime(value, '%Y-%m-%d').date() + except ValueError: + raise Invalid(msg) + # Make sure date is correct + today = datetime.date.today() + if is_tombstone: + # For a tombstone, the removal date must be in the past + if today < removal_date: + raise Invalid( + 'The tombstone removal_date (%s) must not be after today (%s)' % (removal_date, today)) + else: + # For a deprecation, the removal date must be in the future. Only test this if + # check_deprecation_date is truish, to avoid checks to suddenly start to fail. 
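+        # Example: with today = 2021-08-12, a removal_date of 2021-01-01 is rejected here, while 2022-01-01 is accepted.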
+ if check_deprecation_date and today > removal_date: + raise Invalid( + 'The deprecation removal_date (%s) must be after today (%s)' % (removal_date, today)) + return value + + +def removal_version(value, is_ansible, current_version=None, is_tombstone=False): + """Validate a removal version string.""" + msg = ( + 'Removal version must be a string' if is_ansible else + 'Removal version must be a semantic version (https://semver.org/)' + ) + if not isinstance(value, string_types): + raise Invalid(msg) + try: + if is_ansible: + version = StrictVersion() + version.parse(value) + version = LooseVersion(value) # We're storing Ansible's version as a LooseVersion + else: + version = SemanticVersion() + version.parse(value) + if version.major != 0 and (version.minor != 0 or version.patch != 0): + raise Invalid('removal_version (%r) must be a major release, not a minor or patch release ' + '(see specification at https://semver.org/)' % (value, )) + if current_version is not None: + if is_tombstone: + # For a tombstone, the removal version must not be in the future + if version > current_version: + raise Invalid('The tombstone removal_version (%r) must not be after the ' + 'current version (%s)' % (value, current_version)) + else: + # For a deprecation, the removal version must be in the future + if version <= current_version: + raise Invalid('The deprecation removal_version (%r) must be after the ' + 'current version (%s)' % (value, current_version)) + except ValueError: + raise Invalid(msg) + return value + + +def any_value(value): + """Accepts anything.""" + return value + + +def get_ansible_version(): + """Return current ansible-core version""" + from ansible.release import __version__ + + return LooseVersion('.'.join(__version__.split('.')[:3])) + + +def get_collection_version(): + """Return current collection version, or None if it is not available""" + import importlib.util + + collection_detail_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), 'tools', 'collection_detail.py') + collection_detail_spec = importlib.util.spec_from_file_location('collection_detail', collection_detail_path) + collection_detail = importlib.util.module_from_spec(collection_detail_spec) + sys.modules['collection_detail'] = collection_detail + collection_detail_spec.loader.exec_module(collection_detail) + + # noinspection PyBroadException + try: + result = collection_detail.read_manifest_json('.') or collection_detail.read_galaxy_yml('.') + return SemanticVersion(result['version']) + except Exception: # pylint: disable=broad-except + # We do not care why it fails, in case we cannot get the version + # just return None to indicate "we don't know". 
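+        # With no version available, removal_version checks that compare against the current version are skipped.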
+ return None + + +def validate_metadata_file(path, is_ansible, check_deprecation_dates=False): + """Validate explicit runtime metadata file""" + try: + with open(path, 'r') as f_path: + routing = yaml.safe_load(f_path) + except yaml.error.MarkedYAMLError as ex: + print('%s:%d:%d: YAML load failed: %s' % (path, ex.context_mark.line + + 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex)))) + return + except Exception as ex: # pylint: disable=broad-except + print('%s:%d:%d: YAML load failed: %s' % + (path, 0, 0, re.sub(r'\s+', ' ', str(ex)))) + return + + if is_ansible: + current_version = get_ansible_version() + else: + current_version = get_collection_version() + + # Updates to schema MUST also be reflected in the documentation + # ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html + + # plugin_routing schema + + avoid_additional_data = Schema( + Any( + { + Required('removal_version'): any_value, + 'warning_text': any_value, + }, + { + Required('removal_date'): any_value, + 'warning_text': any_value, + } + ), + extra=PREVENT_EXTRA + ) + + deprecation_schema = All( + # The first schema validates the input, and the second makes sure no extra keys are specified + Schema( + { + 'removal_version': partial(removal_version, is_ansible=is_ansible, + current_version=current_version), + 'removal_date': partial(isodate, check_deprecation_date=check_deprecation_dates), + 'warning_text': Any(*string_types), + } + ), + avoid_additional_data + ) + + tombstoning_schema = All( + # The first schema validates the input, and the second makes sure no extra keys are specified + Schema( + { + 'removal_version': partial(removal_version, is_ansible=is_ansible, + current_version=current_version, is_tombstone=True), + 'removal_date': partial(isodate, is_tombstone=True), + 'warning_text': Any(*string_types), + } + ), + avoid_additional_data + ) + + plugin_routing_schema = Any( + Schema({ + ('deprecation'): Any(deprecation_schema), + ('tombstone'): Any(tombstoning_schema), + ('redirect'): Any(*string_types), + }, extra=PREVENT_EXTRA), + ) + + list_dict_plugin_routing_schema = [{str_type: plugin_routing_schema} + for str_type in string_types] + + plugin_schema = Schema({ + ('action'): Any(None, *list_dict_plugin_routing_schema), + ('become'): Any(None, *list_dict_plugin_routing_schema), + ('cache'): Any(None, *list_dict_plugin_routing_schema), + ('callback'): Any(None, *list_dict_plugin_routing_schema), + ('cliconf'): Any(None, *list_dict_plugin_routing_schema), + ('connection'): Any(None, *list_dict_plugin_routing_schema), + ('doc_fragments'): Any(None, *list_dict_plugin_routing_schema), + ('filter'): Any(None, *list_dict_plugin_routing_schema), + ('httpapi'): Any(None, *list_dict_plugin_routing_schema), + ('inventory'): Any(None, *list_dict_plugin_routing_schema), + ('lookup'): Any(None, *list_dict_plugin_routing_schema), + ('module_utils'): Any(None, *list_dict_plugin_routing_schema), + ('modules'): Any(None, *list_dict_plugin_routing_schema), + ('netconf'): Any(None, *list_dict_plugin_routing_schema), + ('shell'): Any(None, *list_dict_plugin_routing_schema), + ('strategy'): Any(None, *list_dict_plugin_routing_schema), + ('terminal'): Any(None, *list_dict_plugin_routing_schema), + ('test'): Any(None, *list_dict_plugin_routing_schema), + ('vars'): Any(None, *list_dict_plugin_routing_schema), + }, extra=PREVENT_EXTRA) + + # import_redirection schema + + import_redirection_schema = Any( + Schema({ + ('redirect'): Any(*string_types), + # import_redirect doesn't currently support 
deprecation + }, extra=PREVENT_EXTRA) + ) + + list_dict_import_redirection_schema = [{str_type: import_redirection_schema} + for str_type in string_types] + + # top level schema + + schema = Schema({ + # All of these are optional + ('plugin_routing'): Any(plugin_schema), + ('import_redirection'): Any(None, *list_dict_import_redirection_schema), + # requires_ansible: In the future we should validate this with SpecifierSet + ('requires_ansible'): Any(*string_types), + ('action_groups'): dict, + }, extra=PREVENT_EXTRA) + + # Ensure schema is valid + + try: + schema(routing) + except MultipleInvalid as ex: + for error in ex.errors: + # No way to get line/column numbers + print('%s:%d:%d: %s' % (path, 0, 0, humanize_error(routing, error))) + + +def main(): + """Validate runtime metadata""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + collection_legacy_file = 'meta/routing.yml' + collection_runtime_file = 'meta/runtime.yml' + + # This is currently disabled, because if it is enabled this test can start failing + # at a random date. For this to be properly activated, we (a) need to be able to return + # codes for this test, and (b) make this error optional. + check_deprecation_dates = False + + for path in paths: + if path == collection_legacy_file: + print('%s:%d:%d: %s' % (path, 0, 0, ("Should be called '%s'" % collection_runtime_file))) + continue + + validate_metadata_file( + path, + is_ansible=path not in (collection_legacy_file, collection_runtime_file), + check_deprecation_dates=check_deprecation_dates) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.json new file mode 100644 index 0000000000..5648429eb0 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.json @@ -0,0 +1,4 @@ +{ + "text": true, + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py new file mode 100755 index 0000000000..b945734cf7 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/shebang.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import re +import stat +import sys + + +def main(): + standard_shebangs = set([ + b'#!/bin/bash -eu', + b'#!/bin/bash -eux', + b'#!/bin/sh', + b'#!/usr/bin/env bash', + b'#!/usr/bin/env fish', + b'#!/usr/bin/env pwsh', + b'#!/usr/bin/env python', + b'#!/usr/bin/make -f', + ]) + + integration_shebangs = set([ + b'#!/bin/sh', + b'#!/usr/bin/env bash', + b'#!/usr/bin/env python', + ]) + + module_shebangs = { + '': b'#!/usr/bin/python', + '.py': b'#!/usr/bin/python', + '.ps1': b'#!powershell', + } + + # see https://unicode.org/faq/utf_bom.html#bom1 + byte_order_marks = ( + (b'\x00\x00\xFE\xFF', 'UTF-32 (BE)'), + (b'\xFF\xFE\x00\x00', 'UTF-32 (LE)'), + (b'\xFE\xFF', 'UTF-16 (BE)'), + (b'\xFF\xFE', 'UTF-16 (LE)'), + (b'\xEF\xBB\xBF', 'UTF-8'), + ) + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as path_fd: + shebang = path_fd.readline().strip() + mode = os.stat(path).st_mode + executable = (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & mode + + if not shebang or not shebang.startswith(b'#!'): + if executable: + print('%s:%d:%d: file without shebang should not be executable' % (path, 0, 0)) + + for mark, name in 
byte_order_marks: + if shebang.startswith(mark): + print('%s:%d:%d: file starts with a %s byte order mark' % (path, 0, 0, name)) + break + + continue + + is_module = False + is_integration = False + + dirname = os.path.dirname(path) + + if path.startswith('lib/ansible/modules/'): + is_module = True + elif re.search('^test/support/[^/]+/plugins/modules/', path): + is_module = True + elif re.search('^test/support/[^/]+/collections/ansible_collections/[^/]+/[^/]+/plugins/modules/', path): + is_module = True + elif path.startswith('test/lib/ansible_test/_data/'): + pass + elif path.startswith('test/lib/ansible_test/_util/'): + pass + elif path.startswith('lib/') or path.startswith('test/lib/'): + if executable: + print('%s:%d:%d: should not be executable' % (path, 0, 0)) + + if shebang: + print('%s:%d:%d: should not have a shebang' % (path, 0, 0)) + + continue + elif path.startswith('test/integration/targets/') or path.startswith('tests/integration/targets/'): + is_integration = True + + if dirname.endswith('/library') or '/plugins/modules' in dirname or dirname in ( + # non-standard module library directories + 'test/integration/targets/module_precedence/lib_no_extension', + 'test/integration/targets/module_precedence/lib_with_extension', + ): + is_module = True + elif path.startswith('plugins/modules/'): + is_module = True + + if is_module: + if executable: + print('%s:%d:%d: module should not be executable' % (path, 0, 0)) + + ext = os.path.splitext(path)[1] + expected_shebang = module_shebangs.get(ext) + expected_ext = ' or '.join(['"%s"' % k for k in module_shebangs]) + + if expected_shebang: + if shebang == expected_shebang: + continue + + print('%s:%d:%d: expected module shebang "%s" but found: %s' % (path, 1, 1, expected_shebang, shebang)) + else: + print('%s:%d:%d: expected module extension %s but found: %s' % (path, 0, 0, expected_ext, ext)) + else: + if is_integration: + allowed = integration_shebangs + else: + allowed = standard_shebangs + + if shebang not in allowed: + print('%s:%d:%d: unexpected non-module shebang: %s' % (path, 1, 1, shebang)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.json new file mode 100644 index 0000000000..6f13c86b30 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.json @@ -0,0 +1,5 @@ +{ + "include_directories": true, + "include_symlinks": true, + "output": "path-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py new file mode 100755 index 0000000000..0585c6b1e5 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/symlinks.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + root_dir = os.getcwd() + os.path.sep + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + if not os.path.islink(path.rstrip(os.path.sep)): + continue + + if not os.path.exists(path): + print('%s: broken symlinks are not allowed' % path) + continue + + if path.endswith(os.path.sep): + print('%s: symlinks to directories are not allowed' % path) + continue + + real_path = os.path.realpath(path) + + if not real_path.startswith(root_dir): + print('%s: symlinks outside content tree are not allowed: %s' % (path, os.path.relpath(real_path, 
os.path.dirname(path)))) + continue + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.json new file mode 100644 index 0000000000..36103051b0 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.json @@ -0,0 +1,10 @@ +{ + "prefixes": [ + "lib/ansible/modules/", + "plugins/modules/" + ], + "extensions": [ + ".py" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py new file mode 100755 index 0000000000..687136dcdb --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-argspec-type-path.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'(expanduser)', text) + + if match: + print('%s:%d:%d: use argspec type="path" instead of type="str" to avoid use of `expanduser`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json new file mode 100644 index 0000000000..776590b74d --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.json @@ -0,0 +1,6 @@ +{ + "extensions": [ + ".py" + ], + "output": "path-line-column-message" +} diff --git a/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py new file mode 100755 index 0000000000..49cb76c5e2 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/code-smell/use-compat-six.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re +import sys + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'r') as path_fd: + for line, text in enumerate(path_fd.readlines()): + match = re.search(r'((^\s*import\s+six\b)|(^\s*from\s+six\b))', text) + + if match: + print('%s:%d:%d: use `ansible.module_utils.six` instead of `six`' % ( + path, line + 1, match.start(1) + 1)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py b/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py new file mode 100644 index 0000000000..74a45f009f --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/integration-aliases/yaml_to_json.py @@ -0,0 +1,15 @@ +"""Read YAML from stdin and write JSON to stdout.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json +import sys + +from yaml import load + +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader + +json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout) diff --git a/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt 
b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt new file mode 100644 index 0000000000..659c7f59e7 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pep8/current-ignore.txt @@ -0,0 +1,4 @@ +E402 +W503 +W504 +E741 diff --git a/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1 b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1 new file mode 100755 index 0000000000..1ef2743acd --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pslint/pslint.ps1 @@ -0,0 +1,43 @@ +#!/usr/bin/env pwsh +#Requires -Version 6 +#Requires -Modules PSScriptAnalyzer, PSSA-PSCustomUseLiteralPath + +Set-StrictMode -Version 2.0 +$ErrorActionPreference = "Stop" +$WarningPreference = "Stop" + +# Until https://github.com/PowerShell/PSScriptAnalyzer/issues/1217 is fixed we need to import Pester if it's +# available. +if (Get-Module -Name Pester -ListAvailable -ErrorAction SilentlyContinue) { + Import-Module -Name Pester +} + +$LiteralPathRule = Import-Module -Name PSSA-PSCustomUseLiteralPath -PassThru +$LiteralPathRulePath = Join-Path -Path $LiteralPathRule.ModuleBase -ChildPath $LiteralPathRule.RootModule + +$PSSAParams = @{ + CustomRulePath = @($LiteralPathRulePath) + IncludeDefaultRules = $true + Setting = (Join-Path -Path $PSScriptRoot -ChildPath "settings.psd1") +} + +$Results = @() + +ForEach ($Path in $Args) { + $Retries = 3 + + Do { + Try { + $Results += Invoke-ScriptAnalyzer -Path $Path @PSSAParams 3> $null + $Retries = 0 + } + Catch { + If (--$Retries -le 0) { + Throw + } + } + } + Until ($Retries -le 0) +} + +ConvertTo-Json -InputObject $Results diff --git a/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 new file mode 100644 index 0000000000..7646ec35e1 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pslint/settings.psd1 @@ -0,0 +1,13 @@ +@{ + ExcludeRules=@( + 'PSUseOutputTypeCorrectly', + 'PSUseShouldProcessForStateChangingFunctions', + # We send strings as plaintext so will always come across the 3 issues + 'PSAvoidUsingPlainTextForPassword', + 'PSAvoidUsingConvertToSecureStringWithPlainText', + 'PSAvoidUsingUserNameAndPassWordParams', + # We send the module as a base64 encoded string and a BOM will cause + # issues here + 'PSUseBOMForUnicodeEncodedFile' + ) +} diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg new file mode 100644 index 0000000000..187758f409 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/ansible-test.cfg @@ -0,0 +1,54 @@ +[MESSAGES CONTROL] + +disable= + consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + cyclic-import, # consistent results require running with --jobs 1 and testing all files + duplicate-code, # consistent results require running with --jobs 1 and testing all files + import-error, # inconsistent results which depend on the availability of imports + import-outside-toplevel, # common pattern in ansible related code + no-name-in-module, # inconsistent results which depend on the availability of imports + no-self-use, + raise-missing-from, # Python 2.x does not support raise from + super-with-arguments, # Python 2.x does not support super without arguments + too-few-public-methods, + 
too-many-ancestors, # inconsistent results between python 3.6 and 3.7+ + too-many-arguments, + too-many-branches, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-return-statements, + too-many-statements, + unused-import, # pylint does not understand PEP 484 type hints + +[BASIC] + +bad-names= + _, + bar, + baz, + foo, + tata, + toto, + tutu, + +good-names= + __metaclass__, + C, + ex, + i, + j, + k, + Run, + +class-attribute-rgx=[A-Za-z_][A-Za-z0-9_]{1,40}$ +attr-rgx=[a-z_][a-z0-9_]{1,40}$ +method-rgx=[a-z_][a-z0-9_]{1,40}$ +function-rgx=[a-z_][a-z0-9_]{1,40}$ + +[IMPORTS] + +preferred-modules = + distutils.version:ansible.module_utils.compat.version, diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg new file mode 100644 index 0000000000..31c140ccbf --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/collection.cfg @@ -0,0 +1,143 @@ +[MESSAGES CONTROL] + +disable= + abstract-method, + access-member-before-definition, + arguments-differ, + assignment-from-no-return, + assignment-from-none, + attribute-defined-outside-init, + bad-continuation, + bad-indentation, + bad-mcs-classmethod-argument, + broad-except, + c-extension-no-member, + cell-var-from-loop, + chained-comparison, + comparison-with-callable, + consider-iterating-dictionary, + consider-merging-isinstance, + consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + consider-using-dict-items, + consider-using-enumerate, + consider-using-get, + consider-using-in, + consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + consider-using-ternary, + consider-using-with, + cyclic-import, # consistent results require running with --jobs 1 and testing all files + deprecated-lambda, + deprecated-method, + deprecated-module, + duplicate-code, # consistent results require running with --jobs 1 and testing all files + eval-used, + exec-used, + expression-not-assigned, + fixme, + function-redefined, + global-statement, + global-variable-undefined, + import-error, # inconsistent results which depend on the availability of imports + import-outside-toplevel, # common pattern in ansible related code + import-self, + inconsistent-return-statements, + invalid-envvar-default, + invalid-name, + invalid-sequence-index, + keyword-arg-before-vararg, + len-as-condition, + line-too-long, + literal-comparison, + locally-disabled, + method-hidden, + misplaced-comparison-constant, + missing-docstring, + no-else-break, + no-else-continue, + no-else-raise, + no-else-return, + no-init, + no-member, + no-name-in-module, # inconsistent results which depend on the availability of imports + no-self-use, + no-value-for-parameter, + non-iterator-returned, + not-a-mapping, + not-an-iterable, + not-callable, + old-style-class, + pointless-statement, + pointless-string-statement, + possibly-unused-variable, + protected-access, + raise-missing-from, # Python 2.x does not support raise from + redefined-argument-from-local, + redefined-builtin, + redefined-outer-name, + redefined-variable-type, + reimported, + relative-beyond-top-level, # https://github.com/PyCQA/pylint/issues/2967 + signature-differs, + simplifiable-if-expression, + simplifiable-if-statement, + subprocess-popen-preexec-fn, + super-init-not-called, + super-with-arguments, # Python 2.x does not support super without arguments + 
superfluous-parens, + too-few-public-methods, + too-many-ancestors, # inconsistent results between python 3.6 and 3.7+ + too-many-arguments, + too-many-boolean-expressions, + too-many-branches, + too-many-function-args, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-public-methods, + too-many-return-statements, + too-many-statements, + trailing-comma-tuple, + trailing-comma-tuple, + try-except-raise, + unbalanced-tuple-unpacking, + undefined-loop-variable, + unexpected-keyword-arg, + ungrouped-imports, + unidiomatic-typecheck, + unnecessary-pass, + unsubscriptable-object, + unsupported-assignment-operation, + unsupported-delete-operation, + unsupported-membership-test, + unused-argument, + unused-import, + unused-variable, + useless-object-inheritance, + useless-return, + useless-super-delegation, + wrong-import-order, + wrong-import-position, + +[BASIC] + +bad-names= + _, + bar, + baz, + foo, + tata, + toto, + tutu, + +good-names= + ex, + i, + j, + k, + Run, + +[TYPECHECK] + +ignored-modules= + _MovedItems, diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg new file mode 100644 index 0000000000..a1275aa981 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/default.cfg @@ -0,0 +1,148 @@ +[MESSAGES CONTROL] + +disable= + import-outside-toplevel, # common pattern in ansible related code + abstract-method, + access-member-before-definition, + arguments-differ, + assignment-from-no-return, + assignment-from-none, + attribute-defined-outside-init, + bad-continuation, + bad-indentation, + bad-mcs-classmethod-argument, + broad-except, + c-extension-no-member, + cell-var-from-loop, + chained-comparison, + comparison-with-callable, + consider-iterating-dictionary, + consider-merging-isinstance, + consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + consider-using-dict-items, + consider-using-enumerate, + consider-using-get, + consider-using-in, + consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + consider-using-ternary, + consider-using-with, + cyclic-import, # consistent results require running with --jobs 1 and testing all files + deprecated-lambda, + deprecated-method, + deprecated-module, + duplicate-code, # consistent results require running with --jobs 1 and testing all files + eval-used, + exec-used, + expression-not-assigned, + fixme, + function-redefined, + global-statement, + global-variable-undefined, + import-error, # inconsistent results which depend on the availability of imports + import-self, + inconsistent-return-statements, + invalid-envvar-default, + invalid-name, + invalid-sequence-index, + keyword-arg-before-vararg, + len-as-condition, + line-too-long, + literal-comparison, + locally-disabled, + method-hidden, + misplaced-comparison-constant, + missing-docstring, + no-else-break, + no-else-continue, + no-else-raise, + no-else-return, + no-init, + no-member, + no-name-in-module, # inconsistent results which depend on the availability of imports + no-self-use, + no-value-for-parameter, + non-iterator-returned, + not-a-mapping, + not-an-iterable, + not-callable, + old-style-class, + pointless-statement, + pointless-string-statement, + possibly-unused-variable, + protected-access, + raise-missing-from, # Python 2.x does not support raise from + redefined-argument-from-local, + 
redefined-builtin, + redefined-outer-name, + redefined-variable-type, + reimported, + relative-import, + signature-differs, + simplifiable-if-expression, + simplifiable-if-statement, + subprocess-popen-preexec-fn, + super-init-not-called, + super-with-arguments, # Python 2.x does not support super without arguments + superfluous-parens, + too-few-public-methods, + too-many-ancestors, # inconsistent results between python 3.6 and 3.7+ + too-many-arguments, + too-many-boolean-expressions, + too-many-branches, + too-many-function-args, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-public-methods, + too-many-return-statements, + too-many-statements, + trailing-comma-tuple, + trailing-comma-tuple, + try-except-raise, + unbalanced-tuple-unpacking, + undefined-loop-variable, + unexpected-keyword-arg, + ungrouped-imports, + unidiomatic-typecheck, + unnecessary-pass, + unsubscriptable-object, + unsupported-assignment-operation, + unsupported-delete-operation, + unsupported-membership-test, + unused-argument, + unused-import, + unused-variable, + useless-object-inheritance, + useless-return, + useless-super-delegation, + wrong-import-order, + wrong-import-position, + +[BASIC] + +bad-names= + _, + bar, + baz, + foo, + tata, + toto, + tutu, + +good-names= + ex, + i, + j, + k, + Run, + +[TYPECHECK] + +ignored-modules= + _MovedItems, + +[IMPORTS] + +preferred-modules = + distutils.version:ansible.module_utils.compat.version, diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/config/sanity.cfg b/test/lib/ansible_test/_util/controller/sanity/pylint/config/sanity.cfg new file mode 100644 index 0000000000..bcf9549fd7 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/config/sanity.cfg @@ -0,0 +1,55 @@ +[MESSAGES CONTROL] + +disable= + consider-using-dict-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + consider-using-set-comprehension, # requires Python 2.7+, but we still require Python 2.6 support + cyclic-import, # consistent results require running with --jobs 1 and testing all files + duplicate-code, # consistent results require running with --jobs 1 and testing all files + import-error, # inconsistent results which depend on the availability of imports + import-outside-toplevel, # common pattern in ansible related code + missing-docstring, + no-name-in-module, # inconsistent results which depend on the availability of imports + raise-missing-from, # Python 2.x does not support raise from + super-with-arguments, # Python 2.x does not support super without arguments + too-few-public-methods, + too-many-ancestors, # inconsistent results between python 3.6 and 3.7+ + too-many-arguments, + too-many-branches, + too-many-instance-attributes, + too-many-lines, + too-many-locals, + too-many-nested-blocks, + too-many-return-statements, + too-many-statements, + unused-import, # pylint does not understand PEP 484 type hints + +[BASIC] + +bad-names= + _, + bar, + baz, + foo, + tata, + toto, + tutu, + +good-names= + __metaclass__, + C, + e, + ex, + f, + i, + j, + k, + Run, + +module-rgx=[a-z_][a-z0-9_-]{2,40}$ +method-rgx=[a-z_][a-z0-9_]{2,40}$ +function-rgx=[a-z_][a-z0-9_]{2,40}$ + +[IMPORTS] + +preferred-modules = + distutils.version:ansible.module_utils.compat.version, diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py new file mode 100644 index 0000000000..e39e5214bf --- 
/dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/deprecated.py @@ -0,0 +1,258 @@ +# (c) 2018, Matt Martz +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# -*- coding: utf-8 -*- +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import datetime +import re + +import astroid + +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers.utils import check_messages + +from ansible.module_utils.compat.version import LooseVersion +from ansible.module_utils.six import string_types +from ansible.release import __version__ as ansible_version_raw +from ansible.utils.version import SemanticVersion + +MSGS = { + 'E9501': ("Deprecated version (%r) found in call to Display.deprecated " + "or AnsibleModule.deprecate", + "ansible-deprecated-version", + "Used when a call to Display.deprecated specifies a version " + "less than or equal to the current version of Ansible", + {'minversion': (2, 6)}), + 'E9502': ("Display.deprecated call without a version or date", + "ansible-deprecated-no-version", + "Used when a call to Display.deprecated does not specify a " + "version or date", + {'minversion': (2, 6)}), + 'E9503': ("Invalid deprecated version (%r) found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "ansible-invalid-deprecated-version", + "Used when a call to Display.deprecated specifies an invalid " + "Ansible version number", + {'minversion': (2, 6)}), + 'E9504': ("Deprecated version (%r) found in call to Display.deprecated " + "or AnsibleModule.deprecate", + "collection-deprecated-version", + "Used when a call to Display.deprecated specifies a collection " + "version less than or equal to the current version of this " + "collection", + {'minversion': (2, 6)}), + 'E9505': ("Invalid deprecated version (%r) found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "collection-invalid-deprecated-version", + "Used when a call to Display.deprecated specifies an invalid " + "collection version number", + {'minversion': (2, 6)}), + 'E9506': ("No collection name found in call to Display.deprecated or " + "AnsibleModule.deprecate", + "ansible-deprecated-no-collection-name", + "The current collection name in format `namespace.name` must " + "be provided as collection_name when calling Display.deprecated " + "or AnsibleModule.deprecate (`ansible.builtin` for ansible-core)", + {'minversion': (2, 6)}), + 'E9507': ("Wrong collection name (%r) found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "wrong-collection-deprecated", + "The name of the current collection must be passed to the " + "Display.deprecated resp. AnsibleModule.deprecate calls " + "(`ansible.builtin` for ansible-core)", + {'minversion': (2, 6)}), + 'E9508': ("Expired date (%r) found in call to Display.deprecated " + "or AnsibleModule.deprecate", + "ansible-deprecated-date", + "Used when a call to Display.deprecated specifies a date " + "before today", + {'minversion': (2, 6)}), + 'E9509': ("Invalid deprecated date (%r) found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "ansible-invalid-deprecated-date", + "Used when a call to Display.deprecated specifies an invalid " + "date. 
It must be a string in format `YYYY-MM-DD` (ISO 8601)", + {'minversion': (2, 6)}), + 'E9510': ("Both version and date found in call to " + "Display.deprecated or AnsibleModule.deprecate", + "ansible-deprecated-both-version-and-date", + "Only one of version and date must be specified", + {'minversion': (2, 6)}), + 'E9511': ("Removal version (%r) must be a major release, not a minor or " + "patch release (see the specification at https://semver.org/)", + "removal-version-must-be-major", + "Used when a call to Display.deprecated or " + "AnsibleModule.deprecate for a collection specifies a version " + "which is not of the form x.0.0", + {'minversion': (2, 6)}), +} + + +ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3])) + + +def _get_expr_name(node): + """Function to get either ``attrname`` or ``name`` from ``node.func.expr`` + + Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated`` + """ + try: + return node.func.expr.attrname + except AttributeError: + # If this fails too, we'll let it raise, the caller should catch it + return node.func.expr.name + + +def parse_isodate(value): + msg = 'Expected ISO 8601 date string (YYYY-MM-DD)' + if not isinstance(value, string_types): + raise ValueError(msg) + # From Python 3.7 on, there is datetime.date.fromisoformat(). For older versions, + # we have to do things manually. + if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', value): + raise ValueError(msg) + try: + return datetime.datetime.strptime(value, '%Y-%m-%d').date() + except ValueError: + raise ValueError(msg) + + +class AnsibleDeprecatedChecker(BaseChecker): + """Checks for Display.deprecated calls to ensure that the ``version`` + has not passed or met the time for removal + """ + + __implements__ = (IAstroidChecker,) + name = 'deprecated' + msgs = MSGS + + options = ( + ('collection-name', { + 'default': None, + 'type': 'string', + 'metavar': '<name>', + 'help': 'The collection\'s name used to check collection names in deprecations.', + }), + ('collection-version', { + 'default': None, + 'type': 'string', + 'metavar': '<version>', + 'help': 'The collection\'s version number used to check deprecations.', + }), + ) + + def __init__(self, *args, **kwargs): + self.collection_version = None + self.collection_name = None + super(AnsibleDeprecatedChecker, self).__init__(*args, **kwargs) + + def set_option(self, optname, value, action=None, optdict=None): + super(AnsibleDeprecatedChecker, self).set_option(optname, value, action, optdict) + if optname == 'collection-version' and value is not None: + self.collection_version = SemanticVersion(self.config.collection_version) + if optname == 'collection-name' and value is not None: + self.collection_name = self.config.collection_name + + def _check_date(self, node, date): + if not isinstance(date, str): + self.add_message('invalid-date', node=node, args=(date,)) + return + + try: + date_parsed = parse_isodate(date) + except ValueError: + self.add_message('ansible-invalid-deprecated-date', node=node, args=(date,)) + return + + if date_parsed < datetime.date.today(): + self.add_message('ansible-deprecated-date', node=node, args=(date,)) + + def _check_version(self, node, version, collection_name): + if not isinstance(version, (str, float)): + self.add_message('invalid-version', node=node, args=(version,)) + return + + version_no = str(version) + + if collection_name == 'ansible.builtin': + # Ansible-base + try: + if not version_no: + raise ValueError('Version string should not be empty') + loose_version =
LooseVersion(str(version_no)) + if ANSIBLE_VERSION >= loose_version: + self.add_message('ansible-deprecated-version', node=node, args=(version,)) + except ValueError: + self.add_message('ansible-invalid-deprecated-version', node=node, args=(version,)) + elif collection_name: + # Collections + try: + if not version_no: + raise ValueError('Version string should not be empty') + semantic_version = SemanticVersion(version_no) + if collection_name == self.collection_name and self.collection_version is not None: + if self.collection_version >= semantic_version: + self.add_message('collection-deprecated-version', node=node, args=(version,)) + if semantic_version.major != 0 and (semantic_version.minor != 0 or semantic_version.patch != 0): + self.add_message('removal-version-must-be-major', node=node, args=(version,)) + except ValueError: + self.add_message('collection-invalid-deprecated-version', node=node, args=(version,)) + + @check_messages(*(MSGS.keys())) + def visit_call(self, node): + version = None + date = None + collection_name = None + try: + if (node.func.attrname == 'deprecated' and 'display' in _get_expr_name(node) or + node.func.attrname == 'deprecate' and _get_expr_name(node)): + if node.keywords: + for keyword in node.keywords: + if len(node.keywords) == 1 and keyword.arg is None: + # This is likely a **kwargs splat + return + if keyword.arg == 'version': + if isinstance(keyword.value.value, astroid.Name): + # This is likely a variable + return + version = keyword.value.value + if keyword.arg == 'date': + if isinstance(keyword.value.value, astroid.Name): + # This is likely a variable + return + date = keyword.value.value + if keyword.arg == 'collection_name': + if isinstance(keyword.value.value, astroid.Name): + # This is likely a variable + return + collection_name = keyword.value.value + if not version and not date: + try: + version = node.args[1].value + except IndexError: + self.add_message('ansible-deprecated-no-version', node=node) + return + if version and date: + self.add_message('ansible-deprecated-both-version-and-date', node=node) + + if collection_name: + this_collection = collection_name == (self.collection_name or 'ansible.builtin') + if not this_collection: + self.add_message('wrong-collection-deprecated', node=node, args=(collection_name,)) + elif self.collection_name is not None: + self.add_message('ansible-deprecated-no-collection-name', node=node) + + if date: + self._check_date(node, date) + elif version: + self._check_version(node, version, collection_name) + except AttributeError: + # Not the type of node we are interested in + pass + + +def register(linter): + """required method to auto register this checker """ + linter.register_checker(AnsibleDeprecatedChecker(linter)) diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py new file mode 100644 index 0000000000..1c22a08b97 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/string_format.py @@ -0,0 +1,84 @@ +# (c) 2018, Matt Martz +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +# -*- coding: utf-8 -*- +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import astroid +from pylint.interfaces import IAstroidChecker +from pylint.checkers import BaseChecker +from pylint.checkers import utils +from pylint.checkers.utils import check_messages +try: + from pylint.checkers.utils import 
parse_format_method_string +except ImportError: + # noinspection PyUnresolvedReferences + from pylint.checkers.strings import parse_format_method_string + +MSGS = { + 'E9305': ("Format string contains automatic field numbering " + "specification", + "ansible-format-automatic-specification", + "Used when a PEP 3101 format string contains automatic " + "field numbering (e.g. '{}').", + {'minversion': (2, 6)}), + 'E9390': ("bytes object has no .format attribute", + "ansible-no-format-on-bytestring", + "Used when a bytestring was used as a PEP 3101 format string " + "as Python3 bytestrings do not have a .format attribute", + {'minversion': (3, 0)}), +} + + +class AnsibleStringFormatChecker(BaseChecker): + """Checks string formatting operations to ensure that the format string + is valid and the arguments match the format string. + """ + + __implements__ = (IAstroidChecker,) + name = 'string' + msgs = MSGS + + @check_messages(*(MSGS.keys())) + def visit_call(self, node): + func = utils.safe_infer(node.func) + if (isinstance(func, astroid.BoundMethod) + and isinstance(func.bound, astroid.Instance) + and func.bound.name in ('str', 'unicode', 'bytes')): + if func.name == 'format': + self._check_new_format(node, func) + + def _check_new_format(self, node, func): + """ Check the new string formatting """ + if (isinstance(node.func, astroid.Attribute) + and not isinstance(node.func.expr, astroid.Const)): + return + try: + strnode = next(func.bound.infer()) + except astroid.InferenceError: + return + if not isinstance(strnode, astroid.Const): + return + + if isinstance(strnode.value, bytes): + self.add_message('ansible-no-format-on-bytestring', node=node) + return + if not isinstance(strnode.value, str): + return + + if node.starargs or node.kwargs: + return + try: + num_args = parse_format_method_string(strnode.value)[1] + except utils.IncompleteFormatString: + return + + if num_args: + self.add_message('ansible-format-automatic-specification', + node=node) + return + + +def register(linter): + """required method to auto register this checker """ + linter.register_checker(AnsibleStringFormatChecker(linter)) diff --git a/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py new file mode 100644 index 0000000000..3d9877e6a3 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/pylint/plugins/unwanted.py @@ -0,0 +1,240 @@ +"""A plugin for pylint to identify imports and functions which should not be used.""" +from __future__ import (absolute_import, division, print_function) + +__metaclass__ = type + +import os + +import astroid + +from pylint.checkers import BaseChecker +from pylint.interfaces import IAstroidChecker + +ANSIBLE_TEST_MODULES_PATH = os.environ['ANSIBLE_TEST_MODULES_PATH'] +ANSIBLE_TEST_MODULE_UTILS_PATH = os.environ['ANSIBLE_TEST_MODULE_UTILS_PATH'] + + +class UnwantedEntry: + """Defines an unwanted import.""" + def __init__(self, alternative, modules_only=False, names=None, ignore_paths=None): + """ + :type alternative: str + :type modules_only: bool + :type names: tuple[str] | None + :type ignore_paths: tuple[str] | None + """ + self.alternative = alternative + self.modules_only = modules_only + self.names = set(names) if names else set() + self.ignore_paths = ignore_paths + + def applies_to(self, path, name=None): + """ + :type path: str + :type name: str | None + :rtype: bool + """ + if self.names: + if not name: + return False + + if name not in self.names: + return False + + if 
self.ignore_paths and any(path.endswith(ignore_path) for ignore_path in self.ignore_paths): + return False + + if self.modules_only: + return is_module_path(path) + + return True + + +def is_module_path(path): + """ + :type path: str + :rtype: bool + """ + return path.startswith(ANSIBLE_TEST_MODULES_PATH) or path.startswith(ANSIBLE_TEST_MODULE_UTILS_PATH) + + +class AnsibleUnwantedChecker(BaseChecker): + """Checker for unwanted imports and functions.""" + __implements__ = (IAstroidChecker,) + + name = 'unwanted' + + BAD_IMPORT = 'ansible-bad-import' + BAD_IMPORT_FROM = 'ansible-bad-import-from' + BAD_FUNCTION = 'ansible-bad-function' + BAD_MODULE_IMPORT = 'ansible-bad-module-import' + + msgs = dict( + E5101=('Import %s instead of %s', + BAD_IMPORT, + 'Identifies imports which should not be used.'), + E5102=('Import %s from %s instead of %s', + BAD_IMPORT_FROM, + 'Identifies imports which should not be used.'), + E5103=('Call %s instead of %s', + BAD_FUNCTION, + 'Identifies functions which should not be used.'), + E5104=('Import external package or ansible.module_utils not %s', + BAD_MODULE_IMPORT, + 'Identifies imports which should not be used.'), + ) + + unwanted_imports = dict( + # Additional imports that we may want to start checking: + # boto=UnwantedEntry('boto3', modules_only=True), + # requests=UnwantedEntry('ansible.module_utils.urls', modules_only=True), + # urllib=UnwantedEntry('ansible.module_utils.urls', modules_only=True), + + # see https://docs.python.org/2/library/urllib2.html + urllib2=UnwantedEntry('ansible.module_utils.urls', + ignore_paths=( + '/lib/ansible/module_utils/urls.py', + )), + + # see https://docs.python.org/3/library/collections.abc.html + collections=UnwantedEntry('ansible.module_utils.common._collections_compat', + ignore_paths=( + '/lib/ansible/module_utils/common/_collections_compat.py', + ), + names=( + 'MappingView', + 'ItemsView', + 'KeysView', + 'ValuesView', + 'Mapping', 'MutableMapping', + 'Sequence', 'MutableSequence', + 'Set', 'MutableSet', + 'Container', + 'Hashable', + 'Sized', + 'Callable', + 'Iterable', + 'Iterator', + )), + ) + + unwanted_functions = { + # see https://docs.python.org/3/library/tempfile.html#tempfile.mktemp + 'tempfile.mktemp': UnwantedEntry('tempfile.mkstemp'), + + 'sys.exit': UnwantedEntry('exit_json or fail_json', + ignore_paths=( + '/lib/ansible/module_utils/basic.py', + '/lib/ansible/modules/async_wrapper.py', + ), + modules_only=True), + + 'builtins.print': UnwantedEntry('module.log or module.debug', + ignore_paths=( + '/lib/ansible/module_utils/basic.py', + ), + modules_only=True), + } + + def visit_import(self, node): + """ + :type node: astroid.node_classes.Import + """ + for name in node.names: + self._check_import(node, name[0]) + + def visit_importfrom(self, node): + """ + :type node: astroid.node_classes.ImportFrom + """ + self._check_importfrom(node, node.modname, node.names) + + def visit_attribute(self, node): + """ + :type node: astroid.node_classes.Attribute + """ + last_child = node.last_child() + + # this is faster than using type inference and will catch the most common cases + if not isinstance(last_child, astroid.node_classes.Name): + return + + module = last_child.name + + entry = self.unwanted_imports.get(module) + + if entry and entry.names: + if entry.applies_to(self.linter.current_file, node.attrname): + self.add_message(self.BAD_IMPORT_FROM, args=(node.attrname, entry.alternative, module), node=node) + + def visit_call(self, node): + """ + :type node: astroid.node_classes.Call + """ + try: + for i 
in node.func.inferred(): + func = None + + if isinstance(i, astroid.scoped_nodes.FunctionDef) and isinstance(i.parent, astroid.scoped_nodes.Module): + func = '%s.%s' % (i.parent.name, i.name) + + if not func: + continue + + entry = self.unwanted_functions.get(func) + + if entry and entry.applies_to(self.linter.current_file): + self.add_message(self.BAD_FUNCTION, args=(entry.alternative, func), node=node) + except astroid.exceptions.InferenceError: + pass + + def _check_import(self, node, modname): + """ + :type node: astroid.node_classes.Import + :type modname: str + """ + self._check_module_import(node, modname) + + entry = self.unwanted_imports.get(modname) + + if not entry: + return + + if entry.applies_to(self.linter.current_file): + self.add_message(self.BAD_IMPORT, args=(entry.alternative, modname), node=node) + + def _check_importfrom(self, node, modname, names): + """ + :type node: astroid.node_classes.ImportFrom + :type modname: str + :type names: list[tuple[str, str | None]] + """ + self._check_module_import(node, modname) + + entry = self.unwanted_imports.get(modname) + + if not entry: + return + + for name in names: + if entry.applies_to(self.linter.current_file, name[0]): + self.add_message(self.BAD_IMPORT_FROM, args=(name[0], entry.alternative, modname), node=node) + + def _check_module_import(self, node, modname): + """ + :type node: astroid.node_classes.Import | astroid.node_classes.ImportFrom + :type modname: str + """ + if not is_module_path(self.linter.current_file): + return + + if modname == 'ansible.module_utils' or modname.startswith('ansible.module_utils.'): + return + + if modname == 'ansible' or modname.startswith('ansible.'): + self.add_message(self.BAD_MODULE_IMPORT, args=(modname,), node=node) + + +def register(linter): + """required method to auto register this checker """ + linter.register_checker(AnsibleUnwantedChecker(linter)) diff --git a/test/lib/ansible_test/_util/controller/sanity/shellcheck/exclude.txt b/test/lib/ansible_test/_util/controller/sanity/shellcheck/exclude.txt new file mode 100644 index 0000000000..29588ddd86 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/shellcheck/exclude.txt @@ -0,0 +1,3 @@ +SC1090 +SC1091 +SC2164 diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py new file mode 100755 index 0000000000..c1e2bdaaeb --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/main.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from validate_modules.main import main + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules new file mode 120000 index 0000000000..11a5d8e18a --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate-modules @@ -0,0 +1 @@ +main.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py new file mode 100644 index 0000000000..d8ff2dc0d4 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/__init__.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2015 Matt Martz +# Copyright (C) 2015
Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type +__version__ = '0.0.1b' diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py new file mode 100644 index 0000000000..9f4f5c474d --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py @@ -0,0 +1,2425 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2015 Matt Martz +# Copyright (C) 2015 Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>.
+from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import abc +import argparse +import ast +import datetime +import json +import errno +import os +import re +import subprocess +import sys +import tempfile +import traceback + +from collections import OrderedDict +from contextlib import contextmanager +from ansible.module_utils.compat.version import StrictVersion, LooseVersion +from fnmatch import fnmatch + +import yaml + +from ansible import __version__ as ansible_version +from ansible.executor.module_common import REPLACER_WINDOWS +from ansible.module_utils.common._collections_compat import Mapping +from ansible.module_utils.common.parameters import DEFAULT_TYPE_VALIDATORS +from ansible.plugins.loader import fragment_loader +from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder +from ansible.utils.plugin_docs import REJECTLIST, add_collection_to_versions_and_dates, add_fragments, get_docstring +from ansible.utils.version import SemanticVersion + +from .module_args import AnsibleModuleImportError, AnsibleModuleNotInitialized, get_argument_spec + +from .schema import ansible_module_kwargs_schema, doc_schema, return_schema + +from .utils import CaptureStd, NoArgsAnsibleModule, compare_unordered_lists, is_empty, parse_yaml, parse_isodate +from voluptuous.humanize import humanize_error + +from ansible.module_utils.six import PY3, with_metaclass, string_types + +if PY3: + # Because there is no ast.TryExcept in Python 3 ast module + TRY_EXCEPT = ast.Try + # REPLACER_WINDOWS from ansible.executor.module_common is byte + # string but we need unicode for Python 3 + REPLACER_WINDOWS = REPLACER_WINDOWS.decode('utf-8') +else: + TRY_EXCEPT = ast.TryExcept + +REJECTLIST_DIRS = frozenset(('.git', 'test', '.github', '.idea')) +INDENT_REGEX = re.compile(r'([\t]*)') +TYPE_REGEX = re.compile(r'.*(if|or)(\s+[^"\']*|\s+)(?<!_)(?<!str\()type\([^)].*') +SYS_EXIT_REGEX = re.compile(r'[^#]*sys.exit\s*\(.*') +NO_LOG_REGEX = re.compile(r'(?:pass(?!ive)|secret|token|key)', re.I) + + +REJECTLIST_IMPORTS = { + 'requests': { + 'new_only': True, + 'error': { + 'code': 'use-module-utils-urls', + 'msg': ('requests import found, should use ' + 'ansible.module_utils.urls instead') + } + }, + r'boto(?:\.|$)': { + 'new_only': True, + 'error': { + 'code': 'use-boto3', + 'msg': 'boto import found, new modules should use boto3' + } + }, +} +SUBPROCESS_REGEX = re.compile(r'subprocess\.Po.*') +OS_CALL_REGEX = re.compile(r'os\.call.*') + + +LOOSE_ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version.split('.')[:3])) + + def _check_gpl3_header(self): + header = '\n'.join(self.text.split('\n')[:20]) + if ('GNU General Public License' not in header or + ('version 3' not in header and 'v3.0' not in header)): + self.reporter.error( + path=self.object_path, + code='missing-gplv3-license', + msg='GPLv3 license header not found in the first 20 lines of the module' + ) + elif self._is_new_module(): + if len([line for line in header + if 'GNU General Public License' in line]) > 1: + self.reporter.error( + path=self.object_path, + code='use-short-gplv3-license', + msg='Found old style GPLv3 license header: ' + 'https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_documenting.html#copyright' + ) + + def _check_for_subprocess(self): + for child in self.ast.body: + if isinstance(child, ast.Import): + if child.names[0].name == 'subprocess': + for line_no, line in enumerate(self.text.splitlines()): + sp_match = SUBPROCESS_REGEX.search(line) + if sp_match: + self.reporter.error( + path=self.object_path, + code='use-run-command-not-popen', + msg=('subprocess.Popen call found. Should be module.run_command'), + line=(line_no + 1), + column=(sp_match.span()[0] + 1) + ) + + def _check_for_os_call(self): + if 'os.call' in self.text: + for line_no, line in enumerate(self.text.splitlines()): + os_call_match = OS_CALL_REGEX.search(line) + if os_call_match: + self.reporter.error( + path=self.object_path, + code='use-run-command-not-os-call', + msg=('os.call() call found.
Should be module.run_command'), + line=(line_no + 1), + column=(os_call_match.span()[0] + 1) + ) + + def _find_rejectlist_imports(self): + for child in self.ast.body: + names = [] + if isinstance(child, ast.Import): + names.extend(child.names) + elif isinstance(child, TRY_EXCEPT): + bodies = child.body + for handler in child.handlers: + bodies.extend(handler.body) + for grandchild in bodies: + if isinstance(grandchild, ast.Import): + names.extend(grandchild.names) + for name in names: + # TODO: Add line/col + for rejectlist_import, options in REJECTLIST_IMPORTS.items(): + if re.search(rejectlist_import, name.name): + new_only = options['new_only'] + if self._is_new_module() and new_only: + self.reporter.error( + path=self.object_path, + **options['error'] + ) + elif not new_only: + self.reporter.error( + path=self.object_path, + **options['error'] + ) + + def _find_module_utils(self): + linenos = [] + found_basic = False + for child in self.ast.body: + if isinstance(child, (ast.Import, ast.ImportFrom)): + names = [] + try: + names.append(child.module) + if child.module.endswith('.basic'): + found_basic = True + except AttributeError: + pass + names.extend([n.name for n in child.names]) + + if [n for n in names if n.startswith('ansible.module_utils')]: + linenos.append(child.lineno) + + for name in child.names: + if ('module_utils' in getattr(child, 'module', '') and + isinstance(name, ast.alias) and + name.name == '*'): + msg = ( + 'module-utils-specific-import', + ('module_utils imports should import specific ' + 'components, not "*"') + ) + if self._is_new_module(): + self.reporter.error( + path=self.object_path, + code=msg[0], + msg=msg[1], + line=child.lineno + ) + else: + self.reporter.warning( + path=self.object_path, + code=msg[0], + msg=msg[1], + line=child.lineno + ) + + if (isinstance(name, ast.alias) and + name.name == 'basic'): + found_basic = True + + if not found_basic: + self.reporter.warning( + path=self.object_path, + code='missing-module-utils-basic-import', + msg='Did not find "ansible.module_utils.basic" import' + ) + + return linenos + + def _get_first_callable(self): + linenos = [] + for child in self.ast.body: + if isinstance(child, (ast.FunctionDef, ast.ClassDef)): + linenos.append(child.lineno) + + return min(linenos) + + def _find_has_import(self): + for child in self.ast.body: + found_try_except_import = False + found_has = False + if isinstance(child, TRY_EXCEPT): + bodies = child.body + for handler in child.handlers: + bodies.extend(handler.body) + for grandchild in bodies: + if isinstance(grandchild, ast.Import): + found_try_except_import = True + if isinstance(grandchild, ast.Assign): + for target in grandchild.targets: + if not isinstance(target, ast.Name): + continue + if target.id.lower().startswith('has_'): + found_has = True + if found_try_except_import and not found_has: + # TODO: Add line/col + self.reporter.warning( + path=self.object_path, + code='try-except-missing-has', + msg='Found Try/Except block without HAS_ assignment' + ) + + def _ensure_imports_below_docs(self, doc_info, first_callable): + try: + min_doc_line = min( + [doc_info[key]['lineno'] for key in doc_info if doc_info[key]['lineno']] + ) + except ValueError: + # We can't perform this validation, as there are no DOCs provided at all + return + + max_doc_line = max( + [doc_info[key]['end_lineno'] for key in doc_info if doc_info[key]['end_lineno']] + ) + + import_lines = [] + + for child in self.ast.body: + if isinstance(child, (ast.Import, ast.ImportFrom)): + if isinstance(child, 
ast.ImportFrom) and child.module == '__future__': + # allowed from __future__ imports + for future_import in child.names: + if future_import.name not in self.ACCEPTLIST_FUTURE_IMPORTS: + self.reporter.error( + path=self.object_path, + code='illegal-future-imports', + msg=('Only the following from __future__ imports are allowed: %s' + % ', '.join(self.ACCEPTLIST_FUTURE_IMPORTS)), + line=child.lineno + ) + break + else: # for-else. If we didn't find a problem and break out of the loop, then this is a legal import + continue + import_lines.append(child.lineno) + if child.lineno < min_doc_line: + self.reporter.error( + path=self.object_path, + code='import-before-documentation', + msg=('Import found before documentation variables. ' + 'All imports must appear below ' + 'DOCUMENTATION/EXAMPLES/RETURN.'), + line=child.lineno + ) + break + elif isinstance(child, TRY_EXCEPT): + bodies = child.body + for handler in child.handlers: + bodies.extend(handler.body) + for grandchild in bodies: + if isinstance(grandchild, (ast.Import, ast.ImportFrom)): + import_lines.append(grandchild.lineno) + if grandchild.lineno < min_doc_line: + self.reporter.error( + path=self.object_path, + code='import-before-documentation', + msg=('Import found before documentation ' + 'variables. All imports must appear below ' + 'DOCUMENTATION/EXAMPLES/RETURN.'), + line=child.lineno + ) + break + + for import_line in import_lines: + if not (max_doc_line < import_line < first_callable): + msg = ( + 'import-placement', + ('Imports should be directly below DOCUMENTATION/EXAMPLES/' + 'RETURN.') + ) + if self._is_new_module(): + self.reporter.error( + path=self.object_path, + code=msg[0], + msg=msg[1], + line=import_line + ) + else: + self.reporter.warning( + path=self.object_path, + code=msg[0], + msg=msg[1], + line=import_line + ) + + def _validate_ps_replacers(self): + # loop all (for/else + error) + # get module list for each + # check "shape" of each module name + + module_requires = r'(?im)^#\s*requires\s+\-module(?:s?)\s*(Ansible\.ModuleUtils\..+)' + csharp_requires = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*(Ansible\..+)' + found_requires = False + + for req_stmt in re.finditer(module_requires, self.text): + found_requires = True + # this will bomb on dictionary format - "don't do that" + module_list = [x.strip() for x in req_stmt.group(1).split(',')] + if len(module_list) > 1: + self.reporter.error( + path=self.object_path, + code='multiple-utils-per-requires', + msg='Ansible.ModuleUtils requirements do not support multiple modules per statement: "%s"' % req_stmt.group(0) + ) + continue + + module_name = module_list[0] + + if module_name.lower().endswith('.psm1'): + self.reporter.error( + path=self.object_path, + code='invalid-requires-extension', + msg='Module #Requires should not end in .psm1: "%s"' % module_name + ) + + for req_stmt in re.finditer(csharp_requires, self.text): + found_requires = True + # this will bomb on dictionary format - "don't do that" + module_list = [x.strip() for x in req_stmt.group(1).split(',')] + if len(module_list) > 1: + self.reporter.error( + path=self.object_path, + code='multiple-csharp-utils-per-requires', + msg='Ansible C# util requirements do not support multiple utils per statement: "%s"' % req_stmt.group(0) + ) + continue + + module_name = module_list[0] + + if module_name.lower().endswith('.cs'): + self.reporter.error( + path=self.object_path, + code='illegal-extension-cs', + msg='Module #AnsibleRequires -CSharpUtil should not end in .cs: "%s"' % module_name + ) + + # also accept
the legacy #POWERSHELL_COMMON replacer signal + if not found_requires and REPLACER_WINDOWS not in self.text: + self.reporter.error( + path=self.object_path, + code='missing-module-utils-import-csharp-requirements', + msg='No Ansible.ModuleUtils or C# Ansible util requirements/imports found' + ) + + def _find_ps_docs_py_file(self): + if self.object_name in self.PS_DOC_REJECTLIST: + return + py_path = self.path.replace('.ps1', '.py') + if not os.path.isfile(py_path): + self.reporter.error( + path=self.object_path, + code='missing-python-doc', + msg='Missing python documentation file' + ) + return py_path + + def _get_docs(self): + docs = { + 'DOCUMENTATION': { + 'value': None, + 'lineno': 0, + 'end_lineno': 0, + }, + 'EXAMPLES': { + 'value': None, + 'lineno': 0, + 'end_lineno': 0, + }, + 'RETURN': { + 'value': None, + 'lineno': 0, + 'end_lineno': 0, + }, + } + for child in self.ast.body: + if isinstance(child, ast.Assign): + for grandchild in child.targets: + if not isinstance(grandchild, ast.Name): + continue + + if grandchild.id == 'DOCUMENTATION': + docs['DOCUMENTATION']['value'] = child.value.s + docs['DOCUMENTATION']['lineno'] = child.lineno + docs['DOCUMENTATION']['end_lineno'] = ( + child.lineno + len(child.value.s.splitlines()) + ) + elif grandchild.id == 'EXAMPLES': + docs['EXAMPLES']['value'] = child.value.s + docs['EXAMPLES']['lineno'] = child.lineno + docs['EXAMPLES']['end_lineno'] = ( + child.lineno + len(child.value.s.splitlines()) + ) + elif grandchild.id == 'RETURN': + docs['RETURN']['value'] = child.value.s + docs['RETURN']['lineno'] = child.lineno + docs['RETURN']['end_lineno'] = ( + child.lineno + len(child.value.s.splitlines()) + ) + + return docs + + def _validate_docs_schema(self, doc, schema, name, error_code): + # TODO: Add line/col + errors = [] + try: + schema(doc) + except Exception as e: + for error in e.errors: + error.data = doc + errors.extend(e.errors) + + for error in errors: + path = [str(p) for p in error.path] + + local_error_code = getattr(error, 'ansible_error_code', error_code) + + if isinstance(error.data, dict): + error_message = humanize_error(error.data, error) + else: + error_message = error + + if path: + combined_path = '%s.%s' % (name, '.'.join(path)) + else: + combined_path = name + + self.reporter.error( + path=self.object_path, + code=local_error_code, + msg='%s: %s' % (combined_path, error_message) + ) + + def _validate_docs(self): + doc_info = self._get_docs() + doc = None + documentation_exists = False + examples_exist = False + returns_exist = False + # We have three ways of marking deprecated/removed files. 
Have to check each one + # individually and then make sure they all agree + filename_deprecated_or_removed = False + deprecated = False + removed = False + doc_deprecated = None # doc legally might not exist + routing_says_deprecated = False + + if self.object_name.startswith('_') and not os.path.islink(self.object_path): + filename_deprecated_or_removed = True + + # We are testing a collection + if self.routing: + routing_deprecation = self.routing.get('plugin_routing', {}).get('modules', {}).get(self.name, {}).get('deprecation', {}) + if routing_deprecation: + # meta/runtime.yml says this is deprecated + routing_says_deprecated = True + deprecated = True + + if not removed: + if not bool(doc_info['DOCUMENTATION']['value']): + self.reporter.error( + path=self.object_path, + code='missing-documentation', + msg='No DOCUMENTATION provided' + ) + else: + documentation_exists = True + doc, errors, traces = parse_yaml( + doc_info['DOCUMENTATION']['value'], + doc_info['DOCUMENTATION']['lineno'], + self.name, 'DOCUMENTATION' + ) + if doc: + add_collection_to_versions_and_dates(doc, self.collection_name, is_module=True) + for error in errors: + self.reporter.error( + path=self.object_path, + code='documentation-syntax-error', + **error + ) + for trace in traces: + self.reporter.trace( + path=self.object_path, + tracebk=trace + ) + if not errors and not traces: + missing_fragment = False + with CaptureStd(): + try: + get_docstring(self.path, fragment_loader, verbose=True, + collection_name=self.collection_name, is_module=True) + except AssertionError: + fragment = doc['extends_documentation_fragment'] + self.reporter.error( + path=self.object_path, + code='missing-doc-fragment', + msg='DOCUMENTATION fragment missing: %s' % fragment + ) + missing_fragment = True + except Exception as e: + self.reporter.trace( + path=self.object_path, + tracebk=traceback.format_exc() + ) + self.reporter.error( + path=self.object_path, + code='documentation-error', + msg='Unknown DOCUMENTATION error, see TRACE: %s' % e + ) + + if not missing_fragment: + add_fragments(doc, self.object_path, fragment_loader=fragment_loader, is_module=True) + + if 'options' in doc and doc['options'] is None: + self.reporter.error( + path=self.object_path, + code='invalid-documentation-options', + msg='DOCUMENTATION.options must be a dictionary/hash when used', + ) + + if 'deprecated' in doc and doc.get('deprecated'): + doc_deprecated = True + doc_deprecation = doc['deprecated'] + documentation_collection = doc_deprecation.get('removed_from_collection') + if documentation_collection != self.collection_name: + self.reporter.error( + path=self.object_path, + code='deprecation-wrong-collection', + msg='"DOCUMENTATION.deprecation.removed_from_collection must be the current collection name: %r vs. 
%r' % ( + documentation_collection, self.collection_name) + ) + else: + doc_deprecated = False + + if os.path.islink(self.object_path): + # This module has an alias, which we can tell as it's a symlink + # Rather than checking for `module: $filename` we need to check against the true filename + self._validate_docs_schema( + doc, + doc_schema( + os.readlink(self.object_path).split('.')[0], + for_collection=bool(self.collection), + deprecated_module=deprecated, + ), + 'DOCUMENTATION', + 'invalid-documentation', + ) + else: + # This is the normal case + self._validate_docs_schema( + doc, + doc_schema( + self.object_name.split('.')[0], + for_collection=bool(self.collection), + deprecated_module=deprecated, + ), + 'DOCUMENTATION', + 'invalid-documentation', + ) + + if not self.collection: + existing_doc = self._check_for_new_args(doc) + self._check_version_added(doc, existing_doc) + + if not bool(doc_info['EXAMPLES']['value']): + self.reporter.error( + path=self.object_path, + code='missing-examples', + msg='No EXAMPLES provided' + ) + else: + _doc, errors, traces = parse_yaml(doc_info['EXAMPLES']['value'], + doc_info['EXAMPLES']['lineno'], + self.name, 'EXAMPLES', load_all=True, + ansible_loader=True) + for error in errors: + self.reporter.error( + path=self.object_path, + code='invalid-examples', + **error + ) + for trace in traces: + self.reporter.trace( + path=self.object_path, + tracebk=trace + ) + + if not bool(doc_info['RETURN']['value']): + if self._is_new_module(): + self.reporter.error( + path=self.object_path, + code='missing-return', + msg='No RETURN provided' + ) + else: + self.reporter.warning( + path=self.object_path, + code='missing-return-legacy', + msg='No RETURN provided' + ) + else: + data, errors, traces = parse_yaml(doc_info['RETURN']['value'], + doc_info['RETURN']['lineno'], + self.name, 'RETURN') + if data: + add_collection_to_versions_and_dates(data, self.collection_name, is_module=True, return_docs=True) + self._validate_docs_schema(data, return_schema(for_collection=bool(self.collection)), + 'RETURN', 'return-syntax-error') + + for error in errors: + self.reporter.error( + path=self.object_path, + code='return-syntax-error', + **error + ) + for trace in traces: + self.reporter.trace( + path=self.object_path, + tracebk=trace + ) + + # Check for mismatched deprecation + if not self.collection: + mismatched_deprecation = True + if not (filename_deprecated_or_removed or removed or deprecated or doc_deprecated): + mismatched_deprecation = False + else: + if (filename_deprecated_or_removed and doc_deprecated): + mismatched_deprecation = False + if (filename_deprecated_or_removed and removed and not (documentation_exists or examples_exist or returns_exist)): + mismatched_deprecation = False + + if mismatched_deprecation: + self.reporter.error( + path=self.object_path, + code='deprecation-mismatch', + msg='Module deprecation/removed must agree in documentation, by prepending filename with' + ' "_", and setting DOCUMENTATION.deprecated for deprecation or by removing all' + ' documentation for removed' + ) + else: + # We are testing a collection + if self.object_name.startswith('_'): + self.reporter.error( + path=self.object_path, + code='collections-no-underscore-on-deprecation', + msg='Deprecated content in collections MUST NOT start with "_", update meta/runtime.yml instead', + ) + + if not (doc_deprecated == routing_says_deprecated): + # DOCUMENTATION.deprecated and meta/runtime.yml disagree + self.reporter.error( + path=self.object_path, + code='deprecation-mismatch', + 
msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree.' + ) + elif routing_says_deprecated: + # Both DOCUMENTATION.deprecated and meta/runtime.yml agree that the module is deprecated. + # Make sure they give the same version or date. + routing_date = routing_deprecation.get('removal_date') + routing_version = routing_deprecation.get('removal_version') + # The versions and dates in the module documentation are auto-tagged, so remove the tag + # to make comparison possible and to avoid confusing the user. + documentation_date = doc_deprecation.get('removed_at_date') + documentation_version = doc_deprecation.get('removed_in') + if not compare_dates(routing_date, documentation_date): + self.reporter.error( + path=self.object_path, + code='deprecation-mismatch', + msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree on removal date: %r vs. %r' % ( + routing_date, documentation_date) + ) + if routing_version != documentation_version: + self.reporter.error( + path=self.object_path, + code='deprecation-mismatch', + msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree on removal version: %r vs. %r' % ( + routing_version, documentation_version) + ) + + # In the future we should error if ANSIBLE_METADATA exists in a collection + + return doc_info, doc + + def _check_version_added(self, doc, existing_doc): + version_added_raw = doc.get('version_added') + try: + collection_name = doc.get('version_added_collection') + version_added = self._create_strict_version( + str(version_added_raw or '0.0'), + collection_name=collection_name) + except ValueError as e: + version_added = version_added_raw or '0.0' + if self._is_new_module() or version_added != 'historical': + # already reported during schema validation, except: + if version_added == 'historical': + self.reporter.error( + path=self.object_path, + code='module-invalid-version-added', + msg='version_added is not a valid version number: %r. Error: %s' % (version_added, e) + ) + return + + if existing_doc and str(version_added_raw) != str(existing_doc.get('version_added')): + self.reporter.error( + path=self.object_path, + code='module-incorrect-version-added', + msg='version_added should be %r. Currently %r' % (existing_doc.get('version_added'), version_added_raw) + ) + + if not self._is_new_module(): + return + + should_be = '.'.join(ansible_version.split('.')[:2]) + strict_ansible_version = self._create_strict_version(should_be, collection_name='ansible.builtin') + + if (version_added < strict_ansible_version or + strict_ansible_version < version_added): + self.reporter.error( + path=self.object_path, + code='module-incorrect-version-added', + msg='version_added should be %r. 
Currently %r' % (should_be, version_added_raw) + ) + + def _validate_ansible_module_call(self, docs): + try: + spec, kwargs = get_argument_spec(self.path, self.collection) + except AnsibleModuleNotInitialized: + self.reporter.error( + path=self.object_path, + code='ansible-module-not-initialized', + msg="Execution of the module did not result in initialization of AnsibleModule", + ) + return + except AnsibleModuleImportError as e: + self.reporter.error( + path=self.object_path, + code='import-error', + msg="Exception attempting to import module for argument_spec introspection, '%s'" % e + ) + self.reporter.trace( + path=self.object_path, + tracebk=traceback.format_exc() + ) + return + + schema = ansible_module_kwargs_schema(self.object_name.split('.')[0], for_collection=bool(self.collection)) + self._validate_docs_schema(kwargs, schema, 'AnsibleModule', 'invalid-ansiblemodule-schema') + + self._validate_argument_spec(docs, spec, kwargs) + + def _validate_list_of_module_args(self, name, terms, spec, context): + if terms is None: + return + if not isinstance(terms, (list, tuple)): + # This is already reported by schema checking + return + for check in terms: + if not isinstance(check, (list, tuple)): + # This is already reported by schema checking + continue + bad_term = False + for term in check: + if not isinstance(term, string_types): + msg = name + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must contain strings in the lists or tuples; found value %r" % (term, ) + self.reporter.error( + path=self.object_path, + code=name + '-type', + msg=msg, + ) + bad_term = True + if bad_term: + continue + if len(set(check)) != len(check): + msg = name + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has repeated terms" + self.reporter.error( + path=self.object_path, + code=name + '-collision', + msg=msg, + ) + if not set(check) <= set(spec): + msg = name + if context: + msg += " found in %s" % " -> ".join(context) + msg += " contains terms which are not part of argument_spec: %s" % ", ".join(sorted(set(check).difference(set(spec)))) + self.reporter.error( + path=self.object_path, + code=name + '-unknown', + msg=msg, + ) + + def _validate_required_if(self, terms, spec, context, module): + if terms is None: + return + if not isinstance(terms, (list, tuple)): + # This is already reported by schema checking + return + for check in terms: + if not isinstance(check, (list, tuple)) or len(check) not in [3, 4]: + # This is already reported by schema checking + continue + if len(check) == 4 and not isinstance(check[3], bool): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must have fourth value omitted or of type bool; got %r" % (check[3], ) + self.reporter.error( + path=self.object_path, + code='required_if-is_one_of-type', + msg=msg, + ) + requirements = check[2] + if not isinstance(requirements, (list, tuple)): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must have third value (requirements) being a list or tuple; got type %r" % (requirements, ) + self.reporter.error( + path=self.object_path, + code='required_if-requirements-type', + msg=msg, + ) + continue + bad_term = False + for term in requirements: + if not isinstance(term, string_types): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must have only strings in third value (requirements); got %r" % (term, ) + self.reporter.error( + path=self.object_path,
code='required_if-requirements-type', + msg=msg, + ) + bad_term = True + if bad_term: + continue + if len(set(requirements)) != len(requirements): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has repeated terms in requirements" + self.reporter.error( + path=self.object_path, + code='required_if-requirements-collision', + msg=msg, + ) + if not set(requirements) <= set(spec): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " contains terms in requirements which are not part of argument_spec: %s" % ", ".join(sorted(set(requirements).difference(set(spec)))) + self.reporter.error( + path=self.object_path, + code='required_if-requirements-unknown', + msg=msg, + ) + key = check[0] + if key not in spec: + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must have its key %s in argument_spec" % key + self.reporter.error( + path=self.object_path, + code='required_if-unknown-key', + msg=msg, + ) + continue + if key in requirements: + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " contains its key %s in requirements" % key + self.reporter.error( + path=self.object_path, + code='required_if-key-in-requirements', + msg=msg, + ) + value = check[1] + if value is not None: + _type = spec[key].get('type', 'str') + if callable(_type): + _type_checker = _type + else: + _type_checker = DEFAULT_TYPE_VALIDATORS.get(_type) + try: + with CaptureStd(): + dummy = _type_checker(value) + except (Exception, SystemExit): + msg = "required_if" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has value %r which does not fit to %s's parameter type %r" % (value, key, _type) + self.reporter.error( + path=self.object_path, + code='required_if-value-type', + msg=msg, + ) + + def _validate_required_by(self, terms, spec, context): + if terms is None: + return + if not isinstance(terms, Mapping): + # This is already reported by schema checking + return + for key, value in terms.items(): + if isinstance(value, string_types): + value = [value] + if not isinstance(value, (list, tuple)): + # This is already reported by schema checking + continue + for term in value: + if not isinstance(term, string_types): + # This is already reported by schema checking + continue + if len(set(value)) != len(value) or key in value: + msg = "required_by" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has repeated terms" + self.reporter.error( + path=self.object_path, + code='required_by-collision', + msg=msg, + ) + if not set(value) <= set(spec) or key not in spec: + msg = "required_by" + if context: + msg += " found in %s" % " -> ".join(context) + msg += " contains terms which are not part of argument_spec: %s" % ", ".join(sorted(set(value).difference(set(spec)))) + self.reporter.error( + path=self.object_path, + code='required_by-unknown', + msg=msg, + ) + + def _validate_argument_spec(self, docs, spec, kwargs, context=None, last_context_spec=None): + if not self.analyze_arg_spec: + return + + if docs is None: + docs = {} + + if context is None: + context = [] + + if last_context_spec is None: + last_context_spec = kwargs + + try: + if not context: + add_fragments(docs, self.object_path, fragment_loader=fragment_loader, is_module=True) + except Exception: + # Cannot merge fragments + return + + # Use this to access type checkers later + module = NoArgsAnsibleModule({}) + + 
self._validate_list_of_module_args('mutually_exclusive', last_context_spec.get('mutually_exclusive'), spec, context) + self._validate_list_of_module_args('required_together', last_context_spec.get('required_together'), spec, context) + self._validate_list_of_module_args('required_one_of', last_context_spec.get('required_one_of'), spec, context) + self._validate_required_if(last_context_spec.get('required_if'), spec, context, module) + self._validate_required_by(last_context_spec.get('required_by'), spec, context) + + provider_args = set() + args_from_argspec = set() + deprecated_args_from_argspec = set() + doc_options = docs.get('options', {}) + if doc_options is None: + doc_options = {} + for arg, data in spec.items(): + restricted_argument_names = ('message', 'syslog_facility') + if arg.lower() in restricted_argument_names: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must not be one of %s as it is used " \ + "internally by the Ansible Core Engine" % (",".join(restricted_argument_names)) + self.reporter.error( + path=self.object_path, + code='invalid-argument-name', + msg=msg, + ) + continue + if 'aliases' in data: + for al in data['aliases']: + if al.lower() in restricted_argument_names: + msg = "Argument alias '%s' in argument_spec" % al + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must not be one of %s as it is used " \ + "internally by the Ansible Core Engine" % (",".join(restricted_argument_names)) + self.reporter.error( + path=self.object_path, + code='invalid-argument-name', + msg=msg, + ) + continue + + # Could this be a place where secrets are leaked? + # If it is type: path we know it's not a secret key as it's a file path. + # If it is type: bool it is more likely a flag indicating that something is secret, than an actual secret. + if all(( + data.get('no_log') is None, is_potential_secret_option(arg), + data.get('type') not in ("path", "bool"), data.get('choices') is None, + )): + msg = "Argument '%s' in argument_spec could be a secret, though it doesn't have `no_log` set" % arg + if context: + msg += " found in %s" % " -> ".join(context) + self.reporter.error( + path=self.object_path, + code='no-log-needed', + msg=msg, + ) + + if not isinstance(data, dict): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " must be a dictionary/hash when used" + self.reporter.error( + path=self.object_path, + code='invalid-argument-spec', + msg=msg, + ) + continue + + removed_at_date = data.get('removed_at_date', None) + if removed_at_date is not None: + try: + if parse_isodate(removed_at_date, allow_date=False) < datetime.date.today(): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has a removed_at_date '%s' before today" % removed_at_date + self.reporter.error( + path=self.object_path, + code='deprecated-date', + msg=msg, + ) + except ValueError: + # This should only happen when removed_at_date is not in ISO format. Since schema + # validation already reported this as an error, don't report it a second time. 
+ pass + + deprecated_aliases = data.get('deprecated_aliases', None) + if deprecated_aliases is not None: + for deprecated_alias in deprecated_aliases: + if 'name' in deprecated_alias and 'date' in deprecated_alias: + try: + date = deprecated_alias['date'] + if parse_isodate(date, allow_date=False) < datetime.date.today(): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with removal date '%s' before today" % ( + deprecated_alias['name'], deprecated_alias['date']) + self.reporter.error( + path=self.object_path, + code='deprecated-date', + msg=msg, + ) + except ValueError: + # This should only happen when deprecated_alias['date'] is not in ISO format. Since + # schema validation already reported this as an error, don't report it a second + # time. + pass + + has_version = False + if self.collection and self.collection_version is not None: + compare_version = self.collection_version + version_of_what = "this collection (%s)" % self.collection_version_str + code_prefix = 'collection' + has_version = True + elif not self.collection: + compare_version = LOOSE_ANSIBLE_VERSION + version_of_what = "Ansible (%s)" % ansible_version + code_prefix = 'ansible' + has_version = True + + removed_in_version = data.get('removed_in_version', None) + if removed_in_version is not None: + try: + collection_name = data.get('removed_from_collection') + removed_in = self._create_version(str(removed_in_version), collection_name=collection_name) + if has_version and collection_name == self.collection_name and compare_version >= removed_in: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has a deprecated removed_in_version %r," % removed_in_version + msg += " i.e. the version is less than or equal to the current version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code=code_prefix + '-deprecated-version', + msg=msg, + ) + except ValueError as e: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has an invalid removed_in_version number %r: %s" % (removed_in_version, e) + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + except TypeError: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has an invalid removed_in_version number %r: " % (removed_in_version, ) + msg += " error while comparing to version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + + if deprecated_aliases is not None: + for deprecated_alias in deprecated_aliases: + if 'name' in deprecated_alias and 'version' in deprecated_alias: + try: + collection_name = deprecated_alias.get('collection_name') + version = self._create_version(str(deprecated_alias['version']), collection_name=collection_name) + if has_version and collection_name == self.collection_name and compare_version >= version: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with removal in version %r," % ( + deprecated_alias['name'], deprecated_alias['version']) + msg += " i.e. 
the version is less than or equal to the current version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code=code_prefix + '-deprecated-version', + msg=msg, + ) + except ValueError as e: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with invalid removal version %r: %s" % ( + deprecated_alias['name'], deprecated_alias['version'], e) + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + except TypeError: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has deprecated aliases '%s' with invalid removal version %r:" % ( + deprecated_alias['name'], deprecated_alias['version']) + msg += " error while comparing to version of %s" % version_of_what + self.reporter.error( + path=self.object_path, + code='invalid-deprecated-version', + msg=msg, + ) + + aliases = data.get('aliases', []) + if arg in aliases: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " is specified as its own alias" + self.reporter.error( + path=self.object_path, + code='parameter-alias-self', + msg=msg + ) + if len(aliases) > len(set(aliases)): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has at least one alias specified multiple times in aliases" + self.reporter.error( + path=self.object_path, + code='parameter-alias-repeated', + msg=msg + ) + if not context and arg == 'state': + bad_states = set(['list', 'info', 'get']) & set(data.get('choices', set())) + for bad_state in bad_states: + self.reporter.error( + path=self.object_path, + code='parameter-state-invalid-choice', + msg="Argument 'state' includes the value '%s' as a choice" % bad_state) + if not data.get('removed_in_version', None) and not data.get('removed_at_date', None): + args_from_argspec.add(arg) + args_from_argspec.update(aliases) + else: + deprecated_args_from_argspec.add(arg) + deprecated_args_from_argspec.update(aliases) + if arg == 'provider' and self.object_path.startswith('lib/ansible/modules/network/'): + if data.get('options') is not None and not isinstance(data.get('options'), Mapping): + self.reporter.error( + path=self.object_path, + code='invalid-argument-spec-options', + msg="Argument 'options' in argument_spec['provider'] must be a dictionary/hash when used", + ) + elif data.get('options'): + # Record provider options from network modules, for later comparison + for provider_arg, provider_data in data.get('options', {}).items(): + provider_args.add(provider_arg) + provider_args.update(provider_data.get('aliases', [])) + + if data.get('required') and data.get('default', object) != object: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " is marked as required but specifies a default. 
Arguments with a" \ + " default should not be marked as required" + self.reporter.error( + path=self.object_path, + code='no-default-for-required-parameter', + msg=msg + ) + + if arg in provider_args: + # Provider args are being removed from network module top level + # don't validate docs<->arg_spec checks below + continue + + _type = data.get('type', 'str') + if callable(_type): + _type_checker = _type + else: + _type_checker = DEFAULT_TYPE_VALIDATORS.get(_type) + + _elements = data.get('elements') + if (_type == 'list') and not _elements: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines type as list but elements is not defined" + self.reporter.error( + path=self.object_path, + code='parameter-list-no-elements', + msg=msg + ) + if _elements: + if not callable(_elements): + DEFAULT_TYPE_VALIDATORS.get(_elements) + if _type != 'list': + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines elements as %s but it is valid only when value of parameter type is list" % _elements + self.reporter.error( + path=self.object_path, + code='parameter-invalid-elements', + msg=msg + ) + + arg_default = None + if 'default' in data and not is_empty(data['default']): + try: + with CaptureStd(): + arg_default = _type_checker(data['default']) + except (Exception, SystemExit): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines default as (%r) but this is incompatible with parameter type %r" % (data['default'], _type) + self.reporter.error( + path=self.object_path, + code='incompatible-default-type', + msg=msg + ) + continue + + doc_options_args = [] + for alias in sorted(set([arg] + list(aliases))): + if alias in doc_options: + doc_options_args.append(alias) + if len(doc_options_args) == 0: + # Undocumented arguments will be handled later (search for undocumented-parameter) + doc_options_arg = {} + else: + doc_options_arg = doc_options[doc_options_args[0]] + if len(doc_options_args) > 1: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " with aliases %s is documented multiple times, namely as %s" % ( + ", ".join([("'%s'" % alias) for alias in aliases]), + ", ".join([("'%s'" % alias) for alias in doc_options_args]) + ) + self.reporter.error( + path=self.object_path, + code='parameter-documented-multiple-times', + msg=msg + ) + + try: + doc_default = None + if 'default' in doc_options_arg and not is_empty(doc_options_arg['default']): + with CaptureStd(): + doc_default = _type_checker(doc_options_arg['default']) + except (Exception, SystemExit): + msg = "Argument '%s' in documentation" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines default as (%r) but this is incompatible with parameter type %r" % (doc_options_arg.get('default'), _type) + self.reporter.error( + path=self.object_path, + code='doc-default-incompatible-type', + msg=msg + ) + continue + + if arg_default != doc_default: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines default as (%r) but documentation defines default as (%r)" % (arg_default, doc_default) + self.reporter.error( + path=self.object_path, + code='doc-default-does-not-match-spec', + msg=msg + ) + + doc_type = doc_options_arg.get('type') + if 'type' in data and data['type'] is not None: 
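+                # An explicitly declared type must be mirrored in the documentation;
+                # a missing or differing documented type is reported below.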
+ if doc_type is None: + if not arg.startswith('_'): # hidden parameter, for example _raw_params + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines type as %r but documentation doesn't define type" % (data['type']) + self.reporter.error( + path=self.object_path, + code='parameter-type-not-in-doc', + msg=msg + ) + elif data['type'] != doc_type: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines type as %r but documentation defines type as %r" % (data['type'], doc_type) + self.reporter.error( + path=self.object_path, + code='doc-type-does-not-match-spec', + msg=msg + ) + else: + if doc_type is None: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " uses default type ('str') but documentation doesn't define type" + self.reporter.error( + path=self.object_path, + code='doc-missing-type', + msg=msg + ) + elif doc_type != 'str': + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " implies type as 'str' but documentation defines as %r" % doc_type + self.reporter.error( + path=self.object_path, + code='implied-parameter-type-mismatch', + msg=msg + ) + + doc_choices = [] + try: + for choice in doc_options_arg.get('choices', []): + try: + with CaptureStd(): + doc_choices.append(_type_checker(choice)) + except (Exception, SystemExit): + msg = "Argument '%s' in documentation" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines choices as (%r) but this is incompatible with argument type %r" % (choice, _type) + self.reporter.error( + path=self.object_path, + code='doc-choices-incompatible-type', + msg=msg + ) + raise StopIteration() + except StopIteration: + continue + + arg_choices = [] + try: + for choice in data.get('choices', []): + try: + with CaptureStd(): + arg_choices.append(_type_checker(choice)) + except (Exception, SystemExit): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines choices as (%r) but this is incompatible with argument type %r" % (choice, _type) + self.reporter.error( + path=self.object_path, + code='incompatible-choices', + msg=msg + ) + raise StopIteration() + except StopIteration: + continue + + if not compare_unordered_lists(arg_choices, doc_choices): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines choices as (%r) but documentation defines choices as (%r)" % (arg_choices, doc_choices) + self.reporter.error( + path=self.object_path, + code='doc-choices-do-not-match-spec', + msg=msg + ) + + doc_required = doc_options_arg.get('required', False) + data_required = data.get('required', False) + if (doc_required or data_required) and not (doc_required and data_required): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + if doc_required: + msg += " is not required, but is documented as being required" + else: + msg += " is required, but is not documented as being required" + self.reporter.error( + path=self.object_path, + code='doc-required-mismatch', + msg=msg + ) + + doc_elements = doc_options_arg.get('elements', None) + doc_type = doc_options_arg.get('type', 'str') + data_elements = data.get('elements', None) + if (doc_elements and not doc_type == 
'list'): + msg = "Argument '%s'" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " defines parameter elements as %s but it is valid only when value of parameter type is list" % doc_elements + self.reporter.error( + path=self.object_path, + code='doc-elements-invalid', + msg=msg + ) + if (doc_elements or data_elements) and not (doc_elements == data_elements): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + if data_elements: + msg += " specifies elements as %s," % data_elements + else: + msg += " does not specify elements," + if doc_elements: + msg += " but elements is documented as being %s" % doc_elements + else: + msg += " but elements is not documented" + self.reporter.error( + path=self.object_path, + code='doc-elements-mismatch', + msg=msg + ) + + spec_suboptions = data.get('options') + doc_suboptions = doc_options_arg.get('suboptions', {}) + if spec_suboptions: + if not doc_suboptions: + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " has sub-options but documentation does not define it" + self.reporter.error( + path=self.object_path, + code='missing-suboption-docs', + msg=msg + ) + self._validate_argument_spec({'options': doc_suboptions}, spec_suboptions, kwargs, + context=context + [arg], last_context_spec=data) + + for arg in args_from_argspec: + if not str(arg).isidentifier(): + msg = "Argument '%s' in argument_spec" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " is not a valid python identifier" + self.reporter.error( + path=self.object_path, + code='parameter-invalid', + msg=msg + ) + + if docs: + args_from_docs = set() + for arg, data in doc_options.items(): + args_from_docs.add(arg) + args_from_docs.update(data.get('aliases', [])) + + args_missing_from_docs = args_from_argspec.difference(args_from_docs) + docs_missing_from_args = args_from_docs.difference(args_from_argspec | deprecated_args_from_argspec) + for arg in args_missing_from_docs: + if arg in provider_args: + # Provider args are being removed from network module top level + # So they are likely not documented on purpose + continue + msg = "Argument '%s'" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " is listed in the argument_spec, but not documented in the module documentation" + self.reporter.error( + path=self.object_path, + code='undocumented-parameter', + msg=msg + ) + for arg in docs_missing_from_args: + msg = "Argument '%s'" % arg + if context: + msg += " found in %s" % " -> ".join(context) + msg += " is listed in DOCUMENTATION.options, but not accepted by the module argument_spec" + self.reporter.error( + path=self.object_path, + code='nonexistent-parameter-documented', + msg=msg + ) + + def _check_for_new_args(self, doc): + if not self.base_branch or self._is_new_module(): + return + + with CaptureStd(): + try: + existing_doc, dummy_examples, dummy_return, existing_metadata = get_docstring( + self.base_module, fragment_loader, verbose=True, collection_name=self.collection_name, is_module=True) + existing_options = existing_doc.get('options', {}) or {} + except AssertionError: + fragment = doc['extends_documentation_fragment'] + self.reporter.warning( + path=self.object_path, + code='missing-existing-doc-fragment', + msg='Pre-existing DOCUMENTATION fragment missing: %s' % fragment + ) + return + except Exception as e: + self.reporter.warning_trace( + path=self.object_path, + tracebk=e + ) + 
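+            # The exception detail was recorded above for TRACE output; a generic
+            # warning follows.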
self.reporter.warning( + path=self.object_path, + code='unknown-doc-fragment', + msg=('Unknown pre-existing DOCUMENTATION error, see TRACE. Submodule refs may need to be updated') + ) + return + + try: + mod_collection_name = existing_doc.get('version_added_collection') + mod_version_added = self._create_strict_version( + str(existing_doc.get('version_added', '0.0')), + collection_name=mod_collection_name) + except ValueError: + mod_collection_name = self.collection_name + mod_version_added = self._create_strict_version('0.0') + + options = doc.get('options', {}) or {} + + should_be = '.'.join(ansible_version.split('.')[:2]) + strict_ansible_version = self._create_strict_version(should_be, collection_name='ansible.builtin') + + for option, details in options.items(): + try: + names = [option] + details.get('aliases', []) + except (TypeError, AttributeError): + # Reporting of this syntax error will be handled by schema validation. + continue + + if any(name in existing_options for name in names): + # The option already existed. Make sure version_added didn't change. + for name in names: + existing_collection_name = existing_options.get(name, {}).get('version_added_collection') + existing_version = existing_options.get(name, {}).get('version_added') + if existing_version: + break + current_collection_name = details.get('version_added_collection') + current_version = details.get('version_added') + if current_collection_name != existing_collection_name: + self.reporter.error( + path=self.object_path, + code='option-incorrect-version-added-collection', + msg=('version_added for existing option (%s) should ' + 'belong to collection %r. Currently belongs to %r' % + (option, current_collection_name, existing_collection_name)) + ) + elif str(current_version) != str(existing_version): + self.reporter.error( + path=self.object_path, + code='option-incorrect-version-added', + msg=('version_added for existing option (%s) should ' + 'be %r. Currently %r' % + (option, existing_version, current_version)) + ) + continue + + try: + collection_name = details.get('version_added_collection') + version_added = self._create_strict_version( + str(details.get('version_added', '0.0')), + collection_name=collection_name) + except ValueError as e: + # already reported during schema validation + continue + + if collection_name != self.collection_name: + continue + if (strict_ansible_version != mod_version_added and + (version_added < strict_ansible_version or + strict_ansible_version < version_added)): + self.reporter.error( + path=self.object_path, + code='option-incorrect-version-added', + msg=('version_added for new option (%s) should ' + 'be %r. 
Currently %r' % + (option, should_be, version_added)) + ) + + return existing_doc + + @staticmethod + def is_on_rejectlist(path): + base_name = os.path.basename(path) + file_name = os.path.splitext(base_name)[0] + + if file_name.startswith('_') and os.path.islink(path): + return True + + if not frozenset((base_name, file_name)).isdisjoint(ModuleValidator.REJECTLIST): + return True + + for pat in ModuleValidator.REJECTLIST_PATTERNS: + if fnmatch(base_name, pat): + return True + + return False + + def validate(self): + super(ModuleValidator, self).validate() + if not self._python_module() and not self._powershell_module(): + self.reporter.error( + path=self.object_path, + code='invalid-extension', + msg=('Official Ansible modules must have a .py ' + 'extension for python modules or a .ps1 ' + 'for powershell modules') + ) + self._python_module_override = True + + if self._python_module() and self.ast is None: + self.reporter.error( + path=self.object_path, + code='python-syntax-error', + msg='Python SyntaxError while parsing module' + ) + try: + compile(self.text, self.path, 'exec') + except Exception: + self.reporter.trace( + path=self.object_path, + tracebk=traceback.format_exc() + ) + return + + end_of_deprecation_should_be_removed_only = False + if self._python_module(): + doc_info, docs = self._validate_docs() + + # See if current version => deprecated.removed_in, ie, should be docs only + if docs and docs.get('deprecated', False): + + if 'removed_in' in docs['deprecated']: + removed_in = None + collection_name = docs['deprecated'].get('removed_from_collection') + version = docs['deprecated']['removed_in'] + if collection_name != self.collection_name: + self.reporter.error( + path=self.object_path, + code='invalid-module-deprecation-source', + msg=('The deprecation version for a module must be added in this collection') + ) + else: + try: + removed_in = self._create_strict_version(str(version), collection_name=collection_name) + except ValueError as e: + self.reporter.error( + path=self.object_path, + code='invalid-module-deprecation-version', + msg=('The deprecation version %r cannot be parsed: %s' % (version, e)) + ) + + if removed_in: + if not self.collection: + strict_ansible_version = self._create_strict_version( + '.'.join(ansible_version.split('.')[:2]), self.collection_name) + end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in + + if end_of_deprecation_should_be_removed_only: + self.reporter.error( + path=self.object_path, + code='ansible-deprecated-module', + msg='Module is marked for removal in version %s of Ansible when the current version is %s' % ( + version, ansible_version), + ) + elif self.collection_version: + strict_ansible_version = self.collection_version + end_of_deprecation_should_be_removed_only = strict_ansible_version >= removed_in + + if end_of_deprecation_should_be_removed_only: + self.reporter.error( + path=self.object_path, + code='collection-deprecated-module', + msg='Module is marked for removal in version %s of this collection when the current version is %s' % ( + version, self.collection_version_str), + ) + + # handle deprecation by date + if 'removed_at_date' in docs['deprecated']: + try: + removed_at_date = docs['deprecated']['removed_at_date'] + if parse_isodate(removed_at_date, allow_date=True) < datetime.date.today(): + msg = "Module's deprecated.removed_at_date date '%s' is before today" % removed_at_date + self.reporter.error(path=self.object_path, code='deprecated-date', msg=msg) + except ValueError: + # This happens 
if the date cannot be parsed. This is already checked by the schema. + pass + + if self._python_module() and not self._just_docs() and not end_of_deprecation_should_be_removed_only: + self._validate_ansible_module_call(docs) + self._check_for_sys_exit() + self._find_rejectlist_imports() + self._find_module_utils() + self._find_has_import() + first_callable = self._get_first_callable() + self._ensure_imports_below_docs(doc_info, first_callable) + self._check_for_subprocess() + self._check_for_os_call() + + if self._powershell_module(): + if self.basename in self.PS_DOC_REJECTLIST: + return + + self._validate_ps_replacers() + docs_path = self._find_ps_docs_py_file() + + # We can only validate PowerShell arg spec if it is using the new Ansible.Basic.AnsibleModule util + pattern = r'(?im)^#\s*ansiblerequires\s+\-csharputil\s*Ansible\.Basic' + if re.search(pattern, self.text) and self.object_name not in self.PS_ARG_VALIDATE_REJECTLIST: + with ModuleValidator(docs_path, base_branch=self.base_branch, git_cache=self.git_cache) as docs_mv: + docs = docs_mv._validate_docs()[1] + self._validate_ansible_module_call(docs) + + self._check_gpl3_header() + if not self._just_docs() and not end_of_deprecation_should_be_removed_only: + self._check_interpreter(powershell=self._powershell_module()) + self._check_type_instead_of_isinstance( + powershell=self._powershell_module() + ) + + +class PythonPackageValidator(Validator): + REJECTLIST_FILES = frozenset(('__pycache__',)) + + def __init__(self, path, reporter=None): + super(PythonPackageValidator, self).__init__(reporter=reporter or Reporter()) + + self.path = path + self.basename = os.path.basename(path) + + @property + def object_name(self): + return self.basename + + @property + def object_path(self): + return self.path + + def validate(self): + super(PythonPackageValidator, self).validate() + + if self.basename in self.REJECTLIST_FILES: + return + + init_file = os.path.join(self.path, '__init__.py') + if not os.path.exists(init_file): + self.reporter.error( + path=self.object_path, + code='subdirectory-missing-init', + msg='Ansible module subdirectories must contain an __init__.py' + ) + + +def setup_collection_loader(): + collections_paths = os.environ.get('ANSIBLE_COLLECTIONS_PATH', '').split(os.pathsep) + _AnsibleCollectionFinder(collections_paths) + + +def re_compile(value): + """ + Argparse expects things to raise TypeError, re.compile raises an re.error + exception + + This function is a shorthand to convert the re.error exception to a + TypeError + """ + + try: + return re.compile(value) + except re.error as e: + raise TypeError(e) + + +def run(): + parser = argparse.ArgumentParser(prog="validate-modules") + parser.add_argument('modules', nargs='+', + help='Path to module or module directory') + parser.add_argument('-w', '--warnings', help='Show warnings', + action='store_true') + parser.add_argument('--exclude', help='RegEx exclusion pattern', + type=re_compile) + parser.add_argument('--arg-spec', help='Analyze module argument spec', + action='store_true', default=False) + parser.add_argument('--base-branch', default=None, + help='Used in determining if new options were added') + parser.add_argument('--format', choices=['json', 'plain'], default='plain', + help='Output format. Default: "%(default)s"') + parser.add_argument('--output', default='-', + help='Output location, use "-" for stdout. 
' + 'Default "%(default)s"') + parser.add_argument('--collection', + help='Specifies the path to the collection, when ' + 'validating files within a collection. Ensure ' + 'that ANSIBLE_COLLECTIONS_PATH is set so the ' + 'contents of the collection can be located') + parser.add_argument('--collection-version', + help='The collection\'s version number used to check ' + 'deprecations') + + args = parser.parse_args() + + args.modules = [m.rstrip('/') for m in args.modules] + + reporter = Reporter() + git_cache = GitCache(args.base_branch) + + check_dirs = set() + + routing = None + if args.collection: + setup_collection_loader() + routing_file = 'meta/runtime.yml' + # Load meta/runtime.yml if it exists, as it may contain deprecation information + if os.path.isfile(routing_file): + try: + with open(routing_file) as f: + routing = yaml.safe_load(f) + except yaml.error.MarkedYAMLError as ex: + print('%s:%d:%d: YAML load failed: %s' % (routing_file, ex.context_mark.line + 1, ex.context_mark.column + 1, re.sub(r'\s+', ' ', str(ex)))) + except Exception as ex: # pylint: disable=broad-except + print('%s:%d:%d: YAML load failed: %s' % (routing_file, 0, 0, re.sub(r'\s+', ' ', str(ex)))) + + for module in args.modules: + if os.path.isfile(module): + path = module + if args.exclude and args.exclude.search(path): + continue + if ModuleValidator.is_on_rejectlist(path): + continue + with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, + analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, + git_cache=git_cache, reporter=reporter, routing=routing) as mv1: + mv1.validate() + check_dirs.add(os.path.dirname(path)) + + for root, dirs, files in os.walk(module): + basedir = root[len(module) + 1:].split('/', 1)[0] + if basedir in REJECTLIST_DIRS: + continue + for dirname in dirs: + if root == module and dirname in REJECTLIST_DIRS: + continue + path = os.path.join(root, dirname) + if args.exclude and args.exclude.search(path): + continue + check_dirs.add(path) + + for filename in files: + path = os.path.join(root, filename) + if args.exclude and args.exclude.search(path): + continue + if ModuleValidator.is_on_rejectlist(path): + continue + with ModuleValidator(path, collection=args.collection, collection_version=args.collection_version, + analyze_arg_spec=args.arg_spec, base_branch=args.base_branch, + git_cache=git_cache, reporter=reporter, routing=routing) as mv2: + mv2.validate() + + if not args.collection: + for path in sorted(check_dirs): + pv = PythonPackageValidator(path, reporter=reporter) + pv.validate() + + if args.format == 'plain': + sys.exit(reporter.plain(warnings=args.warnings, output=args.output)) + else: + sys.exit(reporter.json(warnings=args.warnings, output=args.output)) + + +class GitCache: + def __init__(self, base_branch): + self.base_branch = base_branch + + if self.base_branch: + self.base_tree = self._git(['ls-tree', '-r', '--name-only', self.base_branch, 'lib/ansible/modules/']) + else: + self.base_tree = [] + + try: + self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/']) + except GitError as ex: + if ex.status == 128: + # fallback when there is no .git directory + self.head_tree = self._get_module_files() + else: + raise + except OSError as ex: + if ex.errno == errno.ENOENT: + # fallback when git is not installed + self.head_tree = self._get_module_files() + else: + raise + + self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in ('.py', 
'.ps1')) + + self.base_module_paths.pop('__init__.py', None) + + self.head_aliased_modules = set() + + for path in self.head_tree: + filename = os.path.basename(path) + + if filename.startswith('_') and filename != '__init__.py': + if os.path.islink(path): + self.head_aliased_modules.add(os.path.basename(os.path.realpath(path))) + + @staticmethod + def _get_module_files(): + module_files = [] + + for (dir_path, dir_names, file_names) in os.walk('lib/ansible/modules/'): + for file_name in file_names: + module_files.append(os.path.join(dir_path, file_name)) + + return module_files + + @staticmethod + def _git(args): + cmd = ['git'] + args + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = p.communicate() + if p.returncode != 0: + raise GitError(stderr, p.returncode) + return stdout.decode('utf-8').splitlines() + + +class GitError(Exception): + def __init__(self, message, status): + super(GitError, self).__init__(message) + + self.status = status + + +def main(): + try: + run() + except KeyboardInterrupt: + pass diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py new file mode 100644 index 0000000000..8cd0e5e560 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/module_args.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2016 Matt Martz +# Copyright (C) 2016 Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
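+# This file recovers a module's argument_spec without actually executing it:
+# AnsibleModule.__init__ is temporarily replaced so that importing the module
+# captures the constructor arguments and aborts, while PowerShell modules are
+# inspected out of process via ps_argspec.ps1.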
+from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import runpy +import inspect +import json +import os +import subprocess +import sys + +from contextlib import contextmanager + +from ansible.executor.powershell.module_manifest import PSModuleDepFinder +from ansible.module_utils.basic import FILE_COMMON_ARGUMENTS, AnsibleModule +from ansible.module_utils.six import reraise +from ansible.module_utils._text import to_bytes, to_text + +from .utils import CaptureStd, find_executable, get_module_name_from_filename + + +ANSIBLE_MODULE_CONSTRUCTOR_ARGS = tuple(list(inspect.signature(AnsibleModule.__init__).parameters)[1:]) + + +class AnsibleModuleCallError(RuntimeError): + pass + + +class AnsibleModuleImportError(ImportError): + pass + + +class AnsibleModuleNotInitialized(Exception): + pass + + +class _FakeAnsibleModuleInit: + def __init__(self): + self.args = tuple() + self.kwargs = {} + self.called = False + + def __call__(self, *args, **kwargs): + if args and isinstance(args[0], AnsibleModule): + # Make sure, due to creative calling, that we didn't end up with + # ``self`` in ``args`` + self.args = args[1:] + else: + self.args = args + self.kwargs = kwargs + self.called = True + raise AnsibleModuleCallError('AnsibleModuleCallError') + + +def _fake_load_params(): + pass + + +@contextmanager +def setup_env(filename): + # Used to clean up imports later + pre_sys_modules = list(sys.modules.keys()) + + fake = _FakeAnsibleModuleInit() + module = __import__('ansible.module_utils.basic').module_utils.basic + _original_init = module.AnsibleModule.__init__ + _original_load_params = module._load_params + setattr(module.AnsibleModule, '__init__', fake) + setattr(module, '_load_params', _fake_load_params) + + try: + yield fake + finally: + setattr(module.AnsibleModule, '__init__', _original_init) + setattr(module, '_load_params', _original_load_params) + + # Clean up imports to prevent issues with mutable data being used in modules + for k in list(sys.modules.keys()): + # It's faster if we limit to items in ansible.module_utils + # But if this causes problems later, we should remove it + if k not in pre_sys_modules and k.startswith('ansible.module_utils.'): + del sys.modules[k] + + +def get_ps_argument_spec(filename, collection): + fqc_name = get_module_name_from_filename(filename, collection) + + pwsh = find_executable('pwsh') + if not pwsh: + raise FileNotFoundError('Required program for PowerShell arg spec inspection "pwsh" not found.') + + module_path = os.path.join(os.getcwd(), filename) + b_module_path = to_bytes(module_path, errors='surrogate_or_strict') + with open(b_module_path, mode='rb') as module_fd: + b_module_data = module_fd.read() + + ps_dep_finder = PSModuleDepFinder() + ps_dep_finder.scan_module(b_module_data, fqn=fqc_name) + + # For ps_argspec.ps1 to compile Ansible.Basic it also needs the AddType module_util. 
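+    # _add_module is private to PSModuleDepFinder; it is called directly because
+    # AddType is needed by ps_argspec.ps1 itself rather than imported by the module.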
+ ps_dep_finder._add_module((b"Ansible.ModuleUtils.AddType", ".psm1", None), wrapper=False) + + util_manifest = json.dumps({ + 'module_path': to_text(module_path, errors='surrogiate_or_strict'), + 'ansible_basic': ps_dep_finder.cs_utils_module["Ansible.Basic"]['path'], + 'ps_utils': dict([(name, info['path']) for name, info in ps_dep_finder.ps_modules.items()]), + }) + + script_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ps_argspec.ps1') + proc = subprocess.Popen([script_path, util_manifest], stdout=subprocess.PIPE, stderr=subprocess.PIPE, + shell=False) + stdout, stderr = proc.communicate() + + if proc.returncode != 0: + raise AnsibleModuleImportError("STDOUT:\n%s\nSTDERR:\n%s" % (stdout.decode('utf-8'), stderr.decode('utf-8'))) + + kwargs = json.loads(stdout) + + # the validate-modules code expects the options spec to be under the argument_spec key not options as set in PS + kwargs['argument_spec'] = kwargs.pop('options', {}) + + return kwargs['argument_spec'], kwargs + + +def get_py_argument_spec(filename, collection): + name = get_module_name_from_filename(filename, collection) + + with setup_env(filename) as fake: + try: + with CaptureStd(): + runpy.run_module(name, run_name='__main__', alter_sys=True) + except AnsibleModuleCallError: + pass + except BaseException as e: + # we want to catch all exceptions here, including sys.exit + reraise(AnsibleModuleImportError, AnsibleModuleImportError('%s' % e), sys.exc_info()[2]) + + if not fake.called: + raise AnsibleModuleNotInitialized() + + try: + # Convert positional arguments to kwargs to make sure that all parameters are actually checked + for arg, arg_name in zip(fake.args, ANSIBLE_MODULE_CONSTRUCTOR_ARGS): + fake.kwargs[arg_name] = arg + # for ping kwargs == {'argument_spec':{'data':{'type':'str','default':'pong'}}, 'supports_check_mode':True} + argument_spec = fake.kwargs.get('argument_spec') or {} + # If add_file_common_args is truish, add options from FILE_COMMON_ARGUMENTS when not present. + # This is the only modification to argument_spec done by AnsibleModule itself, and which is + # not caught by setup_env's AnsibleModule replacement + if fake.kwargs.get('add_file_common_args'): + for k, v in FILE_COMMON_ARGUMENTS.items(): + if k not in argument_spec: + argument_spec[k] = v + return argument_spec, fake.kwargs + except (TypeError, IndexError): + return {}, {} + + +def get_argument_spec(filename, collection): + if filename.endswith('.py'): + return get_py_argument_spec(filename, collection) + else: + return get_ps_argument_spec(filename, collection) diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1 b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1 new file mode 100755 index 0000000000..5ceb9d50b7 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/ps_argspec.ps1 @@ -0,0 +1,110 @@ +#!/usr/bin/env pwsh +#Requires -Version 6 + +Set-StrictMode -Version 2.0 +$ErrorActionPreference = "Stop" +$WarningPreference = "Stop" + +Function Resolve-CircularReference { + <# + .SYNOPSIS + Removes known types that cause a circular reference in their json serialization. 
+ + .PARAMETER Hash + The hash to scan for circular references + #> + [CmdletBinding()] + param ( + [Parameter(Mandatory=$true)] + [System.Collections.IDictionary] + $Hash + ) + + foreach ($key in [String[]]$Hash.Keys) { + $value = $Hash[$key] + if ($value -is [System.Collections.IDictionary]) { + Resolve-CircularReference -Hash $value + } elseif ($value -is [Array] -or $value -is [System.Collections.IList]) { + $values = @(foreach ($v in $value) { + if ($v -is [System.Collections.IDictionary]) { + Resolve-CircularReference -Hash $v + } + ,$v + }) + $Hash[$key] = $values + } elseif ($value -is [DateTime]) { + $Hash[$key] = $value.ToString("yyyy-MM-dd") + } elseif ($value -is [delegate]) { + # Type can be set to a delegate function which defines its own type. For the documentation we just + # reflect that as raw + if ($key -eq 'type') { + $Hash[$key] = 'raw' + } else { + $Hash[$key] = $value.ToString() # Shouldn't ever happen but just in case. + } + } + } +} + +$manifest = ConvertFrom-Json -InputObject $args[0] -AsHashtable +if (-not $manifest.Contains('module_path') -or -not $manifest.module_path) { + Write-Error -Message "No module specified." + exit 1 +} +$module_path = $manifest.module_path + +# Check if the path is relative and get the full path to the module +if (-not ([System.IO.Path]::IsPathRooted($module_path))) { + $module_path = $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($module_path) +} + +if (-not (Test-Path -LiteralPath $module_path -PathType Leaf)) { + Write-Error -Message "The module at '$module_path' does not exist." + exit 1 +} + +$module_code = Get-Content -LiteralPath $module_path -Raw + +$powershell = [PowerShell]::Create() +$powershell.Runspace.SessionStateProxy.SetVariable("ErrorActionPreference", "Stop") + +# Load the PowerShell module utils as the module may be using them to refer to shared module options. Currently we +# can only load the PowerShell utils due to cross platform compatibility issues. +if ($manifest.Contains('ps_utils')) { + foreach ($util_info in $manifest.ps_utils.GetEnumerator()) { + $util_name = $util_info.Key + $util_path = $util_info.Value + + if (-not (Test-Path -LiteralPath $util_path -PathType Leaf)) { + # Failed to find the util path, just silently ignore for now and hope for the best. + continue + } + + $util_sb = [ScriptBlock]::Create((Get-Content -LiteralPath $util_path -Raw)) + $powershell.AddCommand('New-Module').AddParameters(@{ + Name = $util_name + ScriptBlock = $util_sb + }) > $null + $powershell.AddCommand('Import-Module').AddParameter('WarningAction', 'SilentlyContinue') > $null + $powershell.AddCommand('Out-Null').AddStatement() > $null + + # Also import it into the current runspace in case ps_argspec.ps1 needs to use it. 
+ $null = New-Module -Name $util_name -ScriptBlock $util_sb | Import-Module -WarningAction SilentlyContinue + } +} + +Add-CSharpType -References @(Get-Content -LiteralPath $manifest.ansible_basic -Raw) +[Ansible.Basic.AnsibleModule]::_DebugArgSpec = $true + +$powershell.AddScript($module_code) > $null +$powershell.Invoke() > $null + +if ($powershell.HadErrors) { + $powershell.Streams.Error + exit 1 +} + +$arg_spec = $powershell.Runspace.SessionStateProxy.GetVariable('ansibleTestArgSpec') +Resolve-CircularReference -Hash $arg_spec + +ConvertTo-Json -InputObject $arg_spec -Compress -Depth 99 diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py new file mode 100644 index 0000000000..07034530c1 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/schema.py @@ -0,0 +1,587 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2015, Matt Martz +# Copyright: (c) 2015, Rackspace US, Inc. +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import re + +from ansible.module_utils.compat.version import StrictVersion +from functools import partial + +from voluptuous import ALLOW_EXTRA, PREVENT_EXTRA, All, Any, Invalid, Length, Required, Schema, Self, ValueInvalid +from ansible.module_utils.six import string_types +from ansible.module_utils.common.collections import is_iterable +from ansible.utils.version import SemanticVersion +from ansible.release import __version__ + +from .utils import parse_isodate + +list_string_types = list(string_types) +tuple_string_types = tuple(string_types) +any_string_types = Any(*string_types) + +# Valid DOCUMENTATION.author lines +# Based on Ansibulbot's extract_github_id() +# author: First Last (@name) [optional anything] +# "Ansible Core Team" - Used by the Bot +# "Michael DeHaan" - nop +# "OpenStack Ansible SIG" - OpenStack does not use GitHub +# "Name (!UNKNOWN)" - For the few untraceable authors +author_line = re.compile(r'^\w.*(\(@([\w-]+)\)|!UNKNOWN)(?![\w.])|^Ansible Core Team$|^Michael DeHaan$|^OpenStack Ansible SIG$') + + +def _add_ansible_error_code(exception, error_code): + setattr(exception, 'ansible_error_code', error_code) + return exception + + +def isodate(v, error_code=None): + try: + parse_isodate(v, allow_date=True) + except ValueError as e: + raise _add_ansible_error_code(Invalid(str(e)), error_code or 'ansible-invalid-date') + return v + + +COLLECTION_NAME_RE = re.compile(r'^([^.]+(\.[^.]+)+)$') + + +def collection_name(v, error_code=None): + if not isinstance(v, string_types): + raise _add_ansible_error_code( + Invalid('Collection name must be a string'), error_code or 'collection-invalid-name') + m = COLLECTION_NAME_RE.match(v) + if not m: + raise _add_ansible_error_code( + Invalid('Collection name must be of format `.`'), error_code or 'collection-invalid-name') + return v + + +def deprecation_versions(): + """Create a list of valid version for deprecation entries, current+4""" + major, minor = [int(version) for version in __version__.split('.')[0:2]] + return Any(*['{0}.{1}'.format(major, minor + increment) for increment in range(0, 5)]) + + +def version(for_collection=False): + if for_collection: + # We do not accept floats for versions in collections + return Any(*string_types) + return Any(float, *string_types) + + +def 
date(error_code=None): + return Any(isodate, error_code=error_code) + + +def is_callable(v): + if not callable(v): + raise ValueInvalid('not a valid value') + return v + + +def sequence_of_sequences(min=None, max=None): + return All( + Any( + None, + [Any(list, tuple)], + tuple([Any(list, tuple)]), + ), + Any( + None, + [Length(min=min, max=max)], + tuple([Length(min=min, max=max)]), + ), + ) + + +seealso_schema = Schema( + [ + Any( + { + Required('module'): Any(*string_types), + 'description': Any(*string_types), + }, + { + Required('ref'): Any(*string_types), + Required('description'): Any(*string_types), + }, + { + Required('name'): Any(*string_types), + Required('link'): Any(*string_types), + Required('description'): Any(*string_types), + }, + ), + ] +) + + +argument_spec_types = ['bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', + 'sid', 'str'] + + +argument_spec_modifiers = { + 'mutually_exclusive': sequence_of_sequences(min=2), + 'required_together': sequence_of_sequences(min=2), + 'required_one_of': sequence_of_sequences(min=2), + 'required_if': sequence_of_sequences(min=3, max=4), + 'required_by': Schema({str: Any(list_string_types, tuple_string_types, *string_types)}), +} + + +def no_required_with_default(v): + if v.get('default') and v.get('required'): + raise Invalid('required=True cannot be supplied with a default') + return v + + +def elements_with_list(v): + if v.get('elements') and v.get('type') != 'list': + raise Invalid('type must be list to use elements') + return v + + +def options_with_apply_defaults(v): + if v.get('apply_defaults') and not v.get('options'): + raise Invalid('apply_defaults=True requires options to be set') + return v + + +def check_removal_version(v, version_field, collection_name_field, error_code='invalid-removal-version'): + version = v.get(version_field) + collection_name = v.get(collection_name_field) + if not isinstance(version, string_types) or not isinstance(collection_name, string_types): + # If they are not strings, schema validation will have already complained. 
+ return v + if collection_name == 'ansible.builtin': + try: + parsed_version = StrictVersion() + parsed_version.parse(version) + except ValueError as exc: + raise _add_ansible_error_code( + Invalid('%s (%r) is not a valid ansible-core version: %s' % (version_field, version, exc)), + error_code=error_code) + return v + try: + parsed_version = SemanticVersion() + parsed_version.parse(version) + if parsed_version.major != 0 and (parsed_version.minor != 0 or parsed_version.patch != 0): + raise _add_ansible_error_code( + Invalid('%s (%r) must be a major release, not a minor or patch release (see specification at ' + 'https://semver.org/)' % (version_field, version)), + error_code='removal-version-must-be-major') + except ValueError as exc: + raise _add_ansible_error_code( + Invalid('%s (%r) is not a valid collection version (see specification at https://semver.org/): ' + '%s' % (version_field, version, exc)), + error_code=error_code) + return v + + +def option_deprecation(v): + if v.get('removed_in_version') or v.get('removed_at_date'): + if v.get('removed_in_version') and v.get('removed_at_date'): + raise _add_ansible_error_code( + Invalid('Only one of removed_in_version and removed_at_date must be specified'), + error_code='deprecation-either-date-or-version') + if not v.get('removed_from_collection'): + raise _add_ansible_error_code( + Invalid('If removed_in_version or removed_at_date is specified, ' + 'removed_from_collection must be specified as well'), + error_code='deprecation-collection-missing') + check_removal_version(v, + version_field='removed_in_version', + collection_name_field='removed_from_collection', + error_code='invalid-removal-version') + return + if v.get('removed_from_collection'): + raise Invalid('removed_from_collection cannot be specified without either ' + 'removed_in_version or removed_at_date') + + +def argument_spec_schema(for_collection): + any_string_types = Any(*string_types) + schema = { + any_string_types: { + 'type': Any(is_callable, *argument_spec_types), + 'elements': Any(*argument_spec_types), + 'default': object, + 'fallback': Any( + (is_callable, list_string_types), + [is_callable, list_string_types], + ), + 'choices': Any([object], (object,)), + 'required': bool, + 'no_log': bool, + 'aliases': Any(list_string_types, tuple(list_string_types)), + 'apply_defaults': bool, + 'removed_in_version': version(for_collection), + 'removed_at_date': date(), + 'removed_from_collection': collection_name, + 'options': Self, + 'deprecated_aliases': Any([All( + Any( + { + Required('name'): Any(*string_types), + Required('date'): date(), + Required('collection_name'): collection_name, + }, + { + Required('name'): Any(*string_types), + Required('version'): version(for_collection), + Required('collection_name'): collection_name, + }, + ), + partial(check_removal_version, + version_field='version', + collection_name_field='collection_name', + error_code='invalid-removal-version') + )]), + } + } + schema[any_string_types].update(argument_spec_modifiers) + schemas = All( + schema, + Schema({any_string_types: no_required_with_default}), + Schema({any_string_types: elements_with_list}), + Schema({any_string_types: options_with_apply_defaults}), + Schema({any_string_types: option_deprecation}), + ) + return Schema(schemas) + + +def ansible_module_kwargs_schema(module_name, for_collection): + schema = { + 'argument_spec': argument_spec_schema(for_collection), + 'bypass_checks': bool, + 'no_log': bool, + 'check_invalid_arguments': Any(None, bool), + 'add_file_common_args': bool, + 
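+        # *_info and *_facts modules must set supports_check_mode=True; the
+        # stricter requirement is swapped in just below.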
'supports_check_mode': bool, + } + if module_name.endswith(('_info', '_facts')): + del schema['supports_check_mode'] + schema[Required('supports_check_mode')] = True + schema.update(argument_spec_modifiers) + return Schema(schema) + + +json_value = Schema(Any( + None, + int, + float, + [Self], + *(list({str_type: Self} for str_type in string_types) + list(string_types)) +)) + + +def version_added(v, error_code='version-added-invalid', accept_historical=False): + if 'version_added' in v: + version_added = v.get('version_added') + if isinstance(version_added, string_types): + # If it is not a string, schema validation will have already complained + # - or we have a float and we are in ansible/ansible, in which case we're + # also happy. + if v.get('version_added_collection') == 'ansible.builtin': + if version_added == 'historical' and accept_historical: + return v + try: + version = StrictVersion() + version.parse(version_added) + except ValueError as exc: + raise _add_ansible_error_code( + Invalid('version_added (%r) is not a valid ansible-core version: ' + '%s' % (version_added, exc)), + error_code=error_code) + else: + try: + version = SemanticVersion() + version.parse(version_added) + if version.major != 0 and version.patch != 0: + raise _add_ansible_error_code( + Invalid('version_added (%r) must be a major or minor release, ' + 'not a patch release (see specification at ' + 'https://semver.org/)' % (version_added, )), + error_code='version-added-must-be-major-or-minor') + except ValueError as exc: + raise _add_ansible_error_code( + Invalid('version_added (%r) is not a valid collection version ' + '(see specification at https://semver.org/): ' + '%s' % (version_added, exc)), + error_code=error_code) + elif 'version_added_collection' in v: + # Must have been manual intervention, since version_added_collection is only + # added automatically when version_added is present + raise Invalid('version_added_collection cannot be specified without version_added') + return v + + +def list_dict_option_schema(for_collection): + suboption_schema = Schema( + { + Required('description'): Any(list_string_types, *string_types), + 'required': bool, + 'choices': list, + 'aliases': Any(list_string_types), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'default': json_value, + # Note: Types are strings, not literal bools, such as True or False + 'type': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + # Recursive suboptions + 'suboptions': Any(None, *list({str_type: Self} for str_type in string_types)), + }, + extra=PREVENT_EXTRA + ) + + # This generates list of dicts with keys from string_types and suboption_schema value + # for example in Python 3: {str: suboption_schema} + list_dict_suboption_schema = [{str_type: suboption_schema} for str_type in string_types] + + option_schema = Schema( + { + Required('description'): Any(list_string_types, *string_types), + 'required': bool, + 'choices': list, + 'aliases': Any(list_string_types), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'default': json_value, + 'suboptions': Any(None, *list_dict_suboption_schema), + # Note: Types are strings, not literal bools, such as True or False + 'type': Any(None, 
'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + }, + extra=PREVENT_EXTRA + ) + + option_version_added = Schema( + All({ + 'suboptions': Any(None, *[{str_type: Self} for str_type in string_types]), + }, partial(version_added, error_code='option-invalid-version-added')), + extra=ALLOW_EXTRA + ) + + # This generates list of dicts with keys from string_types and option_schema value + # for example in Python 3: {str: option_schema} + return [{str_type: All(option_schema, option_version_added)} for str_type in string_types] + + +def return_contains(v): + schema = Schema( + { + Required('contains'): Any(dict, list, *string_types) + }, + extra=ALLOW_EXTRA + ) + if v.get('type') == 'complex': + return schema(v) + return v + + +def return_schema(for_collection): + return_contains_schema = Any( + All( + Schema( + { + Required('description'): Any(list_string_types, *string_types), + 'returned': Any(*string_types), # only returned on top level + Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'sample': json_value, + 'example': json_value, + 'contains': Any(None, *list({str_type: Self} for str_type in string_types)), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + } + ), + Schema(return_contains), + Schema(partial(version_added, error_code='option-invalid-version-added')), + ), + Schema(type(None)), + ) + + # This generates list of dicts with keys from string_types and return_contains_schema value + # for example in Python 3: {str: return_contains_schema} + list_dict_return_contains_schema = [{str_type: return_contains_schema} for str_type in string_types] + + return Any( + All( + Schema( + { + any_string_types: { + Required('description'): Any(list_string_types, *string_types), + Required('returned'): Any(*string_types), + Required('type'): Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'), + 'version_added': version(for_collection), + 'version_added_collection': collection_name, + 'sample': json_value, + 'example': json_value, + 'contains': Any(None, *list_dict_return_contains_schema), + # in case of type='list' elements define type of individual item in list + 'elements': Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json', 'jsonarg', 'list', 'path', 'raw', 'sid', 'str'), + } + } + ), + Schema({any_string_types: return_contains}), + Schema({any_string_types: partial(version_added, error_code='option-invalid-version-added')}), + ), + Schema(type(None)), + ) + + +def deprecation_schema(for_collection): + main_fields = { + Required('why'): Any(*string_types), + Required('alternative'): Any(*string_types), + Required('removed_from_collection'): collection_name, + 'removed': Any(True), + } + + date_schema = { + Required('removed_at_date'): date(), + } + date_schema.update(main_fields) + + if for_collection: + version_schema = { + Required('removed_in'): version(for_collection), + } + else: + version_schema = { + Required('removed_in'): deprecation_versions(), + } + version_schema.update(main_fields) + + result = Any( + 
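+        # e.g. a matching DOCUMENTATION block, with hypothetical values:
+        #   deprecated:
+        #     removed_in: 4.0.0
+        #     why: Replaced by a simpler module.
+        #     alternative: Use another module instead.
+        #     removed_from_collection: community.general
+        # the date-based form uses removed_at_date: 2023-01-01 in place of removed_in.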
Schema(version_schema, extra=PREVENT_EXTRA), + Schema(date_schema, extra=PREVENT_EXTRA), + ) + + if for_collection: + result = All( + result, + partial(check_removal_version, + version_field='removed_in', + collection_name_field='removed_from_collection', + error_code='invalid-removal-version')) + + return result + + +def author(value): + if value is None: + return value # let schema checks handle + + if not is_iterable(value): + value = [value] + + for line in value: + if not isinstance(line, string_types): + continue # let schema checks handle + m = author_line.search(line) + if not m: + raise Invalid("Invalid author") + + return value + + +def doc_schema(module_name, for_collection=False, deprecated_module=False): + + if module_name.startswith('_'): + module_name = module_name[1:] + deprecated_module = True + doc_schema_dict = { + Required('module'): module_name, + Required('short_description'): Any(*string_types), + Required('description'): Any(list_string_types, *string_types), + Required('author'): All(Any(None, list_string_types, *string_types), author), + 'notes': Any(None, list_string_types), + 'seealso': Any(None, seealso_schema), + 'requirements': list_string_types, + 'todo': Any(None, list_string_types, *string_types), + 'options': Any(None, *list_dict_option_schema(for_collection)), + 'extends_documentation_fragment': Any(list_string_types, *string_types), + 'version_added_collection': collection_name, + } + + if for_collection: + # Optional + doc_schema_dict['version_added'] = version(for_collection=True) + else: + doc_schema_dict[Required('version_added')] = version(for_collection=False) + + if deprecated_module: + deprecation_required_scheme = { + Required('deprecated'): Any(deprecation_schema(for_collection=for_collection)), + } + + doc_schema_dict.update(deprecation_required_scheme) + + def add_default_attributes(more=None): + schema = { + 'description': any_string_types, + 'support': any_string_types, + 'version_added_collection': any_string_types, + 'version_added': any_string_types, + } + if more: + schema.update(more) + return schema + + doc_schema_dict['attributes'] = Schema( + All( + Schema({ + any_string_types: { + Required('description'): any_string_types, + Required('support'): Any('full', 'partial', 'none'), + 'version_added_collection': collection_name, + 'version_added': version(for_collection=for_collection), + }, + }, extra=ALLOW_EXTRA), + partial(version_added, error_code='attribute-invalid-version-added', accept_historical=False), + Schema({ + any_string_types: add_default_attributes(), + 'action_group': add_default_attributes({ + Required('membership'): list_string_types, + }), + 'forced_action_plugin': add_default_attributes({ + Required('action_plugin'): any_string_types, + }), + 'proprietary': add_default_attributes({ + Required('platforms'): list_string_types, + }), + }, extra=PREVENT_EXTRA), + ) + ) + return Schema( + All( + Schema( + doc_schema_dict, + extra=PREVENT_EXTRA + ), + partial(version_added, error_code='module-invalid-version-added', accept_historical=not for_collection), + ) + ) + + +# Things to add soon +#################### +# 1) Recursively validate `type: complex` fields +# This will improve documentation, though require fair amount of module tidyup + +# Possible Future Enhancements +############################## + +# 1) Don't allow empty options for choices, aliases, etc +# 2) If type: bool ensure choices isn't set - perhaps use Exclusive +# 3) both version_added should be quoted floats + +# Tool that takes JSON and generates RETURN 
skeleton (needs to support complex structures) diff --git a/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py new file mode 100644 index 0000000000..ac46f6669f --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/validate-modules/validate_modules/utils.py @@ -0,0 +1,225 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2015 Matt Martz +# Copyright (C) 2015 Rackspace US, Inc. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import datetime +import os +import re +import sys + +from io import BytesIO, TextIOWrapper + +import yaml +import yaml.reader + +from ansible.module_utils._text import to_text +from ansible.module_utils.basic import AnsibleModule +from ansible.module_utils.common.yaml import SafeLoader +from ansible.module_utils.six import string_types +from ansible.parsing.yaml.loader import AnsibleLoader + + +class AnsibleTextIOWrapper(TextIOWrapper): + def write(self, s): + super(AnsibleTextIOWrapper, self).write(to_text(s, self.encoding, errors='replace')) + + +def find_executable(executable, cwd=None, path=None): + """Finds the full path to the executable specified""" + match = None + real_cwd = os.getcwd() + + if not cwd: + cwd = real_cwd + + if os.path.dirname(executable): + target = os.path.join(cwd, executable) + if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK): + match = executable + else: + path = os.environ.get('PATH', os.path.defpath) + + path_dirs = path.split(os.path.pathsep) + seen_dirs = set() + + for path_dir in path_dirs: + if path_dir in seen_dirs: + continue + + seen_dirs.add(path_dir) + + if os.path.abspath(path_dir) == real_cwd: + path_dir = cwd + + candidate = os.path.join(path_dir, executable) + + if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK): + match = candidate + break + + return match + + +def find_globals(g, tree): + """Uses AST to find globals in an ast tree""" + for child in tree: + if hasattr(child, 'body') and isinstance(child.body, list): + find_globals(g, child.body) + elif isinstance(child, (ast.FunctionDef, ast.ClassDef)): + g.add(child.name) + continue + elif isinstance(child, ast.Assign): + try: + g.add(child.targets[0].id) + except (IndexError, AttributeError): + pass + elif isinstance(child, ast.Import): + g.add(child.names[0].name) + elif isinstance(child, ast.ImportFrom): + for name in child.names: + g_name = name.asname or name.name + if g_name == '*': + continue + g.add(g_name) + + +class CaptureStd(): + """Context manager to handle capturing stderr and stdout""" + + def __enter__(self): + self.sys_stdout = sys.stdout + self.sys_stderr = sys.stderr + sys.stdout = self.stdout = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stdout.encoding) + sys.stderr = self.stderr = 
AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stderr.encoding) + return self + + def __exit__(self, exc_type, exc_value, traceback): + sys.stdout = self.sys_stdout + sys.stderr = self.sys_stderr + + def get(self): + """Return ``(stdout, stderr)``""" + + return self.stdout.buffer.getvalue(), self.stderr.buffer.getvalue() + + +def get_module_name_from_filename(filename, collection): + # Calculate the module's name so that relative imports work correctly + if collection: + # collection is a relative path, example: ansible_collections/my_namespace/my_collection + # filename is a relative path, example: plugins/modules/my_module.py + path = os.path.join(collection, filename) + else: + # filename is a relative path, example: lib/ansible/modules/system/ping.py + path = os.path.relpath(filename, 'lib') + + name = os.path.splitext(path)[0].replace(os.path.sep, '.') + + return name + + +def parse_yaml(value, lineno, module, name, load_all=False, ansible_loader=False): + traces = [] + errors = [] + data = None + + if load_all: + yaml_load = yaml.load_all + else: + yaml_load = yaml.load + + if ansible_loader: + loader = AnsibleLoader + else: + loader = SafeLoader + + try: + data = yaml_load(value, Loader=loader) + if load_all: + data = list(data) + except yaml.MarkedYAMLError as e: + e.problem_mark.line += lineno - 1 + e.problem_mark.name = '%s.%s' % (module, name) + errors.append({ + 'msg': '%s is not valid YAML' % name, + 'line': e.problem_mark.line + 1, + 'column': e.problem_mark.column + 1 + }) + traces.append(e) + except yaml.reader.ReaderError as e: + traces.append(e) + # TODO: Better line/column detection + errors.append({ + 'msg': ('%s is not valid YAML. Character ' + '0x%x at position %d.' % (name, e.character, e.position)), + 'line': lineno + }) + except yaml.YAMLError as e: + traces.append(e) + errors.append({ + 'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e), + 'line': lineno + }) + + return data, errors, traces + + +def is_empty(value): + """Evaluate null like values excluding False""" + if value is False: + return False + return not bool(value) + + +def compare_unordered_lists(a, b): + """Safe list comparisons + + Supports: + - unordered lists + - unhashable elements + """ + return len(a) == len(b) and all(x in b for x in a) + + +class NoArgsAnsibleModule(AnsibleModule): + """AnsibleModule that does not actually load params. This is used to get access to the + methods within AnsibleModule without having to fake a bunch of data + """ + def _load_params(self): + self.params = {'_ansible_selinux_special_fs': [], '_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False, '_ansible_check_mode': False} + + +def parse_isodate(v, allow_date): + if allow_date: + if isinstance(v, datetime.date): + return v + msg = 'Expected ISO 8601 date string (YYYY-MM-DD) or YAML date' + else: + msg = 'Expected ISO 8601 date string (YYYY-MM-DD)' + if not isinstance(v, string_types): + raise ValueError(msg) + # From Python 3.7 in, there is datetime.date.fromisoformat(). For older versions, + # we have to do things manually. 
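+    # e.g. parse_isodate('2021-08-12', allow_date=False) returns datetime.date(2021, 8, 12),
+    # while parse_isodate('08/12/2021', allow_date=False) raises ValueError(msg).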
+ if not re.match('^[0-9]{4}-[0-9]{2}-[0-9]{2}$', v): + raise ValueError(msg) + try: + return datetime.datetime.strptime(v, '%Y-%m-%d').date() + except ValueError: + raise ValueError(msg) diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/config/default.yml b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/default.yml new file mode 100644 index 0000000000..45d8b7adcf --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/default.yml @@ -0,0 +1,19 @@ +extends: default + +rules: + braces: {max-spaces-inside: 1, level: error} + brackets: {max-spaces-inside: 1, level: error} + colons: {max-spaces-after: -1, level: error} + commas: {max-spaces-after: -1, level: error} + comments: disable + comments-indentation: disable + document-start: disable + empty-lines: {max: 3, level: error} + hyphens: {level: error} + indentation: disable + key-duplicates: enable + line-length: disable + new-line-at-end-of-file: disable + new-lines: {type: unix} + trailing-spaces: disable + truthy: disable diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/config/modules.yml b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/modules.yml new file mode 100644 index 0000000000..da7e604999 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/modules.yml @@ -0,0 +1,19 @@ +extends: default + +rules: + braces: disable + brackets: disable + colons: disable + commas: disable + comments: disable + comments-indentation: disable + document-start: disable + empty-lines: disable + hyphens: disable + indentation: disable + key-duplicates: enable + line-length: disable + new-line-at-end-of-file: disable + new-lines: {type: unix} + trailing-spaces: disable + truthy: disable diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/config/plugins.yml b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/plugins.yml new file mode 100644 index 0000000000..6d41813787 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/config/plugins.yml @@ -0,0 +1,19 @@ +extends: default + +rules: + braces: disable + brackets: disable + colons: disable + commas: disable + comments: disable + comments-indentation: disable + document-start: disable + empty-lines: disable + hyphens: disable + indentation: disable + key-duplicates: disable + line-length: disable + new-line-at-end-of-file: disable + new-lines: {type: unix} + trailing-spaces: disable + truthy: disable diff --git a/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py new file mode 100644 index 0000000000..b9fc73e59d --- /dev/null +++ b/test/lib/ansible_test/_util/controller/sanity/yamllint/yamllinter.py @@ -0,0 +1,274 @@ +#!/usr/bin/env python +"""Wrapper around yamllint that supports YAML embedded in Ansible modules.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import ast +import json +import os +import re +import sys + +import yaml +from yaml.resolver import Resolver +from yaml.constructor import SafeConstructor +from yaml.error import MarkedYAMLError +from _yaml import CParser # pylint: disable=no-name-in-module + +from yamllint import linter +from yamllint.config import YamlLintConfig + + +def main(): + """Main program body.""" + paths = sys.argv[1:] or sys.stdin.read().splitlines() + + checker = YamlChecker() + checker.check(paths) + checker.report() + + +class TestConstructor(SafeConstructor): + 
"""Yaml Safe Constructor that knows about Ansible tags""" + + def construct_yaml_unsafe(self, node): + try: + constructor = getattr(node, 'id', 'object') + if constructor is not None: + constructor = getattr(self, 'construct_%s' % constructor) + except AttributeError: + constructor = self.construct_object + + value = constructor(node) + + return value + + +TestConstructor.add_constructor( + u'!unsafe', + TestConstructor.construct_yaml_unsafe) + + +TestConstructor.add_constructor( + u'!vault', + TestConstructor.construct_yaml_str) + + +TestConstructor.add_constructor( + u'!vault-encrypted', + TestConstructor.construct_yaml_str) + + +class TestLoader(CParser, TestConstructor, Resolver): + def __init__(self, stream): + CParser.__init__(self, stream) + TestConstructor.__init__(self) + Resolver.__init__(self) + + +class YamlChecker: + """Wrapper around yamllint that supports YAML embedded in Ansible modules.""" + def __init__(self): + self.messages = [] + + def report(self): + """Print yamllint report to stdout.""" + report = dict( + messages=self.messages, + ) + + print(json.dumps(report, indent=4, sort_keys=True)) + + def check(self, paths): + """ + :type paths: t.List[str] + """ + config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config') + + yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml')) + module_conf = YamlLintConfig(file=os.path.join(config_path, 'modules.yml')) + plugin_conf = YamlLintConfig(file=os.path.join(config_path, 'plugins.yml')) + + for path in paths: + extension = os.path.splitext(path)[1] + + with open(path) as f: + contents = f.read() + + if extension in ('.yml', '.yaml'): + self.check_yaml(yaml_conf, path, contents) + elif extension == '.py': + if path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/'): + conf = module_conf + else: + conf = plugin_conf + + self.check_module(conf, path, contents) + else: + raise Exception('unsupported extension: %s' % extension) + + def check_yaml(self, conf, path, contents): + """ + :type conf: YamlLintConfig + :type path: str + :type contents: str + """ + self.check_parsable(path, contents) + self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)] + + def check_module(self, conf, path, contents): + """ + :type conf: YamlLintConfig + :type path: str + :type contents: str + """ + docs = self.get_module_docs(path, contents) + + for key, value in docs.items(): + yaml_data = value['yaml'] + lineno = value['lineno'] + fmt = value['fmt'] + + if fmt != 'yaml': + continue + + if yaml_data.startswith('\n'): + yaml_data = yaml_data[1:] + lineno += 1 + + self.check_parsable(path, yaml_data, lineno) + + messages = list(linter.run(yaml_data, conf, path)) + + self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages] + + def check_parsable(self, path, contents, lineno=1): + """ + :type path: str + :type contents: str + :type lineno: int + """ + try: + yaml.load(contents, Loader=TestLoader) + except MarkedYAMLError as e: + self.messages += [{'code': 'unparsable-with-libyaml', + 'message': '%s - %s' % (e.args[0], e.args[2]), + 'path': path, + 'line': e.problem_mark.line + lineno, + 'column': e.problem_mark.column + 1, + 'level': 'error', + }] + + @staticmethod + def result_to_message(result, path, line_offset=0, prefix=''): + """ + :type result: any + :type path: str + :type line_offset: int + :type prefix: str + :rtype: dict[str, any] + """ + if prefix: + prefix = '%s: ' % prefix + + return dict( + code=result.rule or 
result.level, + message=prefix + result.desc, + path=path, + line=result.line + line_offset, + column=result.column, + level=result.level, + ) + + def get_module_docs(self, path, contents): + """ + :type path: str + :type contents: str + :rtype: dict[str, any] + """ + module_doc_types = [ + 'DOCUMENTATION', + 'EXAMPLES', + 'RETURN', + ] + + docs = {} + + fmt_re = re.compile(r'^# fmt:\s+(\S+)') + + def check_assignment(statement, doc_types=None): + """Check the given statement for a documentation assignment.""" + for target in statement.targets: + if not isinstance(target, ast.Name): + continue + + if doc_types and target.id not in doc_types: + continue + + fmt_match = fmt_re.match(statement.value.s.lstrip()) + fmt = 'yaml' + if fmt_match: + fmt = fmt_match.group(1) + + docs[target.id] = dict( + yaml=statement.value.s, + lineno=statement.lineno, + end_lineno=statement.lineno + len(statement.value.s.splitlines()), + fmt=fmt.lower(), + ) + + module_ast = self.parse_module(path, contents) + + if not module_ast: + return {} + + is_plugin = path.startswith('lib/ansible/modules/') or path.startswith('lib/ansible/plugins/') or path.startswith('plugins/') + is_doc_fragment = path.startswith('lib/ansible/plugins/doc_fragments/') or path.startswith('plugins/doc_fragments/') + + if is_plugin and not is_doc_fragment: + for body_statement in module_ast.body: + if isinstance(body_statement, ast.Assign): + check_assignment(body_statement, module_doc_types) + elif is_doc_fragment: + for body_statement in module_ast.body: + if isinstance(body_statement, ast.ClassDef): + for class_statement in body_statement.body: + if isinstance(class_statement, ast.Assign): + check_assignment(class_statement) + else: + raise Exception('unsupported path: %s' % path) + + return docs + + def parse_module(self, path, contents): + """ + :type path: str + :type contents: str + :rtype: ast.Module | None + """ + try: + return ast.parse(contents) + except SyntaxError as ex: + self.messages.append(dict( + code='python-syntax-error', + message=str(ex), + path=path, + line=ex.lineno, + column=ex.offset, + level='error', + )) + except Exception as ex: # pylint: disable=broad-except + self.messages.append(dict( + code='python-parse-error', + message=str(ex), + path=path, + line=0, + column=0, + level='error', + )) + + return None + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/tools/collection_detail.py b/test/lib/ansible_test/_util/controller/tools/collection_detail.py new file mode 100644 index 0000000000..e7c883ca01 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/collection_detail.py @@ -0,0 +1,95 @@ +"""Retrieve collection detail.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json +import os +import re +import sys + +import yaml + + +# See semantic versioning specification (https://semver.org/) +NUMERIC_IDENTIFIER = r'(?:0|[1-9][0-9]*)' +ALPHANUMERIC_IDENTIFIER = r'(?:[0-9]*[a-zA-Z-][a-zA-Z0-9-]*)' + +PRE_RELEASE_IDENTIFIER = r'(?:' + NUMERIC_IDENTIFIER + r'|' + ALPHANUMERIC_IDENTIFIER + r')' +BUILD_IDENTIFIER = r'[a-zA-Z0-9-]+' # equivalent to r'(?:[0-9]+|' + ALPHANUMERIC_IDENTIFIER + r')' + +VERSION_CORE = NUMERIC_IDENTIFIER + r'\.' + NUMERIC_IDENTIFIER + r'\.' + NUMERIC_IDENTIFIER +PRE_RELEASE = r'(?:-' + PRE_RELEASE_IDENTIFIER + r'(?:\.' + PRE_RELEASE_IDENTIFIER + r')*)?' +BUILD = r'(?:\+' + BUILD_IDENTIFIER + r'(?:\.' + BUILD_IDENTIFIER + r')*)?' 
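+# e.g. the full pattern assembled below accepts '1.2.3', '1.0.0-beta.1' and
+# '1.2.3+build.5', but rejects '1.2' (no patch part) and 'v1.2.3' (leading 'v').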
+ +SEMVER_REGULAR_EXPRESSION = r'^' + VERSION_CORE + PRE_RELEASE + BUILD + r'$' + + +def validate_version(version): + """Raise exception if the provided version is not None or a valid semantic version.""" + if version is None: + return + if not re.match(SEMVER_REGULAR_EXPRESSION, version): + raise Exception('Invalid version number "{0}". Collection version numbers must ' + 'follow semantic versioning (https://semver.org/).'.format(version)) + + +def read_manifest_json(collection_path): + """Return collection information from the MANIFEST.json file.""" + manifest_path = os.path.join(collection_path, 'MANIFEST.json') + + if not os.path.exists(manifest_path): + return None + + try: + with open(manifest_path) as manifest_file: + manifest = json.load(manifest_file) + + collection_info = manifest.get('collection_info') or dict() + + result = dict( + version=collection_info.get('version'), + ) + validate_version(result['version']) + except Exception as ex: # pylint: disable=broad-except + raise Exception('{0}: {1}'.format(os.path.basename(manifest_path), ex)) + + return result + + +def read_galaxy_yml(collection_path): + """Return collection information from the galaxy.yml file.""" + galaxy_path = os.path.join(collection_path, 'galaxy.yml') + + if not os.path.exists(galaxy_path): + return None + + try: + with open(galaxy_path) as galaxy_file: + galaxy = yaml.safe_load(galaxy_file) + + result = dict( + version=galaxy.get('version'), + ) + validate_version(result['version']) + except Exception as ex: # pylint: disable=broad-except + raise Exception('{0}: {1}'.format(os.path.basename(galaxy_path), ex)) + + return result + + +def main(): + """Retrieve collection detail.""" + collection_path = sys.argv[1] + + try: + result = read_manifest_json(collection_path) or read_galaxy_yml(collection_path) or dict() + except Exception as ex: # pylint: disable=broad-except + result = dict( + error='{0}'.format(ex), + ) + + print(json.dumps(result)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1 b/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1 new file mode 100644 index 0000000000..83c27ff73c --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/coverage_stub.ps1 @@ -0,0 +1,38 @@ +<# +.SYNOPSIS +Gets the lines to hit from a sourcefile for coverage stubs. +#> +[CmdletBinding()] +param ( + [Parameter(Mandatory, ValueFromRemainingArguments)] + [String[]] + $Path +) + +$stubInfo = @(foreach ($sourcePath in $Path) { + # Default is to just no lines for missing files + [Collections.Generic.HashSet[int]]$lines = @() + + if (Test-Path -LiteralPath $sourcePath) { + $code = [ScriptBlock]::Create([IO.File]::ReadAllText($sourcePath)) + + # We set our breakpoints with this predicate so our stubs should match + # that logic. + $predicate = { + $args[0] -is [System.Management.Automation.Language.CommandBaseAst] + } + $cmds = $code.Ast.FindAll($predicate, $true) + + # We only care about unique lines not multiple commands on 1 line. 
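+        # e.g. 'Get-Item $p; Remove-Item $p' on one line yields two CommandBaseAst
+        # hits with the same StartLineNumber; the [HashSet[int]] constraint on
+        # $lines above collapses those duplicates to a single entry.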
+ $lines = @(foreach ($cmd in $cmds) { + $cmd.Extent.StartLineNumber + }) + } + + [PSCustomObject]@{ + Path = $sourcePath + Lines = $lines + } +}) + +ConvertTo-Json -InputObject $stubInfo -Depth 2 -Compress diff --git a/test/lib/ansible_test/_util/controller/tools/quiet_pip.py b/test/lib/ansible_test/_util/controller/tools/quiet_pip.py new file mode 100644 index 0000000000..e1bb824646 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/quiet_pip.py @@ -0,0 +1,75 @@ +"""Custom entry-point for pip that filters out unwanted logging and warnings.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import logging +import re +import runpy +import warnings + +BUILTIN_FILTERER_FILTER = logging.Filterer.filter + +LOGGING_MESSAGE_FILTER = re.compile("^(" + ".*Running pip install with root privileges is generally not a good idea.*|" # custom Fedora patch [1] + "DEPRECATION: Python 2.7 will reach the end of its life .*|" # pip 19.2.3 + "Ignoring .*: markers .* don't match your environment|" + "Looking in indexes: .*|" # pypi-test-container + "Requirement already satisfied.*" + ")$") + +# [1] https://src.fedoraproject.org/rpms/python-pip/blob/master/f/emit-a-warning-when-running-with-root-privileges.patch + +WARNING_MESSAGE_FILTERS = ( + # DEPRECATION: Python 2.6 is no longer supported by the Python core team, please upgrade your Python. + # A future version of pip will drop support for Python 2.6 + 'Python 2.6 is no longer supported by the Python core team, ', + + # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:137: InsecurePlatformWarning: + # A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. + # You can upgrade to a newer version of Python to solve this. + # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings + 'A true SSLContext object is not available. ', + + # {path}/python2.6/lib/python2.6/site-packages/pip/_vendor/urllib3/util/ssl_.py:339: SNIMissingWarning: + # An HTTPS request has been made, but the SNI (Subject Name Indication) extension to TLS is not available on this platform. + # This may cause the server to present an incorrect TLS certificate, which can cause validation failures. + # You can upgrade to a newer version of Python to solve this. + # For more information, see https://urllib3.readthedocs.io/en/latest/advanced-usage.html#ssl-warnings + 'An HTTPS request has been made, but the SNI ', + + # DEPRECATION: Python 2.7 reached the end of its life on January 1st, 2020. Please upgrade your Python as Python 2.7 is no longer maintained. + # pip 21.0 will drop support for Python 2.7 in January 2021. + # More details about Python 2 support in pip, can be found at https://pip.pypa.io/en/latest/development/release-process/#python-2-support + 'DEPRECATION: Python 2.7 reached the end of its life ', + + # DEPRECATION: Python 3.5 reached the end of its life on September 13th, 2020. Please upgrade your Python as Python 3.5 is no longer maintained. + # pip 21.0 will drop support for Python 3.5 in January 2021. pip 21.0 will remove support for this functionality. 
+ 'DEPRECATION: Python 3.5 reached the end of its life ', +) + + +def custom_filterer_filter(self, record): + """Globally omit logging of unwanted messages.""" + if LOGGING_MESSAGE_FILTER.search(record.getMessage()): + return 0 + + return BUILTIN_FILTERER_FILTER(self, record) + + +def main(): + """Main program entry point.""" + # Filtering logging output globally avoids having to intercept stdout/stderr. + # It also avoids problems with loss of color output and mixing up the order of stdout/stderr messages. + logging.Filterer.filter = custom_filterer_filter + + for message_filter in WARNING_MESSAGE_FILTERS: + # Setting filterwarnings in code is necessary because of the following: + # Python 2.6 does not support the PYTHONWARNINGS environment variable. It does support the -W option. + # Python 2.7 cannot use the -W option to match warning text after a colon. This makes it impossible to match specific warning messages. + warnings.filterwarnings('ignore', message_filter) + + runpy.run_module('pip.__main__', run_name='__main__', alter_sys=True) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/tools/sslcheck.py b/test/lib/ansible_test/_util/controller/tools/sslcheck.py new file mode 100755 index 0000000000..37b8227936 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/sslcheck.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python +"""Show openssl version.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json + +# noinspection PyBroadException +try: + from ssl import OPENSSL_VERSION_INFO + VERSION = list(OPENSSL_VERSION_INFO[:3]) +except Exception: # pylint: disable=broad-except + VERSION = None + + +def main(): + """Main program entry point.""" + print(json.dumps(dict( + version=VERSION, + ))) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/controller/tools/versions.py b/test/lib/ansible_test/_util/controller/tools/versions.py new file mode 100755 index 0000000000..4babef0162 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/versions.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python +"""Show python and pip versions.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys +import warnings + +warnings.simplefilter('ignore') # avoid python version deprecation warnings when using newer pip dependencies + +try: + import pip +except ImportError: + pip = None + +print(sys.version) + +if pip: + print('pip %s from %s' % (pip.__version__, os.path.dirname(pip.__file__))) diff --git a/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py new file mode 100755 index 0000000000..0c8f768034 --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/virtualenvcheck.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +"""Detect the real python interpreter when running in a virtual environment created by the 'virtualenv' module.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json + +try: + from sys import real_prefix +except ImportError: + real_prefix = None + +print(json.dumps(dict( + real_prefix=real_prefix, +))) diff --git a/test/lib/ansible_test/_util/controller/tools/yamlcheck.py b/test/lib/ansible_test/_util/controller/tools/yamlcheck.py new file mode 100755 index 0000000000..591842f4ad --- /dev/null +++ b/test/lib/ansible_test/_util/controller/tools/yamlcheck.py @@ -0,0 +1,21 @@ 
+#!/usr/bin/env python +"""Show python and pip versions.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json + +try: + import yaml +except ImportError: + yaml = None + +try: + from yaml import CLoader +except ImportError: + CLoader = None + +print(json.dumps(dict( + yaml=bool(yaml), + cloader=bool(CLoader), +))) diff --git a/test/lib/ansible_test/_util/target/injector/ansible b/test/lib/ansible_test/_util/target/injector/ansible new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-config b/test/lib/ansible_test/_util/target/injector/ansible-config new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-config @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-connection b/test/lib/ansible_test/_util/target/injector/ansible-connection new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-connection @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-console b/test/lib/ansible_test/_util/target/injector/ansible-console new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-console @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-doc b/test/lib/ansible_test/_util/target/injector/ansible-doc new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-doc @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-galaxy b/test/lib/ansible_test/_util/target/injector/ansible-galaxy new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-galaxy @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-inventory b/test/lib/ansible_test/_util/target/injector/ansible-inventory new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-inventory @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-playbook b/test/lib/ansible_test/_util/target/injector/ansible-playbook new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-playbook @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-pull b/test/lib/ansible_test/_util/target/injector/ansible-pull new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-pull @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/ansible-test b/test/lib/ansible_test/_util/target/injector/ansible-test new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-test @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git 
a/test/lib/ansible_test/_util/target/injector/ansible-vault b/test/lib/ansible_test/_util/target/injector/ansible-vault new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/ansible-vault @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/importer.py b/test/lib/ansible_test/_util/target/injector/importer.py new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/importer.py @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/pytest b/test/lib/ansible_test/_util/target/injector/pytest new file mode 120000 index 0000000000..6bbbfe4d91 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/pytest @@ -0,0 +1 @@ +python.py \ No newline at end of file diff --git a/test/lib/ansible_test/_util/target/injector/python.py b/test/lib/ansible_test/_util/target/injector/python.py new file mode 100755 index 0000000000..1063d1f011 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/python.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +"""Provides an entry point for python scripts and python modules on the controller with the current python interpreter and optional code coverage collection.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + + +def main(): + """Main entry point.""" + name = os.path.basename(__file__) + args = [sys.executable] + + coverage_config = os.environ.get('COVERAGE_CONF') + coverage_output = os.environ.get('COVERAGE_FILE') + + if coverage_config: + if coverage_output: + args += ['-m', 'coverage.__main__', 'run', '--rcfile', coverage_config] + else: + if sys.version_info >= (3, 4): + # noinspection PyUnresolvedReferences + import importlib.util + + # noinspection PyUnresolvedReferences + found = bool(importlib.util.find_spec('coverage')) + else: + # noinspection PyDeprecation + import imp # pylint: disable=deprecated-module + + try: + # noinspection PyDeprecation + imp.find_module('coverage') + found = True + except ImportError: + found = False + + if not found: + sys.exit('ERROR: Could not find `coverage` module. 
' + 'Did you use a virtualenv created without --system-site-packages or with the wrong interpreter?') + + if name == 'python.py': + if sys.argv[1] == '-c': + # prevent simple misuse of python.py with -c which does not work with coverage + sys.exit('ERROR: Use `python -c` instead of `python.py -c` to avoid errors when code coverage is collected.') + elif name == 'pytest': + args += ['-m', 'pytest'] + else: + args += [find_executable(name)] + + args += sys.argv[1:] + + os.execv(args[0], args) + + +def find_executable(name): + """ + :type name: str + :rtype: str + """ + path = os.environ.get('PATH', os.path.defpath) + seen = set([os.path.abspath(__file__)]) + + for base in path.split(os.path.pathsep): + candidate = os.path.abspath(os.path.join(base, name)) + + if candidate in seen: + continue + + seen.add(candidate) + + if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK): + return candidate + + raise Exception('Executable "%s" not found in path: %s' % (name, path)) + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/target/injector/virtualenv.sh b/test/lib/ansible_test/_util/target/injector/virtualenv.sh new file mode 100644 index 0000000000..cb19a7ce47 --- /dev/null +++ b/test/lib/ansible_test/_util/target/injector/virtualenv.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +# Create and activate a fresh virtual environment with `source virtualenv.sh`. + +rm -rf "${OUTPUT_DIR}/venv" + +# Try to use 'venv' if it is available, then fallback to 'virtualenv' since some systems provide 'venv' although it is non-functional. +if [[ "${ANSIBLE_TEST_PYTHON_VERSION}" =~ ^2\. ]] || ! "${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m venv --system-site-packages "${OUTPUT_DIR}/venv" > /dev/null 2>&1; then + rm -rf "${OUTPUT_DIR}/venv" + "${ANSIBLE_TEST_PYTHON_INTERPRETER}" -m virtualenv --system-site-packages --python "${ANSIBLE_TEST_PYTHON_INTERPRETER}" "${OUTPUT_DIR}/venv" +fi + +set +ux +source "${OUTPUT_DIR}/venv/bin/activate" +set -ux diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py new file mode 100644 index 0000000000..21c49c4730 --- /dev/null +++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/__init__.py @@ -0,0 +1,31 @@ +# (c) 2019 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# CAUTION: There are two implementations of the collection loader. +# They must be kept functionally identical, although their implementations may differ. +# +# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory. +# It must function on all Python versions supported on the controller. +# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory. +# It must function on all Python versions supported on managed hosts which are not supported by the controller. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +# FIXME: decide what of this we want to actually be public/toplevel, put other stuff on a utility class? +from ._collection_config import AnsibleCollectionConfig +from ._collection_finder import AnsibleCollectionRef +from ansible.module_utils.common.text.converters import to_text + + +def resource_from_fqcr(ref): + """ + Return resource from a fully-qualified collection reference, + or from a simple resource name. 
+ For fully-qualified collection references, this is equivalent to + ``AnsibleCollectionRef.from_fqcr(ref).resource``. + :param ref: collection reference to parse + :return: the resource as a unicode string + """ + ref = to_text(ref, errors='strict') + return ref.split(u'.')[-1] diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py new file mode 100644 index 0000000000..a20319319c --- /dev/null +++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_config.py @@ -0,0 +1,107 @@ +# (c) 2019 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# CAUTION: There are two implementations of the collection loader. +# They must be kept functionally identical, although their implementations may differ. +# +# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory. +# It must function on all Python versions supported on the controller. +# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory. +# It must function on all Python versions supported on managed hosts which are not supported by the controller. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +from ansible.module_utils.common.text.converters import to_text +from ansible.module_utils.six import with_metaclass + + +class _EventSource: + def __init__(self): + self._handlers = set() + + def __iadd__(self, handler): + if not callable(handler): + raise ValueError('handler must be callable') + self._handlers.add(handler) + return self + + def __isub__(self, handler): + try: + self._handlers.remove(handler) + except KeyError: + pass + + return self + + def _on_exception(self, handler, exc, *args, **kwargs): + # if we return True, we want the caller to re-raise + return True + + def fire(self, *args, **kwargs): + for h in self._handlers: + try: + h(*args, **kwargs) + except Exception as ex: + if self._on_exception(h, ex, *args, **kwargs): + raise + + +class _AnsibleCollectionConfig(type): + def __init__(cls, meta, name, bases): + cls._collection_finder = None + cls._default_collection = None + cls._on_collection_load = _EventSource() + + @property + def collection_finder(cls): + return cls._collection_finder + + @collection_finder.setter + def collection_finder(cls, value): + if cls._collection_finder: + raise ValueError('an AnsibleCollectionFinder has already been configured') + + cls._collection_finder = value + + @property + def collection_paths(cls): + cls._require_finder() + return [to_text(p) for p in cls._collection_finder._n_collection_paths] + + @property + def default_collection(cls): + return cls._default_collection + + @default_collection.setter + def default_collection(cls, value): + + cls._default_collection = value + + @property + def on_collection_load(cls): + return cls._on_collection_load + + @on_collection_load.setter + def on_collection_load(cls, value): + if value is not cls._on_collection_load: + raise ValueError('on_collection_load is not directly settable (use +=)') + + @property + def playbook_paths(cls): + cls._require_finder() + return [to_text(p) for p in cls._collection_finder._n_playbook_paths] + + @playbook_paths.setter + def playbook_paths(cls, value): + cls._require_finder() + cls._collection_finder.set_playbook_paths(value) + + def _require_finder(cls): + if not 
cls._collection_finder: + raise NotImplementedError('an AnsibleCollectionFinder has not been installed in this process') + + +# concrete class of our metaclass type that defines the class properties we want +class AnsibleCollectionConfig(with_metaclass(_AnsibleCollectionConfig)): + pass diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py new file mode 100644 index 0000000000..8b4b1b98d4 --- /dev/null +++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_finder.py @@ -0,0 +1,1067 @@ +# (c) 2019 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# CAUTION: There are two implementations of the collection loader. +# They must be kept functionally identical, although their implementations may differ. +# +# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory. +# It must function on all Python versions supported on the controller. +# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory. +# It must function on all Python versions supported on managed hosts which are not supported by the controller. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import os.path +import pkgutil +import re +import sys +from keyword import iskeyword +from tokenize import Name as _VALID_IDENTIFIER_REGEX + + +# DO NOT add new non-stdlib import deps here, this loader is used by external tools (eg ansible-test import sanity) +# that only allow stdlib and module_utils +from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes +from ansible.module_utils.six import string_types, PY3 +from ._collection_config import AnsibleCollectionConfig + +from contextlib import contextmanager +from types import ModuleType + +try: + from importlib import import_module +except ImportError: + def import_module(name): + __import__(name) + return sys.modules[name] + +try: + from importlib import reload as reload_module +except ImportError: + # 2.7 has a global reload function instead... 
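+    # (both spellings take an already-imported module object, e.g. a call such as
+    #  reload_module(sys.modules['ansible']) re-executes that module in place)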
+ reload_module = reload # pylint:disable=undefined-variable + +# NB: this supports import sanity test providing a different impl +try: + from ._collection_meta import _meta_yml_to_dict +except ImportError: + _meta_yml_to_dict = None + + +if not hasattr(__builtins__, 'ModuleNotFoundError'): + # this was introduced in Python 3.6 + ModuleNotFoundError = ImportError + + +_VALID_IDENTIFIER_STRING_REGEX = re.compile( + ''.join((_VALID_IDENTIFIER_REGEX, r'\Z')), +) + + +try: # NOTE: py3/py2 compat + # py2 mypy can't deal with try/excepts + is_python_identifier = str.isidentifier # type: ignore[attr-defined] +except AttributeError: # Python 2 + def is_python_identifier(tested_str): # type: (str) -> bool + """Determine whether the given string is a Python identifier.""" + # Ref: https://stackoverflow.com/a/55802320/595220 + return bool(re.match(_VALID_IDENTIFIER_STRING_REGEX, tested_str)) + + +PB_EXTENSIONS = ('.yml', '.yaml') + + +class _AnsibleCollectionFinder: + def __init__(self, paths=None, scan_sys_paths=True): + # TODO: accept metadata loader override + self._ansible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['ansible'].__file__))) + + if isinstance(paths, string_types): + paths = [paths] + elif paths is None: + paths = [] + + # expand any placeholders in configured paths + paths = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in paths] + + # add syspaths if needed + if scan_sys_paths: + paths.extend(sys.path) + + good_paths = [] + # expand any placeholders in configured paths + for p in paths: + + # ensure we always have ansible_collections + if os.path.basename(p) == 'ansible_collections': + p = os.path.dirname(p) + + if p not in good_paths and os.path.isdir(to_bytes(os.path.join(p, 'ansible_collections'), errors='surrogate_or_strict')): + good_paths.append(p) + + self._n_configured_paths = good_paths + self._n_cached_collection_paths = None + self._n_cached_collection_qualified_paths = None + + self._n_playbook_paths = [] + + @classmethod + def _remove(cls): + for mps in sys.meta_path: + if isinstance(mps, _AnsibleCollectionFinder): + sys.meta_path.remove(mps) + + # remove any path hooks that look like ours + for ph in sys.path_hooks: + if hasattr(ph, '__self__') and isinstance(ph.__self__, _AnsibleCollectionFinder): + sys.path_hooks.remove(ph) + + # zap any cached path importer cache entries that might refer to us + sys.path_importer_cache.clear() + + AnsibleCollectionConfig._collection_finder = None + + # validate via the public property that we really killed it + if AnsibleCollectionConfig.collection_finder is not None: + raise AssertionError('_AnsibleCollectionFinder remove did not reset AnsibleCollectionConfig.collection_finder') + + def _install(self): + self._remove() + sys.meta_path.insert(0, self) + + sys.path_hooks.insert(0, self._ansible_collection_path_hook) + + AnsibleCollectionConfig.collection_finder = self + + def _ansible_collection_path_hook(self, path): + path = to_native(path) + interesting_paths = self._n_cached_collection_qualified_paths + if not interesting_paths: + interesting_paths = [] + for p in self._n_collection_paths: + if os.path.basename(p) != 'ansible_collections': + p = os.path.join(p, 'ansible_collections') + + if p not in interesting_paths: + interesting_paths.append(p) + + interesting_paths.insert(0, self._ansible_pkg_path) + self._n_cached_collection_qualified_paths = interesting_paths + + if any(path.startswith(p) for p in interesting_paths): + return _AnsiblePathHookFinder(self, path) + + raise 
ImportError('not interested') + + @property + def _n_collection_paths(self): + paths = self._n_cached_collection_paths + if not paths: + self._n_cached_collection_paths = paths = self._n_playbook_paths + self._n_configured_paths + return paths + + def set_playbook_paths(self, playbook_paths): + if isinstance(playbook_paths, string_types): + playbook_paths = [playbook_paths] + + # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins) + added_paths = set() + + # de-dupe + self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in playbook_paths if not (p in added_paths or added_paths.add(p))] + self._n_cached_collection_paths = None + # HACK: playbook CLI sets this relatively late, so we've already loaded some packages whose paths might depend on this. Fix those up. + # NB: this should NOT be used for late additions; ideally we'd fix the playbook dir setup earlier in Ansible init + # to prevent this from occurring + for pkg in ['ansible_collections', 'ansible_collections.ansible']: + self._reload_hack(pkg) + + def _reload_hack(self, fullname): + m = sys.modules.get(fullname) + if not m: + return + reload_module(m) + + def find_module(self, fullname, path=None): + # Figure out what's being asked for, and delegate to a special-purpose loader + + split_name = fullname.split('.') + toplevel_pkg = split_name[0] + module_to_find = split_name[-1] + part_count = len(split_name) + + if toplevel_pkg not in ['ansible', 'ansible_collections']: + # not interested in anything other than ansible_collections (and limited cases under ansible) + return None + + # sanity check what we're getting from import, canonicalize path values + if part_count == 1: + if path: + raise ValueError('path should not be specified for top-level packages (trying to find {0})'.format(fullname)) + else: + # seed the path to the configured collection roots + path = self._n_collection_paths + + if part_count > 1 and path is None: + raise ValueError('path must be specified for subpackages (trying to find {0})'.format(fullname)) + + # NB: actual "find"ing is delegated to the constructors on the various loaders; they'll ImportError if not found + try: + if toplevel_pkg == 'ansible': + # something under the ansible package, delegate to our internal loader in case of redirections + return _AnsibleInternalRedirectLoader(fullname=fullname, path_list=path) + if part_count == 1: + return _AnsibleCollectionRootPkgLoader(fullname=fullname, path_list=path) + if part_count == 2: # ns pkg eg, ansible_collections, ansible_collections.somens + return _AnsibleCollectionNSPkgLoader(fullname=fullname, path_list=path) + elif part_count == 3: # collection pkg eg, ansible_collections.somens.somecoll + return _AnsibleCollectionPkgLoader(fullname=fullname, path_list=path) + # anything below the collection + return _AnsibleCollectionLoader(fullname=fullname, path_list=path) + except ImportError: + # TODO: log attempt to load context + return None + + +# Implements a path_hook finder for iter_modules (since it's only path based). This finder does not need to actually +# function as a finder in most cases, since our meta_path finder is consulted first for *almost* everything, except +# pkgutil.iter_modules, and under py2, pkgutil.get_data if the parent package passed has not been loaded yet. 
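+# e.g. with the finder installed, an iteration such as
+#   pkgutil.iter_modules(['/opt/collections/ansible_collections'])  # hypothetical path
+# is serviced by the iter_modules() method of the class below.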
+class _AnsiblePathHookFinder: + def __init__(self, collection_finder, pathctx): + # when called from a path_hook, find_module doesn't usually get the path arg, so this provides our context + self._pathctx = to_native(pathctx) + self._collection_finder = collection_finder + if PY3: + # cache the native FileFinder (take advantage of its filesystem cache for future find/load requests) + self._file_finder = None + + # class init is fun- this method has a self arg that won't get used + def _get_filefinder_path_hook(self=None): + _file_finder_hook = None + if PY3: + # try to find the FileFinder hook to call for fallback path-based imports in Py3 + _file_finder_hook = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)] + if len(_file_finder_hook) != 1: + raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(_file_finder_hook))) + _file_finder_hook = _file_finder_hook[0] + + return _file_finder_hook + + _filefinder_path_hook = _get_filefinder_path_hook() + + def find_module(self, fullname, path=None): + # we ignore the passed in path here- use what we got from the path hook init + split_name = fullname.split('.') + toplevel_pkg = split_name[0] + + if toplevel_pkg == 'ansible_collections': + # collections content? delegate to the collection finder + return self._collection_finder.find_module(fullname, path=[self._pathctx]) + else: + # Something else; we'd normally restrict this to `ansible` descendent modules so that any weird loader + # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test + # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and + # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure + # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the + # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's + # built-in FS caching and byte-compilation for most things. + if PY3: + # create or consult our cached file finder for this path + if not self._file_finder: + try: + self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx) + except ImportError: + # FUTURE: log at a high logging level? This is normal for things like python36.zip on the path, but + # might not be in some other situation... 
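+                        # e.g. CPython's FileFinder path hook raises ImportError for any
+                        # path that is not an existing directory, including zip archives.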
+ return None + + spec = self._file_finder.find_spec(fullname) + if not spec: + return None + return spec.loader + else: + # call py2's internal loader + # noinspection PyDeprecation + return pkgutil.ImpImporter(self._pathctx).find_module(fullname) # pylint: disable=deprecated-class + + def iter_modules(self, prefix): + # NB: this currently represents only what's on disk, and does not handle package redirection + return _iter_modules_impl([self._pathctx], prefix) + + def __repr__(self): + return "{0}(path='{1}')".format(self.__class__.__name__, self._pathctx) + + +class _AnsibleCollectionPkgLoaderBase: + _allows_package_code = False + + def __init__(self, fullname, path_list=None): + self._fullname = fullname + self._redirect_module = None + self._split_name = fullname.split('.') + self._rpart_name = fullname.rpartition('.') + self._parent_package_name = self._rpart_name[0] # eg ansible_collections for ansible_collections.somens, '' for toplevel + self._package_to_load = self._rpart_name[2] # eg somens for ansible_collections.somens + + self._source_code_path = None + self._decoded_source = None + self._compiled_code = None + + self._validate_args() + + self._candidate_paths = self._get_candidate_paths([to_native(p) for p in path_list]) + self._subpackage_search_paths = self._get_subpackage_search_paths(self._candidate_paths) + + self._validate_final() + + # allow subclasses to validate args and sniff split values before we start digging around + def _validate_args(self): + if self._split_name[0] != 'ansible_collections': + raise ImportError('this loader can only load packages from the ansible_collections package, not {0}'.format(self._fullname)) + + # allow subclasses to customize candidate path filtering + def _get_candidate_paths(self, path_list): + return [os.path.join(p, self._package_to_load) for p in path_list] + + # allow subclasses to customize finding paths + def _get_subpackage_search_paths(self, candidate_paths): + # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules) + return [p for p in candidate_paths if os.path.isdir(to_bytes(p))] + + # allow subclasses to customize state validation/manipulation before we return the loader instance + def _validate_final(self): + return + + @staticmethod + @contextmanager + def _new_or_existing_module(name, **kwargs): + # handle all-or-nothing sys.modules creation/use-existing/delete-on-exception-if-created behavior + created_module = False + module = sys.modules.get(name) + try: + if not module: + module = ModuleType(name) + created_module = True + sys.modules[name] = module + # always override the values passed, except name (allow reference aliasing) + for attr, value in kwargs.items(): + setattr(module, attr, value) + yield module + except Exception: + if created_module: + if sys.modules.get(name): + sys.modules.pop(name) + raise + + # basic module/package location support + # NB: this does not support distributed packages! + @staticmethod + def _module_file_from_path(leaf_name, path): + has_code = True + package_path = os.path.join(to_native(path), to_native(leaf_name)) + module_path = None + + # if the submodule is a package, assemble valid submodule paths, but stop looking for a module + if os.path.isdir(to_bytes(package_path)): + # is there a package init? 
+ module_path = os.path.join(package_path, '__init__.py') + if not os.path.isfile(to_bytes(module_path)): + module_path = os.path.join(package_path, '__synthetic__') + has_code = False + else: + module_path = package_path + '.py' + package_path = None + if not os.path.isfile(to_bytes(module_path)): + raise ImportError('{0} not found at {1}'.format(leaf_name, path)) + + return module_path, has_code, package_path + + def load_module(self, fullname): + # short-circuit redirect; we've already imported the redirected module, so just alias it and return it + if self._redirect_module: + sys.modules[self._fullname] = self._redirect_module + return self._redirect_module + + # we're actually loading a module/package + module_attrs = dict( + __loader__=self, + __file__=self.get_filename(fullname), + __package__=self._parent_package_name # sane default for non-packages + ) + + # eg, I am a package + if self._subpackage_search_paths is not None: # empty is legal + module_attrs['__path__'] = self._subpackage_search_paths + module_attrs['__package__'] = fullname # per PEP366 + + with self._new_or_existing_module(fullname, **module_attrs) as module: + # execute the module's code in its namespace + code_obj = self.get_code(fullname) + if code_obj is not None: # things like NS packages that can't have code on disk will return None + exec(code_obj, module.__dict__) + + return module + + def is_package(self, fullname): + if fullname != self._fullname: + raise ValueError('this loader cannot answer is_package for {0}, only {1}'.format(fullname, self._fullname)) + return self._subpackage_search_paths is not None + + def get_source(self, fullname): + if self._decoded_source: + return self._decoded_source + if fullname != self._fullname: + raise ValueError('this loader cannot load source for {0}, only {1}'.format(fullname, self._fullname)) + if not self._source_code_path: + return None + # FIXME: what do we want encoding/newline requirements to be? + self._decoded_source = self.get_data(self._source_code_path) + return self._decoded_source + + def get_data(self, path): + if not path: + raise ValueError('a path must be specified') + + # TODO: ensure we're being asked for a path below something we own + # TODO: try to handle redirects internally? 
+ + if not path[0] == '/': + # relative to current package, search package paths if possible (this may not be necessary) + # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths] + raise ValueError('relative resource paths not supported') + else: + candidate_paths = [path] + + for p in candidate_paths: + b_path = to_bytes(p) + if os.path.isfile(b_path): + with open(b_path, 'rb') as fd: + return fd.read() + # HACK: if caller asks for __init__.py and the parent dir exists, return empty string (this keeps consistency + # with "collection subpackages don't require __init__.py" working everywhere with get_data) + elif b_path.endswith(b'__init__.py') and os.path.isdir(os.path.dirname(b_path)): + return '' + + return None + + def _synthetic_filename(self, fullname): + return '' + + def get_filename(self, fullname): + if fullname != self._fullname: + raise ValueError('this loader cannot find files for {0}, only {1}'.format(fullname, self._fullname)) + + filename = self._source_code_path + + if not filename and self.is_package(fullname): + if len(self._subpackage_search_paths) == 1: + filename = os.path.join(self._subpackage_search_paths[0], '__synthetic__') + else: + filename = self._synthetic_filename(fullname) + + return filename + + def get_code(self, fullname): + if self._compiled_code: + return self._compiled_code + + # this may or may not be an actual filename, but it's the value we'll use for __file__ + filename = self.get_filename(fullname) + if not filename: + filename = '' + + source_code = self.get_source(fullname) + + # for things like synthetic modules that really have no source on disk, don't return a code object at all + # vs things like an empty package init (which has an empty string source on disk) + if source_code is None: + return None + + self._compiled_code = compile(source=source_code, filename=filename, mode='exec', flags=0, dont_inherit=True) + + return self._compiled_code + + def iter_modules(self, prefix): + return _iter_modules_impl(self._subpackage_search_paths, prefix) + + def __repr__(self): + return '{0}(path={1})'.format(self.__class__.__name__, self._subpackage_search_paths or self._source_code_path) + + +class _AnsibleCollectionRootPkgLoader(_AnsibleCollectionPkgLoaderBase): + def _validate_args(self): + super(_AnsibleCollectionRootPkgLoader, self)._validate_args() + if len(self._split_name) != 1: + raise ImportError('this loader can only load the ansible_collections toplevel package, not {0}'.format(self._fullname)) + + +# Implements Ansible's custom namespace package support. +# The ansible_collections package and one level down (collections namespaces) are Python namespace packages +# that search across all configured collection roots. The collection package (two levels down) is the first one found +# on the configured collection root path, and Python namespace package aggregation is not allowed at or below +# the collection. Implements implicit package (package dir) support for both Py2/3. Package init code is ignored +# by this loader.
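+# For example (hypothetical paths), given two configured collection roots: +# +# /root_a/ansible_collections/ns1/coll1/ +# /root_b/ansible_collections/ns1/coll2/ +# +# the 'ansible_collections' and 'ansible_collections.ns1' packages aggregate both roots, while +# 'ansible_collections.ns1.coll1' binds only to its first match (under /root_a) with no aggregation below it.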
+class _AnsibleCollectionNSPkgLoader(_AnsibleCollectionPkgLoaderBase): + def _validate_args(self): + super(_AnsibleCollectionNSPkgLoader, self)._validate_args() + if len(self._split_name) != 2: + raise ImportError('this loader can only load collections namespace packages, not {0}'.format(self._fullname)) + + def _validate_final(self): + # special-case the `ansible` namespace, since `ansible.builtin` is magical + if not self._subpackage_search_paths and self._package_to_load != 'ansible': + raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths)) + + +# handles locating the actual collection package and associated metadata +class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase): + def _validate_args(self): + super(_AnsibleCollectionPkgLoader, self)._validate_args() + if len(self._split_name) != 3: + raise ImportError('this loader can only load collection packages, not {0}'.format(self._fullname)) + + def _validate_final(self): + if self._split_name[1:3] == ['ansible', 'builtin']: + # we don't want to allow this one to have on-disk search capability + self._subpackage_search_paths = [] + elif not self._subpackage_search_paths: + raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths)) + else: + # only search within the first collection we found + self._subpackage_search_paths = [self._subpackage_search_paths[0]] + + def load_module(self, fullname): + if not _meta_yml_to_dict: + raise ValueError('ansible.utils.collection_loader._meta_yml_to_dict is not set') + + module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname) + + module._collection_meta = {} + # TODO: load collection metadata, cache in __loader__ state + + collection_name = '.'.join(self._split_name[1:3]) + + if collection_name == 'ansible.builtin': + # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro + ansible_pkg_path = os.path.dirname(import_module('ansible').__file__) + metadata_path = os.path.join(ansible_pkg_path, 'config/ansible_builtin_runtime.yml') + with open(to_bytes(metadata_path), 'rb') as fd: + raw_routing = fd.read() + else: + b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml')) + if os.path.isfile(b_routing_meta_path): + with open(b_routing_meta_path, 'rb') as fd: + raw_routing = fd.read() + else: + raw_routing = '' + try: + if raw_routing: + routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml')) + module._collection_meta = self._canonicalize_meta(routing_dict) + except Exception as ex: + raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex))) + + AnsibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__)) + + return module + + def _canonicalize_meta(self, meta_dict): + # TODO: rewrite import keys and all redirect targets that start with .. (current namespace) and . (current collection) + # OR we could do it all on the fly? 
+ # if not meta_dict: + # return {} + # + # ns_name = '.'.join(self._split_name[0:2]) + # collection_name = '.'.join(self._split_name[0:3]) + # + # # + # for routing_type, routing_type_dict in iteritems(meta_dict.get('plugin_routing', {})): + # for plugin_key, plugin_dict in iteritems(routing_type_dict): + # redirect = plugin_dict.get('redirect', '') + # if redirect.startswith('..'): + # redirect = redirect[2:] + + return meta_dict + + +# loads everything under a collection, including handling redirections defined by the collection +class _AnsibleCollectionLoader(_AnsibleCollectionPkgLoaderBase): + # HACK: stash this in a better place + _redirected_package_map = {} + _allows_package_code = True + + def _validate_args(self): + super(_AnsibleCollectionLoader, self)._validate_args() + if len(self._split_name) < 4: + raise ValueError('this loader is only for sub-collection modules/packages, not {0}'.format(self._fullname)) + + def _get_candidate_paths(self, path_list): + if len(path_list) != 1 and self._split_name[1:3] != ['ansible', 'builtin']: + raise ValueError('this loader requires exactly one path to search') + + return path_list + + def _get_subpackage_search_paths(self, candidate_paths): + collection_name = '.'.join(self._split_name[1:3]) + collection_meta = _get_collection_metadata(collection_name) + + # check for explicit redirection, as well as ancestor package-level redirection (only load the actual code once!) + redirect = None + explicit_redirect = False + + routing_entry = _nested_dict_get(collection_meta, ['import_redirection', self._fullname]) + if routing_entry: + redirect = routing_entry.get('redirect') + + if redirect: + explicit_redirect = True + else: + redirect = _get_ancestor_redirect(self._redirected_package_map, self._fullname) + + # NB: package level redirection requires hooking all future imports beneath the redirected source package + # in order to ensure sanity on future relative imports. We always import everything under its "real" name, + # then add a sys.modules entry with the redirected name using the same module instance. If we naively imported + # the source for each redirection, most submodules would import OK, but we'd have N runtime copies of the module + # (one for each name), and relative imports that ascend above the redirected package would break (since they'd + # see the redirected ancestor package contents instead of the package where they actually live). + if redirect: + # FIXME: wrap this so we can be explicit about a failed redirection + self._redirect_module = import_module(redirect) + if explicit_redirect and hasattr(self._redirect_module, '__path__') and self._redirect_module.__path__: + # if the import target looks like a package, store its name so we can rewrite future descendent loads + self._redirected_package_map[self._fullname] = redirect + + # if we redirected, don't do any further custom package logic + return None + + # we're not doing a redirect- try to find what we need to actually load a module/package + + # this will raise ImportError if we can't find the requested module/package at all + if not candidate_paths: + # no place to look, just ImportError + raise ImportError('package has no paths') + + found_path, has_code, package_path = self._module_file_from_path(self._package_to_load, candidate_paths[0]) + + # still here? we found something to load...
+ if has_code: + self._source_code_path = found_path + + if package_path: + return [package_path] # always needs to be a list + + return None + + +# This loader only answers for intercepted Ansible Python modules. Normal imports will fail here and be picked up later +# by our path_hook importer (which proxies the built-in import mechanisms, allowing normal caching etc to occur) +class _AnsibleInternalRedirectLoader: + def __init__(self, fullname, path_list): + self._redirect = None + + split_name = fullname.split('.') + toplevel_pkg = split_name[0] + module_to_load = split_name[-1] + + if toplevel_pkg != 'ansible': + raise ImportError('not interested') + + builtin_meta = _get_collection_metadata('ansible.builtin') + + routing_entry = _nested_dict_get(builtin_meta, ['import_redirection', fullname]) + if routing_entry: + self._redirect = routing_entry.get('redirect') + + if not self._redirect: + raise ImportError('not redirected, go ask path_hook') + + def load_module(self, fullname): + # since we're delegating to other loaders, this should only be called for internal redirects where we answered + # find_module with this loader, in which case we'll just directly import the redirection target, insert it into + # sys.modules under the name it was requested by, and return the original module. + + # should never see this + if not self._redirect: + raise ValueError('no redirect found for {0}'.format(fullname)) + + # FIXME: smuggle redirection context, provide warning/error that we tried and failed to redirect + mod = import_module(self._redirect) + sys.modules[fullname] = mod + return mod + + +class AnsibleCollectionRef: + # FUTURE: introspect plugin loaders to get these dynamically? + VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection', + 'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup', + 'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy', + 'terminal', 'test', 'vars', 'playbook']) + + # FIXME: tighten this up to match Python identifier reqs, etc + VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$')) + VALID_FQCR_RE = re.compile(to_text(r'^\w+(\.\w+){2,}$')) # can have 0-N included subdirs as well + + def __init__(self, collection_name, subdirs, resource, ref_type): + """ + Create an AnsibleCollectionRef from components + :param collection_name: a collection name of the form 'namespace.collectionname' + :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2') + :param resource: the name of the resource being referenced (eg, 'mymodule', 'someaction', 'a_role') + :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment' + """ + collection_name = to_text(collection_name, errors='strict') + if subdirs is not None: + subdirs = to_text(subdirs, errors='strict') + resource = to_text(resource, errors='strict') + ref_type = to_text(ref_type, errors='strict') + + if not self.is_valid_collection_name(collection_name): + raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name))) + + if ref_type not in self.VALID_REF_TYPES: + raise ValueError('invalid collection ref_type: {0}'.format(ref_type)) + + self.collection = collection_name + if subdirs: + if not re.match(self.VALID_SUBDIRS_RE, subdirs): + raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs))) + self.subdirs = subdirs + else: + self.subdirs = u'' + +
self.resource = resource + self.ref_type = ref_type + + package_components = [u'ansible_collections', self.collection] + fqcr_components = [self.collection] + + self.n_python_collection_package_name = to_native('.'.join(package_components)) + + if self.ref_type == u'role': + package_components.append(u'roles') + elif self.ref_type == u'playbook': + package_components.append(u'playbooks') + else: + # we assume it's a plugin + package_components += [u'plugins', self.ref_type] + + if self.subdirs: + package_components.append(self.subdirs) + fqcr_components.append(self.subdirs) + + if self.ref_type in (u'role', u'playbook'): + # playbooks and roles are their own resource + package_components.append(self.resource) + + fqcr_components.append(self.resource) + + self.n_python_package_name = to_native('.'.join(package_components)) + self._fqcr = u'.'.join(fqcr_components) + + def __repr__(self): + return 'AnsibleCollectionRef(collection={0!r}, subdirs={1!r}, resource={2!r})'.format(self.collection, self.subdirs, self.resource) + + @property + def fqcr(self): + return self._fqcr + + @staticmethod + def from_fqcr(ref, ref_type): + """ + Parse a string as a fully-qualified collection reference, raises ValueError if invalid + :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource') + :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment' + :return: a populated AnsibleCollectionRef object + """ + # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name), + # we split the resource name off the right, split ns and coll off the left, and we're left with any optional + # subdirs that need to be added back below the plugin-specific subdir we'll add. So: + # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource + # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource + # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename + if not AnsibleCollectionRef.is_valid_fqcr(ref): + raise ValueError('{0} is not a valid collection reference'.format(to_native(ref))) + + ref = to_text(ref, errors='strict') + ref_type = to_text(ref_type, errors='strict') + ext = '' + + if ref_type == u'playbook' and ref.endswith(PB_EXTENSIONS): + resource_splitname = ref.rsplit(u'.', 2) + package_remnant = resource_splitname[0] + resource = resource_splitname[1] + ext = '.' 
+ resource_splitname[2] + else: + resource_splitname = ref.rsplit(u'.', 1) + package_remnant = resource_splitname[0] + resource = resource_splitname[1] + + # split the left two components of the collection package name off, anything remaining is plugin-type + # specific subdirs to be added back on below the plugin type + package_splitname = package_remnant.split(u'.', 2) + if len(package_splitname) == 3: + subdirs = package_splitname[2] + else: + subdirs = u'' + + collection_name = u'.'.join(package_splitname[0:2]) + + return AnsibleCollectionRef(collection_name, subdirs, resource + ext, ref_type) + + @staticmethod + def try_parse_fqcr(ref, ref_type): + """ + Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error) + :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource') + :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment' + :return: a populated AnsibleCollectionRef object on successful parsing, else None + """ + try: + return AnsibleCollectionRef.from_fqcr(ref, ref_type) + except ValueError: + pass + + @staticmethod + def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name): + """ + Utility method to convert from a PluginLoader dir name to a plugin ref_type + :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library') + :return: the corresponding plugin ref_type (eg, 'action', 'role') + """ + legacy_plugin_dir_name = to_text(legacy_plugin_dir_name) + + plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'') + + if plugin_type == u'library': + plugin_type = u'modules' + + if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES: + raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name))) + + return plugin_type + + @staticmethod + def is_valid_fqcr(ref, ref_type=None): + """ + Validates if the given string is a well-formed fully-qualified collection reference (does not look up the collection itself) + :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource') + :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment' + :return: True if the collection ref passed is well-formed, False otherwise + """ + + ref = to_text(ref) + + if not ref_type: + return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref)) + + return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)) + + @staticmethod + def is_valid_collection_name(collection_name): + """ + Validates if the given string is a well-formed collection name (does not look up the collection itself) + :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname') + :return: True if the collection name passed is well-formed, False otherwise + """ + + collection_name = to_text(collection_name) + + if collection_name.count(u'.') != 1: + return False + + return all( + # NOTE: keywords and identifiers are different in different Pythons + not iskeyword(ns_or_name) and is_python_identifier(ns_or_name) + for ns_or_name in collection_name.split(u'.') + ) + + +def _get_collection_playbook_path(playbook): + + acr = AnsibleCollectionRef.try_parse_fqcr(playbook, u'playbook') + if acr: + try: + # get_collection_path + pkg = import_module(acr.n_python_collection_package_name) + except (IOError, ModuleNotFoundError) as e: + # leaving
e as debug target, even though not used in normal code + pkg = None + + if pkg: + cpath = os.path.join(sys.modules[acr.n_python_collection_package_name].__file__.replace('__synthetic__', 'playbooks')) + + if acr.subdirs: + paths = [to_native(x) for x in acr.subdirs.split(u'.')] + paths.insert(0, cpath) + cpath = os.path.join(*paths) + + path = os.path.join(cpath, to_native(acr.resource)) + if os.path.exists(to_bytes(path)): + return acr.resource, path, acr.collection + elif not acr.resource.endswith(PB_EXTENSIONS): + for ext in PB_EXTENSIONS: + path = os.path.join(cpath, to_native(acr.resource + ext)) + if os.path.exists(to_bytes(path)): + return acr.resource, path, acr.collection + return None + + +def _get_collection_role_path(role_name, collection_list=None): + return _get_collection_resource_path(role_name, u'role', collection_list) + + +def _get_collection_resource_path(name, ref_type, collection_list=None): + + if ref_type == u'playbook': + # they are handled a bit differently due to 'extension variance' and no collection_list + return _get_collection_playbook_path(name) + + acr = AnsibleCollectionRef.try_parse_fqcr(name, ref_type) + if acr: + # looks like a valid qualified collection ref; skip the collection_list + collection_list = [acr.collection] + subdirs = acr.subdirs + resource = acr.resource + elif not collection_list: + return None # not a FQ and no collection search list spec'd, nothing to do + else: + resource = name # treat as unqualified, loop through the collection search list to try and resolve + subdirs = '' + + for collection_name in collection_list: + try: + acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type=ref_type) + # FIXME: error handling/logging; need to catch any import failures and move along + pkg = import_module(acr.n_python_package_name) + + if pkg is not None: + # the package is now loaded, get the collection's package and ask where it lives + path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict')) + return resource, to_text(path, errors='surrogate_or_strict'), collection_name + + except (IOError, ModuleNotFoundError) as e: + continue + except Exception as ex: + # FIXME: pick out typical import errors first, then error logging + continue + + return None + + +def _get_collection_name_from_path(path): + """ + Return the containing collection name for a given path, or None if the path is not below a configured collection, or + the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured + collection roots). + :param path: path to evaluate for collection containment + :return: collection name or None + """ + + # ensure we compare full paths since pkg path will be abspath + path = to_native(os.path.abspath(to_bytes(path))) + + path_parts = path.split('/') + if path_parts.count('ansible_collections') != 1: + return None + + ac_pos = path_parts.index('ansible_collections') + + # make sure it's followed by at least a namespace and collection name + if len(path_parts) < ac_pos + 3: + return None + + candidate_collection_name = '.'.join(path_parts[ac_pos + 1:ac_pos + 3]) + + try: + # we've got a name for it, now see if the path prefix matches what the loader sees + imported_pkg_path = to_native(os.path.dirname(to_bytes(import_module('ansible_collections.'
+ candidate_collection_name).__file__))) + except ImportError: + return None + + # reassemble the original path prefix up to the collection name, and it should match what we just imported. If not, + # this is probably a collection root that's not configured. + + original_path_prefix = os.path.join('/', *path_parts[0:ac_pos + 3]) + + imported_pkg_path = to_native(os.path.abspath(to_bytes(imported_pkg_path))) + if original_path_prefix != imported_pkg_path: + return None + + return candidate_collection_name + + +def _get_import_redirect(collection_meta_dict, fullname): + if not collection_meta_dict: + return None + + return _nested_dict_get(collection_meta_dict, ['import_redirection', fullname, 'redirect']) + + +def _get_ancestor_redirect(redirected_package_map, fullname): + # walk the requested module's ancestor packages to see if any have been previously redirected + cur_pkg = fullname + while cur_pkg: + cur_pkg = cur_pkg.rpartition('.')[0] + ancestor_redirect = redirected_package_map.get(cur_pkg) + if ancestor_redirect: + # rewrite the prefix on fullname so we import the target first, then alias it + redirect = ancestor_redirect + fullname[len(cur_pkg):] + return redirect + return None + + +def _nested_dict_get(root_dict, key_list): + cur_value = root_dict + for key in key_list: + cur_value = cur_value.get(key) + if not cur_value: + return None + + return cur_value + + +def _iter_modules_impl(paths, prefix=''): + # NB: this currently only iterates what's on disk- redirected modules are not considered + if not prefix: + prefix = '' + else: + prefix = to_native(prefix) + # yield (module_loader, name, ispkg) for each module/pkg under path + # TODO: implement ignore/silent catch for unreadable? + for b_path in map(to_bytes, paths): + if not os.path.isdir(b_path): + continue + for b_basename in sorted(os.listdir(b_path)): + b_candidate_module_path = os.path.join(b_path, b_basename) + if os.path.isdir(b_candidate_module_path): + # exclude things that obviously aren't Python package dirs + # FIXME: this dir is adjustable in py3.8+, check for it + if b'.' in b_basename or b_basename == b'__pycache__': + continue + + # TODO: proper string handling? + yield prefix + to_native(b_basename), True + else: + # FIXME: match builtin ordering for package/dir/file, support compiled? + if b_basename.endswith(b'.py') and b_basename != b'__init__.py': + yield prefix + to_native(os.path.splitext(b_basename)[0]), False + + +def _get_collection_metadata(collection_name): + collection_name = to_native(collection_name) + if not collection_name or not isinstance(collection_name, string_types) or len(collection_name.split('.')) != 2: + raise ValueError('collection_name must be a non-empty string of the form namespace.collection') + + try: + collection_pkg = import_module('ansible_collections.'
+ collection_name) + except ImportError: + raise ValueError('unable to locate collection {0}'.format(collection_name)) + + _collection_meta = getattr(collection_pkg, '_collection_meta', None) + + if _collection_meta is None: + raise ValueError('collection metadata was not loaded for collection {0}'.format(collection_name)) + + return _collection_meta diff --git a/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py new file mode 100644 index 0000000000..3a971978b7 --- /dev/null +++ b/test/lib/ansible_test/_util/target/legacy_collection_loader/_collection_meta.py @@ -0,0 +1,37 @@ +# (c) 2019 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +# CAUTION: There are two implementations of the collection loader. +# They must be kept functionally identical, although their implementations may differ. +# +# 1) The controller implementation resides in the "lib/ansible/utils/collection_loader/" directory. +# It must function on all Python versions supported on the controller. +# 2) The ansible-test implementation resides in the "test/lib/ansible_test/_util/target/legacy_collection_loader/" directory. +# It must function on all Python versions supported on managed hosts which are not supported by the controller. + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +try: + from collections.abc import Mapping # pylint: disable=ansible-bad-import-from +except ImportError: + from collections import Mapping # pylint: disable=ansible-bad-import-from,deprecated-class + +from ansible.module_utils.common.yaml import yaml_load + + +def _meta_yml_to_dict(yaml_string_data, content_id): + """ + Converts string YAML dictionary to a Python dictionary. This function may be monkeypatched to another implementation + by some tools (eg the import sanity test). + :param yaml_string_data: a bytes-ish YAML dictionary + :param content_id: a unique ID representing the content to allow other implementations to cache the output + :return: a Python dictionary representing the YAML dictionary content + """ + # NB: content_id is passed in, but not used by this implementation + routing_dict = yaml_load(yaml_string_data) + if not routing_dict: + routing_dict = {} + if not isinstance(routing_dict, Mapping): + raise ValueError('collection metadata must be an instance of Python Mapping') + return routing_dict diff --git a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py new file mode 100644 index 0000000000..c26971fc5f --- /dev/null +++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_collections.py @@ -0,0 +1,75 @@ +"""Enable unit testing of Ansible collections. 
PYTEST_DONT_REWRITE""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import os +import sys + +# set by ansible-test to a single directory, rather than a list of directories as supported by Ansible itself +ANSIBLE_COLLECTIONS_PATH = os.path.join(os.environ['ANSIBLE_COLLECTIONS_PATH'], 'ansible_collections') + +# set by ansible-test to the minimum python version supported on the controller +ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION = tuple(int(x) for x in os.environ['ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION'].split('.')) + + +# this monkeypatch to _pytest.pathlib.resolve_package_path fixes PEP420 resolution for collections in pytest >= 6.0.0 +# NB: this code should never run under py2 +def collection_resolve_package_path(path): + """Configure the Python package path so that pytest can find our collections.""" + for parent in path.parents: + if str(parent) == ANSIBLE_COLLECTIONS_PATH: + return parent + + raise Exception('File "%s" not found in collection path "%s".' % (path, ANSIBLE_COLLECTIONS_PATH)) + + +# this monkeypatch to py.path.local.LocalPath.pypkgpath fixes PEP420 resolution for collections in pytest < 6.0.0 +def collection_pypkgpath(self): + """Configure the Python package path so that pytest can find our collections.""" + for parent in self.parts(reverse=True): + if str(parent) == ANSIBLE_COLLECTIONS_PATH: + return parent + + raise Exception('File "%s" not found in collection path "%s".' % (self.strpath, ANSIBLE_COLLECTIONS_PATH)) + + +def pytest_configure(): + """Configure this pytest plugin.""" + try: + if pytest_configure.executed: + return + except AttributeError: + pytest_configure.executed = True + + if sys.version_info >= ANSIBLE_CONTROLLER_MIN_PYTHON_VERSION: + # noinspection PyProtectedMember + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder + else: + # noinspection PyProtectedMember + from ansible_test._internal.legacy_collection_loader._collection_finder import _AnsibleCollectionFinder + + # allow unit tests to import code from collections + + # noinspection PyProtectedMember + _AnsibleCollectionFinder(paths=[os.path.dirname(ANSIBLE_COLLECTIONS_PATH)])._install() # pylint: disable=protected-access + + try: + # noinspection PyProtectedMember + from _pytest import pathlib as _pytest_pathlib + except ImportError: + _pytest_pathlib = None + + if hasattr(_pytest_pathlib, 'resolve_package_path'): + _pytest_pathlib.resolve_package_path = collection_resolve_package_path + else: + # looks like pytest <= 6.0.0, use the old hack against py.path + # noinspection PyProtectedMember + import py._path.local + + # force collections unit tests to be loaded with the ansible_collections namespace + # original idea from https://stackoverflow.com/questions/50174130/how-do-i-pytest-a-project-using-pep-420-namespace-packages/50175552#50175552 + # noinspection PyProtectedMember + py._path.local.LocalPath.pypkgpath = collection_pypkgpath # pylint: disable=protected-access + + +pytest_configure() diff --git a/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_coverage.py b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_coverage.py new file mode 100644 index 0000000000..b05298ab0b --- /dev/null +++ b/test/lib/ansible_test/_util/target/pytest/plugins/ansible_pytest_coverage.py @@ -0,0 +1,68 @@ +"""Monkey patch os._exit when running under coverage so we don't lose coverage data in forks, such as with `pytest --boxed`. 
PYTEST_DONT_REWRITE""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +def pytest_configure(): + """Configure this pytest plugin.""" + try: + if pytest_configure.executed: + return + except AttributeError: + pytest_configure.executed = True + + try: + import coverage + except ImportError: + coverage = None + + try: + coverage.Coverage + except AttributeError: + coverage = None + + if not coverage: + return + + import gc + import os + + coverage_instances = [] + + for obj in gc.get_objects(): + if isinstance(obj, coverage.Coverage): + coverage_instances.append(obj) + + if not coverage_instances: + coverage_config = os.environ.get('COVERAGE_CONF') + + if not coverage_config: + return + + coverage_output = os.environ.get('COVERAGE_FILE') + + if not coverage_output: + return + + cov = coverage.Coverage(config_file=coverage_config) + coverage_instances.append(cov) + else: + cov = None + + # noinspection PyProtectedMember + os_exit = os._exit # pylint: disable=protected-access + + def coverage_exit(*args, **kwargs): + for instance in coverage_instances: + instance.stop() + instance.save() + + os_exit(*args, **kwargs) + + os._exit = coverage_exit # pylint: disable=protected-access + + if cov: + cov.start() + + +pytest_configure() diff --git a/test/lib/ansible_test/_util/target/sanity/compile/compile.py b/test/lib/ansible_test/_util/target/sanity/compile/compile.py new file mode 100755 index 0000000000..3f6fc96260 --- /dev/null +++ b/test/lib/ansible_test/_util/target/sanity/compile/compile.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +"""Python syntax checker with lint friendly output.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import sys + +ENCODING = 'utf-8' +ERRORS = 'replace' +Text = type(u'') + + +def main(): + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + with open(path, 'rb') as source_fd: + source = source_fd.read() + + try: + compile(source, path, 'exec', dont_inherit=True) + except SyntaxError as ex: + extype, message, lineno, offset = type(ex), ex.text, ex.lineno, ex.offset + except BaseException as ex: # pylint: disable=broad-except + extype, message, lineno, offset = type(ex), str(ex), 0, 0 + else: + continue + + result = "%s:%d:%d: %s: %s" % (path, lineno, offset, extype.__name__, safe_message(message)) + + if sys.version_info <= (3,): + result = result.encode(ENCODING, ERRORS) + + print(result) + + +def safe_message(value): + """Given an input value as text or bytes, return the first non-empty line as text, ensuring it can be round-tripped as UTF-8.""" + if isinstance(value, Text): + value = value.encode(ENCODING, ERRORS) + + value = value.decode(ENCODING, ERRORS) + value = value.strip().splitlines()[0].strip() + + return value + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/target/sanity/import/importer.py b/test/lib/ansible_test/_util/target/sanity/import/importer.py new file mode 100755 index 0000000000..f0659d9b4e --- /dev/null +++ b/test/lib/ansible_test/_util/target/sanity/import/importer.py @@ -0,0 +1,541 @@ +#!/usr/bin/env python +"""Import the given python module(s) and report error(s) encountered.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + + +def main(): + """ + Main program function used to isolate globals from imported code. + Changes to globals in imported modules on Python 2.x will overwrite our own globals. 
+ """ + import ansible + import contextlib + import datetime + import json + import os + import re + import runpy + import subprocess + import sys + import traceback + import types + import warnings + + ansible_path = os.path.dirname(os.path.dirname(ansible.__file__)) + temp_path = os.environ['SANITY_TEMP_PATH'] + os.path.sep + external_python = os.environ.get('SANITY_EXTERNAL_PYTHON') or sys.executable + collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME') + collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATH') + import_type = os.environ.get('SANITY_IMPORTER_TYPE') + + try: + # noinspection PyCompatibility + from importlib import import_module + except ImportError: + def import_module(name): + __import__(name) + return sys.modules[name] + + try: + # noinspection PyCompatibility + from StringIO import StringIO + except ImportError: + from io import StringIO + + if collection_full_name: + # allow importing code from collections when testing a collection + from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native, text_type + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder + from ansible.utils.collection_loader import _collection_finder + + yaml_to_json_path = os.path.join(os.path.dirname(__file__), 'yaml_to_json.py') + yaml_to_dict_cache = {} + + # unique ISO date marker matching the one present in yaml_to_json.py + iso_date_marker = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' + iso_date_re = re.compile('^%s([0-9]{4})-([0-9]{2})-([0-9]{2})$' % iso_date_marker) + + def parse_value(value): + """Custom value parser for JSON deserialization that recognizes our internal ISO date format.""" + if isinstance(value, text_type): + match = iso_date_re.search(value) + + if match: + value = datetime.date(int(match.group(1)), int(match.group(2)), int(match.group(3))) + + return value + + def object_hook(data): + """Object hook for custom ISO date deserialization from JSON.""" + return dict((key, parse_value(value)) for key, value in data.items()) + + def yaml_to_dict(yaml, content_id): + """ + Return a Python dict version of the provided YAML. + Conversion is done in a subprocess since the current Python interpreter does not have access to PyYAML. 
+ """ + if content_id in yaml_to_dict_cache: + return yaml_to_dict_cache[content_id] + + try: + cmd = [external_python, yaml_to_json_path] + proc = subprocess.Popen([to_bytes(c) for c in cmd], # pylint: disable=consider-using-with + stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout_bytes, stderr_bytes = proc.communicate(to_bytes(yaml)) + + if proc.returncode != 0: + raise Exception('command %s failed with return code %d: %s' % ([to_native(c) for c in cmd], proc.returncode, to_native(stderr_bytes))) + + data = yaml_to_dict_cache[content_id] = json.loads(to_text(stdout_bytes), object_hook=object_hook) + + return data + except Exception as ex: + raise Exception('internal importer error - failed to parse yaml: %s' % to_native(ex)) + + _collection_finder._meta_yml_to_dict = yaml_to_dict # pylint: disable=protected-access + + collection_loader = _AnsibleCollectionFinder(paths=[collection_root]) + # noinspection PyProtectedMember + collection_loader._install() # pylint: disable=protected-access + else: + # do not support collection loading when not testing a collection + collection_loader = None + + # remove all modules under the ansible package + list(map(sys.modules.pop, [m for m in sys.modules if m.partition('.')[0] == ansible.__name__])) + + if import_type == 'module': + # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading + # this more accurately reflects the environment that AnsiballZ runs modules under + # it also avoids issues with imports in the ansible package that are not allowed + ansible_module = types.ModuleType(ansible.__name__) + ansible_module.__file__ = ansible.__file__ + ansible_module.__path__ = ansible.__path__ + ansible_module.__package__ = ansible.__package__ + + sys.modules[ansible.__name__] = ansible_module + + class ImporterAnsibleModuleException(Exception): + """Exception thrown during initialization of ImporterAnsibleModule.""" + + class ImporterAnsibleModule: + """Replacement for AnsibleModule to support import testing.""" + def __init__(self, *args, **kwargs): + raise ImporterAnsibleModuleException() + + class RestrictedModuleLoader: + """Python module loader that restricts inappropriate imports.""" + def __init__(self, path, name, restrict_to_module_paths): + self.path = path + self.name = name + self.loaded_modules = set() + self.restrict_to_module_paths = restrict_to_module_paths + + def find_module(self, fullname, path=None): + """Return self if the given fullname is restricted, otherwise return None. 
+ :param fullname: str + :param path: str + :return: RestrictedModuleLoader | None + """ + if fullname in self.loaded_modules: + return None # ignore modules that are already being loaded + + if is_name_in_namepace(fullname, ['ansible']): + if not self.restrict_to_module_paths: + return None # for non-modules, everything in the ansible namespace is allowed + + if fullname in ('ansible.module_utils.basic',): + return self # intercept loading so we can modify the result + + if is_name_in_namepace(fullname, ['ansible.module_utils', self.name]): + return None # module_utils and module under test are always allowed + + if any(os.path.exists(candidate_path) for candidate_path in convert_ansible_name_to_absolute_paths(fullname)): + return self # restrict access to ansible files that exist + + return None # ansible file does not exist, do not restrict access + + if is_name_in_namepace(fullname, ['ansible_collections']): + if not collection_loader: + return self # restrict access to collections when we are not testing a collection + + if not self.restrict_to_module_paths: + return None # for non-modules, everything in the ansible namespace is allowed + + if is_name_in_namepace(fullname, ['ansible_collections...plugins.module_utils', self.name]): + return None # module_utils and module under test are always allowed + + if collection_loader.find_module(fullname, path): + return self # restrict access to collection files that exist + + return None # collection file does not exist, do not restrict access + + # not a namespace we care about + return None + + def load_module(self, fullname): + """Raise an ImportError. + :type fullname: str + """ + if fullname == 'ansible.module_utils.basic': + module = self.__load_module(fullname) + + # stop Ansible module execution during AnsibleModule instantiation + module.AnsibleModule = ImporterAnsibleModule + # no-op for _load_params since it may be called before instantiating AnsibleModule + module._load_params = lambda *args, **kwargs: {} # pylint: disable=protected-access + + return module + + raise ImportError('import of "%s" is not allowed in this context' % fullname) + + def __load_module(self, fullname): + """Load the requested module while avoiding infinite recursion. + :type fullname: str + :rtype: module + """ + self.loaded_modules.add(fullname) + return import_module(fullname) + + def run(restrict_to_module_paths): + """Main program function.""" + base_dir = os.getcwd() + messages = set() + + for path in sys.argv[1:] or sys.stdin.read().splitlines(): + name = convert_relative_path_to_name(path) + test_python_module(path, name, base_dir, messages, restrict_to_module_paths) + + if messages: + sys.exit(10) + + def test_python_module(path, name, base_dir, messages, restrict_to_module_paths): + """Test the given python module by importing it. 
+ :type path: str + :type name: str + :type base_dir: str + :type messages: set[str] + :type restrict_to_module_paths: bool + """ + if name in sys.modules: + return # cannot be tested because it has already been loaded + + is_ansible_module = (path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/')) and os.path.basename(path) != '__init__.py' + run_main = is_ansible_module + + if path == 'lib/ansible/modules/async_wrapper.py': + # async_wrapper is a non-standard Ansible module (does not use AnsibleModule) so we cannot test the main function + run_main = False + + capture_normal = Capture() + capture_main = Capture() + + run_module_ok = False + + try: + with monitor_sys_modules(path, messages): + with restrict_imports(path, name, messages, restrict_to_module_paths): + with capture_output(capture_normal): + import_module(name) + + if run_main: + run_module_ok = is_ansible_module + + with monitor_sys_modules(path, messages): + with restrict_imports(path, name, messages, restrict_to_module_paths): + with capture_output(capture_main): + runpy.run_module(name, run_name='__main__', alter_sys=True) + except ImporterAnsibleModuleException: + # module instantiated AnsibleModule without raising an exception + if not run_module_ok: + if is_ansible_module: + report_message(path, 0, 0, 'module-guard', "AnsibleModule instantiation not guarded by `if __name__ == '__main__'`", messages) + else: + report_message(path, 0, 0, 'non-module', "AnsibleModule instantiated by import of non-module", messages) + except BaseException as ex: # pylint: disable=locally-disabled, broad-except + # intentionally catch all exceptions, including calls to sys.exit + exc_type, _exc, exc_tb = sys.exc_info() + message = str(ex) + results = list(reversed(traceback.extract_tb(exc_tb))) + line = 0 + offset = 0 + full_path = os.path.join(base_dir, path) + base_path = base_dir + os.path.sep + source = None + + # avoid line wraps in messages + message = re.sub(r'\n *', ': ', message) + + for result in results: + if result[0] == full_path: + # save the line number for the file under test + line = result[1] or 0 + + if not source and result[0].startswith(base_path) and not result[0].startswith(temp_path): + # save the first path and line number in the traceback which is in our source tree + source = (os.path.relpath(result[0], base_path), result[1] or 0, 0) + + if isinstance(ex, SyntaxError): + # SyntaxError has better information than the traceback + if ex.filename == full_path: # pylint: disable=locally-disabled, no-member + # syntax error was reported in the file under test + line = ex.lineno or 0 # pylint: disable=locally-disabled, no-member + offset = ex.offset or 0 # pylint: disable=locally-disabled, no-member + elif ex.filename.startswith(base_path) and not ex.filename.startswith(temp_path): # pylint: disable=locally-disabled, no-member + # syntax error was reported in our source tree + source = (os.path.relpath(ex.filename, base_path), ex.lineno or 0, ex.offset or 0) # pylint: disable=locally-disabled, no-member + + # remove the filename and line number from the message + # either it was extracted above, or it's not really useful information + message = re.sub(r' \(.*?, line [0-9]+\)$', '', message) + + if source and source[0] != path: + message += ' (at %s:%d:%d)' % (source[0], source[1], source[2]) + + report_message(path, line, offset, 'traceback', '%s: %s' % (exc_type.__name__, message), messages) + finally: + capture_report(path, capture_normal, messages) + capture_report(path, capture_main, messages) 
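+    # The helper below treats empty segments in a namespace pattern as wildcards; this is how RestrictedModuleLoader +    # matches 'ansible_collections...plugins.module_utils' above. Illustrative examples (hypothetical 'ns.coll' names, +    # not executed by the test): +    # +    #   is_name_in_namepace('ansible.module_utils.basic', ['ansible.module_utils'])  # True +    #   is_name_in_namepace('ansible_collections.ns.coll.plugins.module_utils.foo', ['ansible_collections...plugins.module_utils'])  # True +    #   is_name_in_namepace('ansible.plugins.action.copy', ['ansible.module_utils'])  # False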
+ + def is_name_in_namepace(name, namespaces): + """Returns True if the given name is one of the given namespaces, otherwise returns False.""" + name_parts = name.split('.') + + for namespace in namespaces: + namespace_parts = namespace.split('.') + length = min(len(name_parts), len(namespace_parts)) + + truncated_name = name_parts[0:length] + truncated_namespace = namespace_parts[0:length] + + # empty parts in the namespace are treated as wildcards + # to simplify the comparison, use those empty parts to indicate the positions in the name to be empty as well + for idx, part in enumerate(truncated_namespace): + if not part: + truncated_name[idx] = part + + # example: name=ansible, allowed_name=ansible.module_utils + # example: name=ansible.module_utils.system.ping, allowed_name=ansible.module_utils + if truncated_name == truncated_namespace: + return True + + return False + + def check_sys_modules(path, before, messages): + """Check for unwanted changes to sys.modules. + :type path: str + :type before: dict[str, module] + :type messages: set[str] + """ + after = sys.modules + removed = set(before.keys()) - set(after.keys()) + changed = set(key for key, value in before.items() if key in after and value != after[key]) + + # additions are checked by our custom PEP 302 loader, so we don't need to check them again here + + for module in sorted(removed): + report_message(path, 0, 0, 'unload', 'unloading of "%s" in sys.modules is not supported' % module, messages) + + for module in sorted(changed): + report_message(path, 0, 0, 'reload', 'reloading of "%s" in sys.modules is not supported' % module, messages) + + def convert_ansible_name_to_absolute_paths(name): + """Calculate the module path from the given name. + :type name: str + :rtype: list[str] + """ + return [ + os.path.join(ansible_path, name.replace('.', os.path.sep)), + os.path.join(ansible_path, name.replace('.', os.path.sep)) + '.py', + ] + + def convert_relative_path_to_name(path): + """Calculate the module name from the given path. + :type path: str + :rtype: str + """ + if path.endswith('/__init__.py'): + clean_path = os.path.dirname(path) + else: + clean_path = path + + clean_path = os.path.splitext(clean_path)[0] + + name = clean_path.replace(os.path.sep, '.') + + if collection_loader: + # when testing collections the relative paths (and names) being tested are within the collection under test + name = 'ansible_collections.%s.%s' % (collection_full_name, name) + else: + # when testing ansible all files being imported reside under the lib directory + name = name[len('lib/'):] + + return name + + class Capture: + """Captured output and/or exception.""" + def __init__(self): + self.stdout = StringIO() + self.stderr = StringIO() + + def capture_report(path, capture, messages): + """Report on captured output. + :type path: str + :type capture: Capture + :type messages: set[str] + """ + if capture.stdout.getvalue(): + first = capture.stdout.getvalue().strip().splitlines()[0].strip() + report_message(path, 0, 0, 'stdout', first, messages) + + if capture.stderr.getvalue(): + first = capture.stderr.getvalue().strip().splitlines()[0].strip() + report_message(path, 0, 0, 'stderr', first, messages) + + def report_message(path, line, column, code, message, messages): + """Report message if not already reported. 
+ :type path: str + :type line: int + :type column: int + :type code: str + :type message: str + :type messages: set[str] + """ + message = '%s:%d:%d: %s: %s' % (path, line, column, code, message) + + if message not in messages: + messages.add(message) + print(message) + + @contextlib.contextmanager + def restrict_imports(path, name, messages, restrict_to_module_paths): + """Restrict available imports. + :type path: str + :type name: str + :type messages: set[str] + :type restrict_to_module_paths: bool + """ + restricted_loader = RestrictedModuleLoader(path, name, restrict_to_module_paths) + + # noinspection PyTypeChecker + sys.meta_path.insert(0, restricted_loader) + sys.path_importer_cache.clear() + + try: + yield + finally: + if import_type == 'plugin': + from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder + _AnsibleCollectionFinder._remove() # pylint: disable=protected-access + + if sys.meta_path[0] != restricted_loader: + report_message(path, 0, 0, 'metapath', 'changes to sys.meta_path[0] are not permitted', messages) + + while restricted_loader in sys.meta_path: + # noinspection PyTypeChecker + sys.meta_path.remove(restricted_loader) + + sys.path_importer_cache.clear() + + @contextlib.contextmanager + def monitor_sys_modules(path, messages): + """Monitor sys.modules for unwanted changes, reverting any additions made to our own namespaces.""" + snapshot = sys.modules.copy() + + try: + yield + finally: + check_sys_modules(path, snapshot, messages) + + for key in set(sys.modules.keys()) - set(snapshot.keys()): + if is_name_in_namepace(key, ('ansible', 'ansible_collections')): + del sys.modules[key] # only unload our own code since we know it's native Python + + @contextlib.contextmanager + def capture_output(capture): + """Capture sys.stdout and sys.stderr. + :type capture: Capture + """ + old_stdout = sys.stdout + old_stderr = sys.stderr + + sys.stdout = capture.stdout + sys.stderr = capture.stderr + + # clear all warnings registries to make all warnings available + for module in sys.modules.values(): + try: + # noinspection PyUnresolvedReferences + module.__warningregistry__.clear() + except AttributeError: + pass + + with warnings.catch_warnings(): + warnings.simplefilter('error') + + if sys.version_info[0] == 2: + warnings.filterwarnings( + "ignore", + "Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography," + " and will be removed in a future release.") + warnings.filterwarnings( + "ignore", + "Python 2 is no longer supported by the Python core team. Support for it is now deprecated in cryptography," + " and will be removed in the next release.") + + if sys.version_info[:2] == (3, 5): + warnings.filterwarnings( + "ignore", + "Python 3.5 support will be dropped in the next release ofcryptography. Please upgrade your Python.") + warnings.filterwarnings( + "ignore", + "Python 3.5 support will be dropped in the next release of cryptography. Please upgrade your Python.") + + if sys.version_info >= (3, 10): + # Temporary solution for Python 3.10 until find_spec is implemented in RestrictedModuleLoader. + # That implementation is dependent on find_spec being added to the controller's collection loader first. 
+ # The warning text is: main..RestrictedModuleLoader.find_spec() not found; falling back to find_module() + warnings.filterwarnings( + "ignore", + r"main\.\.RestrictedModuleLoader\.find_spec\(\) not found; falling back to find_module\(\)", + ) + # Temporary solution for Python 3.10 until exec_module is implemented in RestrictedModuleLoader. + # That implementation is dependent on exec_module being added to the controller's collection loader first. + # The warning text is: main..RestrictedModuleLoader.exec_module() not found; falling back to load_module() + warnings.filterwarnings( + "ignore", + r"main\.\.RestrictedModuleLoader\.exec_module\(\) not found; falling back to load_module\(\)", + ) + + # Temporary solution for Python 3.10 until find_spec is implemented in the controller's collection loader. + warnings.filterwarnings( + "ignore", + r"_Ansible.*Finder\.find_spec\(\) not found; falling back to find_module\(\)", + ) + # Temporary solution for Python 3.10 until exec_module is implemented in the controller's collection loader. + warnings.filterwarnings( + "ignore", + r"_Ansible.*Loader\.exec_module\(\) not found; falling back to load_module\(\)", + ) + + # Temporary solution until there is a vendored copy of distutils.version in module_utils. + # Some of our dependencies such as packaging.tags also import distutils, which we have no control over + # The warning text is: The distutils package is deprecated and slated for removal in Python 3.12. + # Use setuptools or check PEP 632 for potential alternatives + warnings.filterwarnings( + "ignore", + r"The distutils package is deprecated and slated for removal in Python 3\.12\. .*", + ) + + try: + yield + finally: + sys.stdout = old_stdout + sys.stderr = old_stderr + + run(import_type == 'module') + + +if __name__ == '__main__': + main() diff --git a/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py b/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py new file mode 100644 index 0000000000..09be9576d9 --- /dev/null +++ b/test/lib/ansible_test/_util/target/sanity/import/yaml_to_json.py @@ -0,0 +1,27 @@ +"""Read YAML from stdin and write JSON to stdout.""" +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import datetime +import json +import sys + +from yaml import load + +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader + +# unique ISO date marker matching the one present in importer.py +ISO_DATE_MARKER = 'isodate:f23983df-f3df-453c-9904-bcd08af468cc:' + + +def default(value): + if isinstance(value, datetime.date): + return '%s%s' % (ISO_DATE_MARKER, value.isoformat()) + + raise TypeError('cannot serialize type: %s' % type(value)) + + +json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout, default=default) diff --git a/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 new file mode 100644 index 0000000000..7e039bb415 --- /dev/null +++ b/test/lib/ansible_test/_util/target/setup/ConfigureRemotingForAnsible.ps1 @@ -0,0 +1,453 @@ +#Requires -Version 3.0 + +# Configure a Windows host for remote management with Ansible +# ----------------------------------------------------------- +# +# This script checks the current WinRM (PS Remoting) configuration and makes +# the necessary changes to allow Ansible to connect, authenticate and +# execute PowerShell commands. 
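+#
+# Illustrative example only (the script can be saved anywhere; the path below
+# is an assumption): run it from an elevated PowerShell prompt with:
+#
+#   powershell.exe -ExecutionPolicy ByPass -File .\ConfigureRemotingForAnsible.ps1 -Verbose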
+#
+# All events are logged to the Windows EventLog, useful for unattended runs.
+#
+# Use option -Verbose in order to see the verbose output messages.
+#
+# Use option -CertValidityDays to specify how long this certificate is valid
+# starting from today. So you would specify -CertValidityDays 3650 to get
+# a certificate valid for 10 years.
+#
+# Use option -ForceNewSSLCert if the system has been SysPrepped and a new
+# SSL Certificate must be forced on the WinRM Listener when re-running this
+# script. This is necessary when a new SID and CN name are created.
+#
+# Use option -EnableCredSSP to enable CredSSP as an authentication option.
+#
+# Use option -DisableBasicAuth to disable basic authentication.
+#
+# Use option -SkipNetworkProfileCheck to skip the network profile check.
+# Without specifying this the script will only run if the device's interfaces
+# are in DOMAIN or PRIVATE zones. Provide this switch if you want to enable
+# WinRM on a device with an interface in a PUBLIC zone.
+#
+# Use option -SubjectName to specify the CN name of the certificate. This
+# defaults to the system's hostname and generally should not be specified.
+
+# Written by Trond Hindenes
+# Updated by Chris Church
+# Updated by Michael Crilly
+# Updated by Anton Ouzounov
+# Updated by Nicolas Simond
+# Updated by Dag Wieërs
+# Updated by Jordan Borean
+# Updated by Erwan Quélin
+# Updated by David Norman
+#
+# Version 1.0 - 2014-07-06
+# Version 1.1 - 2014-11-11
+# Version 1.2 - 2015-05-15
+# Version 1.3 - 2016-04-04
+# Version 1.4 - 2017-01-05
+# Version 1.5 - 2017-02-09
+# Version 1.6 - 2017-04-18
+# Version 1.7 - 2017-11-23
+# Version 1.8 - 2018-02-23
+# Version 1.9 - 2018-09-21
+
+# Support -Verbose option
+[CmdletBinding()]
+
+Param (
+    [string]$SubjectName = $env:COMPUTERNAME,
+    [int]$CertValidityDays = 1095,
+    [switch]$SkipNetworkProfileCheck,
+    $CreateSelfSignedCert = $true,
+    [switch]$ForceNewSSLCert,
+    [switch]$GlobalHttpFirewallAccess,
+    [switch]$DisableBasicAuth = $false,
+    [switch]$EnableCredSSP
+)
+
+Function Write-Log
+{
+    $Message = $args[0]
+    Write-EventLog -LogName Application -Source $EventSource -EntryType Information -EventId 1 -Message $Message
+}
+
+Function Write-VerboseLog
+{
+    $Message = $args[0]
+    Write-Verbose $Message
+    Write-Log $Message
+}
+
+Function Write-HostLog
+{
+    $Message = $args[0]
+    Write-Output $Message
+    Write-Log $Message
+}
+
+Function New-LegacySelfSignedCert
+{
+    Param (
+        [string]$SubjectName,
+        [int]$ValidDays = 1095
+    )
+
+    $hostnonFQDN = $env:computerName
+    $hostFQDN = [System.Net.Dns]::GetHostByName(($env:computerName)).Hostname
+    $SignatureAlgorithm = "SHA256"
+
+    $name = New-Object -COM "X509Enrollment.CX500DistinguishedName.1"
+    $name.Encode("CN=$SubjectName", 0)
+
+    $key = New-Object -COM "X509Enrollment.CX509PrivateKey.1"
+    $key.ProviderName = "Microsoft Enhanced RSA and AES Cryptographic Provider"
+    $key.KeySpec = 1
+    $key.Length = 4096
+    $key.SecurityDescriptor = "D:PAI(A;;0xd01f01ff;;;SY)(A;;0xd01f01ff;;;BA)(A;;0x80120089;;;NS)"
+    $key.MachineContext = 1
+    $key.Create()
+
+    $serverauthoid = New-Object -COM "X509Enrollment.CObjectId.1"
+    $serverauthoid.InitializeFromValue("1.3.6.1.5.5.7.3.1")
+    $ekuoids = New-Object -COM "X509Enrollment.CObjectIds.1"
+    $ekuoids.Add($serverauthoid)
+    $ekuext = New-Object -COM "X509Enrollment.CX509ExtensionEnhancedKeyUsage.1"
+    $ekuext.InitializeEncode($ekuoids)
+
+    $cert = New-Object -COM "X509Enrollment.CX509CertificateRequestCertificate.1"
+    $cert.InitializeFromPrivateKey(2, $key, "")
+    $cert.Subject = $name
+    $cert.Issuer = $cert.Subject
+    $cert.NotBefore = (Get-Date).AddDays(-1)
+    $cert.NotAfter = $cert.NotBefore.AddDays($ValidDays)
+
+    $SigOID = New-Object -ComObject X509Enrollment.CObjectId
+    $SigOID.InitializeFromValue(([Security.Cryptography.Oid]$SignatureAlgorithm).Value)
+
+    [string[]] $AlternativeName += $hostnonFQDN
+    $AlternativeName += $hostFQDN
+    $IAlternativeNames = New-Object -ComObject X509Enrollment.CAlternativeNames
+
+    foreach ($AN in $AlternativeName)
+    {
+        $AltName = New-Object -ComObject X509Enrollment.CAlternativeName
+        $AltName.InitializeFromString(0x3,$AN)
+        $IAlternativeNames.Add($AltName)
+    }
+
+    $SubjectAlternativeName = New-Object -ComObject X509Enrollment.CX509ExtensionAlternativeNames
+    $SubjectAlternativeName.InitializeEncode($IAlternativeNames)
+
+    [String[]]$KeyUsage = ("DigitalSignature", "KeyEncipherment")
+    $KeyUsageObj = New-Object -ComObject X509Enrollment.CX509ExtensionKeyUsage
+    $KeyUsageObj.InitializeEncode([int][Security.Cryptography.X509Certificates.X509KeyUsageFlags]($KeyUsage))
+    $KeyUsageObj.Critical = $true
+
+    $cert.X509Extensions.Add($KeyUsageObj)
+    $cert.X509Extensions.Add($ekuext)
+    $cert.SignatureInformation.HashAlgorithm = $SigOID
+    $cert.X509Extensions.Add($SubjectAlternativeName)
+    $cert.Encode()
+
+    $enrollment = New-Object -COM "X509Enrollment.CX509Enrollment.1"
+    $enrollment.InitializeFromRequest($cert)
+    $certdata = $enrollment.CreateRequest(0)
+    $enrollment.InstallResponse(2, $certdata, 0, "")
+
+    # extract/return the thumbprint from the generated cert
+    $parsed_cert = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2
+    $parsed_cert.Import([System.Text.Encoding]::UTF8.GetBytes($certdata))
+
+    return $parsed_cert.Thumbprint
+}
+
+Function Enable-GlobalHttpFirewallAccess
+{
+    Write-Verbose "Forcing global HTTP firewall access"
+    # this is a fairly naive implementation; could be more sophisticated about rule matching/collapsing
+    $fw = New-Object -ComObject HNetCfg.FWPolicy2
+
+    # try to find/enable the default rule first
+    $add_rule = $false
+    $matching_rules = $fw.Rules | Where-Object { $_.Name -eq "Windows Remote Management (HTTP-In)" }
+    $rule = $null
+    If ($matching_rules) {
+        If ($matching_rules -isnot [Array]) {
+            Write-Verbose "Editing existing single HTTP firewall rule"
+            $rule = $matching_rules
+        }
+        Else {
+            # try to find one with the All or Public profile first
+            Write-Verbose "Found multiple existing HTTP firewall rules..."
+            $rule = $matching_rules | ForEach-Object { $_.Profiles -band 4 }[0]
+
+            If (-not $rule -or $rule -is [Array]) {
+                Write-Verbose "Editing an arbitrary single HTTP firewall rule (multiple existed)"
+                # oh well, just pick the first one
+                $rule = $matching_rules[0]
+            }
+        }
+    }
+
+    If (-not $rule) {
+        Write-Verbose "Creating a new HTTP firewall rule"
+        $rule = New-Object -ComObject HNetCfg.FWRule
+        $rule.Name = "Windows Remote Management (HTTP-In)"
+        $rule.Description = "Inbound rule for Windows Remote Management via WS-Management. [TCP 5985]"
+        $add_rule = $true
+    }
+
+    $rule.Profiles = 0x7FFFFFFF
+    $rule.Protocol = 6
+    $rule.LocalPorts = 5985
+    $rule.RemotePorts = "*"
+    $rule.LocalAddresses = "*"
+    $rule.RemoteAddresses = "*"
+    $rule.Enabled = $true
+    $rule.Direction = 1
+    $rule.Action = 1
+    $rule.Grouping = "Windows Remote Management"
+
+    If ($add_rule) {
+        $fw.Rules.Add($rule)
+    }
+
+    Write-Verbose "HTTP firewall rule $($rule.Name) updated"
+}
+
+# Set up error handling.
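+# The Trap block below catches any terminating error, writes the error record
+# to output, and exits with a non-zero status so callers can detect failure.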
+Trap
+{
+    $_
+    Exit 1
+}
+$ErrorActionPreference = "Stop"
+
+# Get the ID and security principal of the current user account
+$myWindowsID=[System.Security.Principal.WindowsIdentity]::GetCurrent()
+$myWindowsPrincipal=new-object System.Security.Principal.WindowsPrincipal($myWindowsID)
+
+# Get the security principal for the Administrator role
+$adminRole=[System.Security.Principal.WindowsBuiltInRole]::Administrator
+
+# Check to see if we are currently running "as Administrator"
+if (-Not $myWindowsPrincipal.IsInRole($adminRole))
+{
+    Write-Output "ERROR: You need elevated Administrator privileges in order to run this script."
+    Write-Output "       Start Windows PowerShell by using the Run as Administrator option."
+    Exit 2
+}
+
+$EventSource = $MyInvocation.MyCommand.Name
+If (-Not $EventSource)
+{
+    $EventSource = "Powershell CLI"
+}
+
+If ([System.Diagnostics.EventLog]::Exists('Application') -eq $False -or [System.Diagnostics.EventLog]::SourceExists($EventSource) -eq $False)
+{
+    New-EventLog -LogName Application -Source $EventSource
+}
+
+# Detect PowerShell version.
+If ($PSVersionTable.PSVersion.Major -lt 3)
+{
+    Write-Log "PowerShell version 3 or higher is required."
+    Throw "PowerShell version 3 or higher is required."
+}
+
+# Find and start the WinRM service.
+Write-Verbose "Verifying WinRM service."
+If (!(Get-Service "WinRM"))
+{
+    Write-Log "Unable to find the WinRM service."
+    Throw "Unable to find the WinRM service."
+}
+ElseIf ((Get-Service "WinRM").Status -ne "Running")
+{
+    Write-Verbose "Setting WinRM service to start automatically on boot."
+    Set-Service -Name "WinRM" -StartupType Automatic
+    Write-Log "Set WinRM service to start automatically on boot."
+    Write-Verbose "Starting WinRM service."
+    Start-Service -Name "WinRM" -ErrorAction Stop
+    Write-Log "Started WinRM service."
+}
+
+# WinRM should be running; check that we have a PS session config.
+If (!(Get-PSSessionConfiguration -Verbose:$false) -or (!(Get-ChildItem WSMan:\localhost\Listener)))
+{
+    If ($SkipNetworkProfileCheck) {
+        Write-Verbose "Enabling PS Remoting without checking Network profile."
+        Enable-PSRemoting -SkipNetworkProfileCheck -Force -ErrorAction Stop
+        Write-Log "Enabled PS Remoting without checking Network profile."
+    }
+    Else {
+        Write-Verbose "Enabling PS Remoting."
+        Enable-PSRemoting -Force -ErrorAction Stop
+        Write-Log "Enabled PS Remoting."
+    }
+}
+Else
+{
+    Write-Verbose "PS Remoting is already enabled."
+}
+
+# Ensure LocalAccountTokenFilterPolicy is set to 1
+# https://github.com/ansible/ansible/issues/42978
+$token_path = "HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System"
+$token_prop_name = "LocalAccountTokenFilterPolicy"
+$token_key = Get-Item -Path $token_path
+$token_value = $token_key.GetValue($token_prop_name, $null)
+if ($token_value -ne 1) {
+    Write-Verbose "Setting LocalAccountTokenFilterPolicy to 1"
+    if ($null -ne $token_value) {
+        Remove-ItemProperty -Path $token_path -Name $token_prop_name
+    }
+    New-ItemProperty -Path $token_path -Name $token_prop_name -Value 1 -PropertyType DWORD > $null
+}
+
+# Make sure there is an SSL listener.
+$listeners = Get-ChildItem WSMan:\localhost\Listener
+If (!($listeners | Where-Object {$_.Keys -like "TRANSPORT=HTTPS"}))
+{
+    # We cannot use New-SelfSignedCertificate on 2012R2 and earlier
+    $thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays
+    Write-HostLog "Self-signed SSL certificate generated; thumbprint: $thumbprint"
+
+    # Create the hashtables of settings to be used.
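+    # For New-WSManInstance, the selector set identifies the listener instance
+    # (transport and address) and the value set supplies the properties to
+    # apply to it (hostname and certificate thumbprint).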
+    $valueset = @{
+        Hostname = $SubjectName
+        CertificateThumbprint = $thumbprint
+    }
+
+    $selectorset = @{
+        Transport = "HTTPS"
+        Address = "*"
+    }
+
+    Write-Verbose "Enabling SSL listener."
+    New-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset -ValueSet $valueset
+    Write-Log "Enabled SSL listener."
+}
+Else
+{
+    Write-Verbose "SSL listener is already active."
+
+    # Force a new SSL cert on the listener if $ForceNewSSLCert was specified.
+    If ($ForceNewSSLCert)
+    {
+        # We cannot use New-SelfSignedCertificate on 2012R2 and earlier
+        $thumbprint = New-LegacySelfSignedCert -SubjectName $SubjectName -ValidDays $CertValidityDays
+        Write-HostLog "Self-signed SSL certificate generated; thumbprint: $thumbprint"
+
+        $valueset = @{
+            CertificateThumbprint = $thumbprint
+            Hostname = $SubjectName
+        }
+
+        # Delete the listener for SSL
+        $selectorset = @{
+            Address = "*"
+            Transport = "HTTPS"
+        }
+        Remove-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset
+
+        # Add new Listener with new SSL cert
+        New-WSManInstance -ResourceURI 'winrm/config/Listener' -SelectorSet $selectorset -ValueSet $valueset
+    }
+}
+
+# Check for basic authentication.
+$basicAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object {$_.Name -eq "Basic"}
+
+If ($DisableBasicAuth)
+{
+    If (($basicAuthSetting.Value) -eq $true)
+    {
+        Write-Verbose "Disabling basic auth support."
+        Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $false
+        Write-Log "Disabled basic auth support."
+    }
+    Else
+    {
+        Write-Verbose "Basic auth is already disabled."
+    }
+}
+Else
+{
+    If (($basicAuthSetting.Value) -eq $false)
+    {
+        Write-Verbose "Enabling basic auth support."
+        Set-Item -Path "WSMan:\localhost\Service\Auth\Basic" -Value $true
+        Write-Log "Enabled basic auth support."
+    }
+    Else
+    {
+        Write-Verbose "Basic auth is already enabled."
+    }
+}
+
+# If EnableCredSSP is set to true
+If ($EnableCredSSP)
+{
+    # Check for CredSSP authentication
+    $credsspAuthSetting = Get-ChildItem WSMan:\localhost\Service\Auth | Where-Object {$_.Name -eq "CredSSP"}
+    If (($credsspAuthSetting.Value) -eq $false)
+    {
+        Write-Verbose "Enabling CredSSP auth support."
+        Enable-WSManCredSSP -role server -Force
+        Write-Log "Enabled CredSSP auth support."
+    }
+}
+
+If ($GlobalHttpFirewallAccess) {
+    Enable-GlobalHttpFirewallAccess
+}
+
+# Configure firewall to allow WinRM HTTPS connections.
+$fwtest1 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS"
+$fwtest2 = netsh advfirewall firewall show rule name="Allow WinRM HTTPS" profile=any
+If ($fwtest1.count -lt 5)
+{
+    Write-Verbose "Adding firewall rule to allow WinRM HTTPS."
+    netsh advfirewall firewall add rule profile=any name="Allow WinRM HTTPS" dir=in localport=5986 protocol=TCP action=allow
+    Write-Log "Added firewall rule to allow WinRM HTTPS."
+}
+ElseIf (($fwtest1.count -ge 5) -and ($fwtest2.count -lt 5))
+{
+    Write-Verbose "Updating firewall rule to allow WinRM HTTPS for any profile."
+    netsh advfirewall firewall set rule name="Allow WinRM HTTPS" new profile=any
+    Write-Log "Updated firewall rule to allow WinRM HTTPS for any profile."
+}
+Else
+{
+    Write-Verbose "Firewall rule already exists to allow WinRM HTTPS."
+}
+
+# Test a remoting connection to localhost, which should work.
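+# HTTP is tested with Invoke-Command; HTTPS is tested with New-PSSession using
+# session options that skip certificate checks, since the listener uses a
+# self-signed certificate.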
+$httpResult = Invoke-Command -ComputerName "localhost" -ScriptBlock {$env:COMPUTERNAME} -ErrorVariable httpError -ErrorAction SilentlyContinue
+$httpsOptions = New-PSSessionOption -SkipCACheck -SkipCNCheck -SkipRevocationCheck
+
+$httpsResult = New-PSSession -UseSSL -ComputerName "localhost" -SessionOption $httpsOptions -ErrorVariable httpsError -ErrorAction SilentlyContinue
+
+If ($httpResult -and $httpsResult)
+{
+    Write-Verbose "HTTP: Enabled | HTTPS: Enabled"
+}
+ElseIf ($httpsResult -and !$httpResult)
+{
+    Write-Verbose "HTTP: Disabled | HTTPS: Enabled"
+}
+ElseIf ($httpResult -and !$httpsResult)
+{
+    Write-Verbose "HTTP: Enabled | HTTPS: Disabled"
+}
+Else
+{
+    Write-Log "Unable to establish an HTTP or HTTPS remoting session."
+    Throw "Unable to establish an HTTP or HTTPS remoting session."
+}
+Write-VerboseLog "PS Remoting has been successfully configured for Ansible."
diff --git a/test/lib/ansible_test/_util/target/setup/docker.sh b/test/lib/ansible_test/_util/target/setup/docker.sh
new file mode 100644
index 0000000000..ea60e1a6f3
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/setup/docker.sh
@@ -0,0 +1,13 @@
+#!/bin/sh

+set -eu

+# Required for newer mysql-server packages to install/upgrade on Ubuntu 16.04.
+rm -f /usr/sbin/policy-rc.d

+# Improve prompts on remote host for interactive use.
+# Writing the file with `cat << EOF > ~/.bashrc` sometimes flakes since /tmp
+# may not be ready yet in the container, so build the file with echo instead.
+echo "alias ls='ls --color=auto'" > ~/.bashrc
+echo "export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '" >> ~/.bashrc
+echo "cd ~/ansible/" >> ~/.bashrc
diff --git a/test/lib/ansible_test/_util/target/setup/remote.sh b/test/lib/ansible_test/_util/target/setup/remote.sh
new file mode 100644
index 0000000000..9348ac6f9f
--- /dev/null
+++ b/test/lib/ansible_test/_util/target/setup/remote.sh
@@ -0,0 +1,185 @@
+#!/bin/sh

+set -eu

+platform=#{platform}
+platform_version=#{platform_version}
+python_version=#{python_version}

+python_interpreter="python${python_version}"

+cd ~/

+install_pip () {
+    if ! "${python_interpreter}" -m pip.__main__ --version --disable-pip-version-check 2>/dev/null; then
+        case "${python_version}" in
+            *)
+                pip_bootstrap_url="https://ansible-ci-files.s3.amazonaws.com/ansible-test/get-pip-20.3.4.py"
+                ;;
+        esac
+        curl --silent --show-error "${pip_bootstrap_url}" -o /tmp/get-pip.py
+        "${python_interpreter}" /tmp/get-pip.py --disable-pip-version-check --quiet
+        rm /tmp/get-pip.py
+    fi
+}

+if [ "${platform}" = "freebsd" ]; then
+    py_version="$(echo "${python_version}" | tr -d '.')"

+    if [ "${py_version}" = "27" ]; then
+        # on Python 2.7 our only option is to use virtualenv
+        virtualenv_pkg="py27-virtualenv"
+    else
+        # on Python 3.x we'll use the built-in venv instead
+        virtualenv_pkg=""
+    fi

+    # Declare platform/python version combinations which do not have supporting OS packages available.
+    # For these combinations ansible-test will use pip to install the requirements instead.
+    case "${platform_version}/${python_version}" in
+        "11.4/3.8")
+            have_os_packages=""
+            ;;
+        "12.2/3.8")
+            have_os_packages=""
+            ;;
+        *)
+            have_os_packages="yes"
+            ;;
+    esac

+    # PyYAML is never installed with an OS package since it does not include libyaml support.
+    # Instead, ansible-test will always install it using pip.
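+    # Select OS package names for Jinja2 and cryptography only when packages
+    # are available for this platform/python combination; leaving the values
+    # empty below causes pip to install those requirements later instead.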
+ if [ "${have_os_packages}" ]; then + jinja2_pkg="py${py_version}-Jinja2" + cryptography_pkg="py${py_version}-cryptography" + else + jinja2_pkg="" + cryptography_pkg="" + fi + + while true; do + # shellcheck disable=SC2086 + env ASSUME_ALWAYS_YES=YES pkg bootstrap && \ + pkg install -q -y \ + bash \ + curl \ + gtar \ + libyaml \ + "python${py_version}" \ + ${jinja2_pkg} \ + ${cryptography_pkg} \ + ${virtualenv_pkg} \ + sudo \ + && break + echo "Failed to install packages. Sleeping before trying again..." + sleep 10 + done + + install_pip + + if ! grep '^PermitRootLogin yes$' /etc/ssh/sshd_config > /dev/null; then + sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config + service sshd restart + fi +elif [ "${platform}" = "rhel" ]; then + if grep '8\.' /etc/redhat-release; then + py_version="$(echo "${python_version}" | tr -d '.')" + + if [ "${py_version}" = "36" ]; then + py_pkg_prefix="python3" + else + py_pkg_prefix="python${py_version}" + fi + + while true; do + yum module install -q -y "python${py_version}" && \ + yum install -q -y \ + gcc \ + "${py_pkg_prefix}-devel" \ + "${py_pkg_prefix}-jinja2" \ + "${py_pkg_prefix}-cryptography" \ + iptables \ + && break + echo "Failed to install packages. Sleeping before trying again..." + sleep 10 + done + else + while true; do + yum install -q -y \ + gcc \ + python-devel \ + python-virtualenv \ + python2-cryptography \ + && break + echo "Failed to install packages. Sleeping before trying again..." + sleep 10 + done + + install_pip + fi + + # pin packaging and pyparsing to match the downstream vendored versions + "${python_interpreter}" -m pip install packaging==20.4 pyparsing==2.4.7 --disable-pip-version-check +elif [ "${platform}" = "centos" ]; then + while true; do + yum install -q -y \ + gcc \ + python-devel \ + python-virtualenv \ + python2-cryptography \ + libffi-devel \ + openssl-devel \ + && break + echo "Failed to install packages. Sleeping before trying again..." + sleep 10 + done + + install_pip +elif [ "${platform}" = "osx" ]; then + while true; do + pip install --disable-pip-version-check --quiet \ + 'virtualenv==16.7.10' \ + && break + echo "Failed to install packages. Sleeping before trying again..." + sleep 10 + done +elif [ "${platform}" = "aix" ]; then + chfs -a size=1G / + chfs -a size=4G /usr + chfs -a size=1G /var + chfs -a size=1G /tmp + chfs -a size=2G /opt + while true; do + yum install -q -y \ + gcc \ + libffi-devel \ + python-jinja2 \ + python-cryptography \ + python-pip && \ + pip install --disable-pip-version-check --quiet \ + 'virtualenv==16.7.10' \ + && break + echo "Failed to install packages. Sleeping before trying again..." + sleep 10 + done +fi + +# Improve prompts on remote host for interactive use. +# shellcheck disable=SC1117 +cat << EOF > ~/.bashrc +if ls --color > /dev/null 2>&1; then + alias ls='ls --color' +elif ls -G > /dev/null 2>&1; then + alias ls='ls -G' +fi +export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ ' +EOF + +# Make sure ~/ansible/ is the starting directory for interactive shells. 
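+# On macOS, BASH_SILENCE_DEPRECATION_WARNING also suppresses the notice bash
+# prints about zsh being the default shell.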
+if [ "${platform}" = "osx" ]; then + echo "cd ~/ansible/" >> ~/.bashrc +elif [ "${platform}" = "macos" ] ; then + echo "export BASH_SILENCE_DEPRECATION_WARNING=1" >> ~/.bashrc + echo "cd ~/ansible/" >> ~/.bashrc +fi diff --git a/test/lib/ansible_test/_util/target/setup/ssh-keys.sh b/test/lib/ansible_test/_util/target/setup/ssh-keys.sh new file mode 100644 index 0000000000..7846f3fef0 --- /dev/null +++ b/test/lib/ansible_test/_util/target/setup/ssh-keys.sh @@ -0,0 +1,35 @@ +#!/bin/sh +# Configure SSH keys. + +ssh_public_key=#{ssh_public_key} +ssh_private_key=#{ssh_private_key} +ssh_key_type=#{ssh_key_type} + +ssh_path="${HOME}/.ssh" +private_key_path="${ssh_path}/id_${ssh_key_type}" + +if [ ! -f "${private_key_path}" ]; then + # write public/private ssh key pair + public_key_path="${private_key_path}.pub" + + # shellcheck disable=SC2174 + mkdir -m 0700 -p "${ssh_path}" + touch "${public_key_path}" "${private_key_path}" + chmod 0600 "${public_key_path}" "${private_key_path}" + echo "${ssh_public_key}" > "${public_key_path}" + echo "${ssh_private_key}" > "${private_key_path}" + + # add public key to authorized_keys + authoried_keys_path="${HOME}/.ssh/authorized_keys" + + # the existing file is overwritten to avoid conflicts (ex: RHEL on EC2 blocks root login) + cat "${public_key_path}" > "${authoried_keys_path}" + chmod 0600 "${authoried_keys_path}" + + # add localhost's server keys to known_hosts + known_hosts_path="${HOME}/.ssh/known_hosts" + + for key in /etc/ssh/ssh_host_*_key.pub; do + echo "localhost $(cat "${key}")" >> "${known_hosts_path}" + done +fi -- cgit v1.2.1