authorBrian Coca <bcoca@users.noreply.github.com>2017-08-15 16:38:59 -0400
committerGitHub <noreply@github.com>2017-08-15 16:38:59 -0400
commitf921369445900dcc756821e40c9257d5359087af (patch)
tree173a768f8dddfce15c4471479d3677fbe1b51cff
parent8b617aaef564052d05c41e033f7d166570d660b1 (diff)
downloadansible-f921369445900dcc756821e40c9257d5359087af.tar.gz
Ansible Config part2 (#27448)
* Ansible Config part2
- made dump_me nicer, added note this is not prod
- moved internal key removal function to vars
- carry tracebacks in errors; we can now show tracebacks for plugins on -vvv
- show inventory plugin tracebacks on -vvv
- minor fixes to cg groups plugin
- draft config from plugin docs
- made search path warning saner (top-level dirs only)
- correctly display config entries and others
- removed unneeded code
- commented out some connection-plugin specifics from base.yml
- also deprecated sudo/su
- updated ssh connection docs
- shared get_option method for connection plugins
- note about needing eval for defaults
- tailored yaml extension handling
- updated strategy entry
- for connection plugins, options load on plugin load
- allow for long types in definitions
- better display in ansible-doc
- cleaned up/updated source docs and base.yml
- added many descriptions
- deprecated include toggles, as include itself is deprecated
- draft backwards-compatible get_config
- fixes to ansible-config, added --only-changed
- some code reorg
- small license headers
- show default in doc type
- pushed module_utils details to -vvvvv
- work without a config file
- moved loader to its own file
- fixed rhn_register test
- fixed boto requirement in make tests
- fixed dynamic eval of defaults
- better doc code
- skip ipaddr filter tests when netaddr is missing
- removed devnull string from config
- better become resolution
* killed extra space with extreme prejudice, because it's an affront against all that is holy that two spaces touch each other!
shippable is timing out on some images, but merging as it passes most
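The recurring pattern across these changes is that plugin options are declared in the plugin's own documentation and resolved through a shared get_option() helper once the plugin loads, instead of being read from constants at import time. A minimal sketch of that pattern (simplified names, not the actual Ansible base classes):

    # Minimal sketch of the documented-options pattern described above.
    # DOCUMENTATION_OPTIONS and ConnectionSketch are illustrative names,
    # not the real Ansible classes.
    DOCUMENTATION_OPTIONS = {
        'ssh_args': {
            'default': '-C -o ControlMaster=auto -o ControlPersist=60s',
            'env': [{'name': 'ANSIBLE_SSH_ARGS'}],
            'ini': [{'key': 'ssh_args', 'section': 'ssh_connection'}],
        },
    }

    class ConnectionSketch(object):
        def __init__(self):
            self._options = {}

        def set_options(self, options):
            # values resolved from env/ini get pushed in on plugin load
            self._options.update(options)

        def get_option(self, name):
            # fall back to the default declared next to the option docs
            if name in self._options:
                return self._options[name]
            return DOCUMENTATION_OPTIONS[name].get('default')

    conn = ConnectionSketch()
    print(conn.get_option('ssh_args'))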
-rwxr-xr-xbin/ansible-connection2
-rw-r--r--lib/ansible/cli/adhoc.py12
-rw-r--r--lib/ansible/cli/config.py13
-rw-r--r--lib/ansible/cli/console.py2
-rw-r--r--lib/ansible/cli/doc.py148
-rw-r--r--lib/ansible/cli/pull.py2
-rw-r--r--lib/ansible/config/base.yml (renamed from lib/ansible/config/data/config.yml)873
-rw-r--r--lib/ansible/config/data.py22
-rw-r--r--lib/ansible/config/manager.py430
-rw-r--r--lib/ansible/constants.py81
-rw-r--r--lib/ansible/errors/__init__.py4
-rw-r--r--lib/ansible/executor/module_common.py4
-rw-r--r--lib/ansible/executor/task_executor.py1
-rw-r--r--lib/ansible/executor/task_queue_manager.py2
-rw-r--r--lib/ansible/inventory/manager.py7
-rw-r--r--lib/ansible/parsing/mod_args.py2
-rw-r--r--lib/ansible/parsing/plugin_docs.py83
-rw-r--r--lib/ansible/playbook/__init__.py2
-rw-r--r--lib/ansible/playbook/base.py5
-rw-r--r--lib/ansible/playbook/play_context.py146
-rw-r--r--lib/ansible/playbook/role/__init__.py2
-rw-r--r--lib/ansible/playbook/task.py2
-rw-r--r--lib/ansible/plugins/__init__.py548
-rw-r--r--lib/ansible/plugins/action/__init__.py16
-rw-r--r--lib/ansible/plugins/action/junos.py2
-rw-r--r--lib/ansible/plugins/action/synchronize.py2
-rw-r--r--lib/ansible/plugins/cache/__init__.py2
-rw-r--r--lib/ansible/plugins/callback/json.py11
-rw-r--r--lib/ansible/plugins/connection/__init__.py9
-rw-r--r--lib/ansible/plugins/connection/buildah.py20
-rw-r--r--lib/ansible/plugins/connection/netconf.py2
-rw-r--r--lib/ansible/plugins/connection/network_cli.py3
-rw-r--r--lib/ansible/plugins/connection/ssh.py143
-rw-r--r--lib/ansible/plugins/inventory/constructed_groups.py37
-rw-r--r--lib/ansible/plugins/inventory/yaml.py10
-rw-r--r--lib/ansible/plugins/loader.py588
-rw-r--r--lib/ansible/plugins/lookup/etcd.py19
-rw-r--r--lib/ansible/plugins/strategy/__init__.py3
-rw-r--r--lib/ansible/plugins/strategy/free.py2
-rw-r--r--lib/ansible/plugins/strategy/linear.py2
-rw-r--r--lib/ansible/template/__init__.py2
-rw-r--r--lib/ansible/template/safe_eval.py2
-rw-r--r--lib/ansible/utils/plugin_docs.py100
-rw-r--r--lib/ansible/vars/manager.py17
-rw-r--r--setup.py3
-rw-r--r--test/runner/lib/ansible_util.py9
-rw-r--r--test/sanity/pep8/legacy-files.txt1
-rw-r--r--test/units/executor/test_task_executor.py2
-rw-r--r--test/units/modules/cloud/amazon/test_data_pipeline.py8
-rw-r--r--test/units/modules/packaging/os/test_rhn_register.py3
-rw-r--r--test/units/plugins/filter/test_ipaddr.py5
-rw-r--r--test/units/plugins/lookup/test_password.py2
-rw-r--r--test/units/plugins/test_plugins.py2
53 files changed, 1855 insertions, 1565 deletions
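Most of the new weight sits in lib/ansible/config/manager.py and lib/ansible/plugins/loader.py. The ConfigManager surface exercised by the CLI hunks below can be sketched like this (both the import path and get_configuration_definitions() appear in the diff; the no-argument constructor falling back to the default config search is an assumption):

    # Sketch of the ConfigManager entry points this commit wires into the CLI.
    from ansible.config.manager import ConfigManager

    cm = ConfigManager()  # assumed to use the default ansible.cfg lookup
    defs = cm.get_configuration_definitions()  # base.yml definitions by name
    print(defs['DEFAULT_FORKS']['default'])    # -> 5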
diff --git a/bin/ansible-connection b/bin/ansible-connection
index a3e3c7cf41..e29c681815 100755
--- a/bin/ansible-connection
+++ b/bin/ansible-connection
@@ -45,7 +45,7 @@ from ansible.module_utils.six import PY3
from ansible.module_utils.six.moves import cPickle
from ansible.module_utils.connection import send_data, recv_data
from ansible.playbook.play_context import PlayContext
-from ansible.plugins import connection_loader
+from ansible.plugins.loader import connection_loader
from ansible.utils.path import unfrackpath, makedirs_safe
from ansible.errors import AnsibleConnectionFailure
from ansible.utils.display import Display
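All the loader singletons now live in ansible.plugins.loader rather than ansible.plugins, so out-of-tree code importing them needs the same one-line change as above. A quick way to check the move, using find_plugin() (the same call the doc.py hunks below rely on):

    # The loaders moved from ansible.plugins to ansible.plugins.loader.
    from ansible.plugins.loader import connection_loader

    # resolves the plugin file on disk without instantiating the plugin
    print(connection_loader.find_plugin('ssh'))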
diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
index 4891323946..8f1d706e96 100644
--- a/lib/ansible/cli/adhoc.py
+++ b/lib/ansible/cli/adhoc.py
@@ -29,7 +29,7 @@ from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
-from ansible.plugins import get_all_plugin_loaders
+from ansible.plugins.loader import get_all_plugin_loaders
try:
from __main__ import display
@@ -105,6 +105,9 @@ class AdHocCLI(CLI):
(sshpass, becomepass) = self.ask_passwords()
passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
+ # dynamically load any plugins
+ get_all_plugin_loaders()
+
loader, inventory, variable_manager = self._play_prereqs(self.options)
no_hosts = False
@@ -138,13 +141,6 @@ class AdHocCLI(CLI):
if self.options.module_name in ('include', 'include_role'):
raise AnsibleOptionsError("'%s' is not a valid action for ad-hoc commands" % self.options.module_name)
- # dynamically load any plugins from the playbook directory
- for name, obj in get_all_plugin_loaders():
- if obj.subdir:
- plugin_path = os.path.join('.', obj.subdir)
- if os.path.isdir(plugin_path):
- obj.add_directory(plugin_path)
-
play_ds = self._play_ds(pattern, self.options.seconds, self.options.poll_interval)
play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)
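The per-loader directory scan in the removed block no longer happens at this call site; the diff only shows the call to get_all_plugin_loaders() moving earlier, before _play_prereqs(), so plugins shipped next to the playbook are presumably registered before inventory parsing. For reference, the removed iteration as standalone code:

    # The removed block above as standalone code: get_all_plugin_loaders()
    # returns (name, PluginLoader) pairs, and each loader can be pointed at
    # a plugin subdirectory relative to the current directory.
    import os

    from ansible.plugins.loader import get_all_plugin_loaders

    for name, obj in get_all_plugin_loaders():
        if obj.subdir:
            plugin_path = os.path.join('.', obj.subdir)
            if os.path.isdir(plugin_path):
                obj.add_directory(plugin_path)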
diff --git a/lib/ansible/cli/config.py b/lib/ansible/cli/config.py
index bf36550735..89913f16ee 100644
--- a/lib/ansible/cli/config.py
+++ b/lib/ansible/cli/config.py
@@ -26,8 +26,7 @@ import sys
import yaml
from ansible.cli import CLI
-from ansible.config.data import Setting
-from ansible.config.manager import ConfigManager
+from ansible.config.manager import ConfigManager, Setting
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils._text import to_native, to_text
from ansible.parsing.yaml.dumper import AnsibleDumper
@@ -68,6 +67,8 @@ class ConfigCLI(CLI):
if self.action == "list":
self.parser.set_usage("usage: %prog list [options] ")
if self.action == "dump":
+ self.parser.add_option('--only-changed', dest='only_changed', action='store_true',
+ help="Only show configurations that have changed from the default")
self.parser.set_usage("usage: %prog dump [options] [-c ansible.cfg]")
elif self.action == "view":
self.parser.set_usage("usage: %prog view [options] [-c ansible.cfg] ")
@@ -154,14 +155,15 @@ class ConfigCLI(CLI):
'''
list all current configs reading lib/constants.py and shows env and config file setting names
'''
- self.pager(to_text(yaml.dump(self.config.initial_defs, Dumper=AnsibleDumper), errors='surrogate_or_strict'))
+ self.pager(to_text(yaml.dump(self.config.get_configuration_definitions(), Dumper=AnsibleDumper), errors='surrogate_or_strict'))
def execute_dump(self):
'''
Shows the current settings, merges ansible.cfg if specified
'''
+ # FIXME: deal with plugins, not just base config
text = []
- defaults = self.config.initial_defs.copy()
+ defaults = self.config.get_configuration_definitions().copy()
for setting in self.config.data.get_settings():
if setting.name in defaults:
defaults[setting.name] = setting
@@ -176,6 +178,7 @@ class ConfigCLI(CLI):
else:
color = 'green'
msg = "%s(%s) = %s" % (setting, 'default', defaults[setting].get('default'))
- text.append(stringc(msg, color))
+ if not self.options.only_changed or color == 'yellow':
+ text.append(stringc(msg, color))
self.pager(to_text('\n'.join(text), errors='surrogate_or_strict'))
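With this in place, `ansible-config dump --only-changed` hides every entry still at its default. The filter is just the color flag computed above; a toy restatement:

    # Toy restatement of the --only-changed filter: entries changed from
    # their default render 'yellow', defaults render 'green', and only
    # yellow survives when the flag is set.
    def keep(color, only_changed):
        return (not only_changed) or color == 'yellow'

    assert keep('green', only_changed=False)
    assert keep('yellow', only_changed=True)
    assert not keep('green', only_changed=True)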
diff --git a/lib/ansible/cli/console.py b/lib/ansible/cli/console.py
index 94d8de583e..9e9d93ee53 100644
--- a/lib/ansible/cli/console.py
+++ b/lib/ansible/cli/console.py
@@ -44,7 +44,7 @@ from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
-from ansible.plugins import module_loader
+from ansible.plugins.loader import module_loader
from ansible.utils import plugin_docs
from ansible.utils.color import stringc
diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py
index dab8677d14..664d2b7285 100644
--- a/lib/ansible/cli/doc.py
+++ b/lib/ansible/cli/doc.py
@@ -30,7 +30,7 @@ from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils.six import string_types
from ansible.parsing.yaml.dumper import AnsibleDumper
-from ansible.plugins import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, connection_loader, strategy_loader, PluginLoader
+from ansible.plugins.loader import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, connection_loader, strategy_loader, PluginLoader
from ansible.utils import plugin_docs
try:
from __main__ import display
@@ -66,7 +66,8 @@ class DocCLI(CLI):
self.parser.add_option("-a", "--all", action="store_true", default=False, dest='all_plugins',
help='Show documentation for all plugins')
self.parser.add_option("-t", "--type", action="store", default='module', dest='type', type='choice',
- help='Choose which plugin type', choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy'])
+ help='Choose which plugin type (defaults to "module")',
+ choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy'])
super(DocCLI, self).parse()
@@ -99,6 +100,10 @@ class DocCLI(CLI):
for i in self.options.module_path.split(os.pathsep):
loader.add_directory(i)
+ # save only top level paths for errors
+ search_paths = DocCLI.print_paths(loader)
+ loader._paths = None # reset so we can use subdirs below
+
# list plugins for type
if self.options.list_dir:
paths = loader._get_paths()
@@ -125,7 +130,7 @@ class DocCLI(CLI):
# if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True)
if filename is None:
- display.warning("%s %s not found in %s\n" % (plugin_type, plugin, DocCLI.print_paths(loader)))
+ display.warning("%s %s not found in:\n%s\n" % (plugin_type, plugin, search_paths))
continue
if any(filename.endswith(x) for x in C.BLACKLIST_EXTS):
@@ -255,7 +260,7 @@ class DocCLI(CLI):
# Uses a list to get the order right
ret = []
- for i in finder._get_paths():
+ for i in finder._get_paths(subdirs=False):
if i not in ret:
ret.append(i)
return os.pathsep.join(ret)
@@ -288,6 +293,9 @@ class DocCLI(CLI):
return "\n".join(text)
+ def _dump_yaml(self, struct, indent):
+ return CLI.tty_ify('\n'.join([indent + line for line in yaml.dump(struct, default_flow_style=False, Dumper=AnsibleDumper).split('\n')]))
+
def add_fields(self, text, fields, limit, opt_indent):
for o in sorted(fields):
@@ -322,123 +330,109 @@ class DocCLI(CLI):
del opt['choices']
default = ''
if 'default' in opt or not required:
- default = "[Default: " + str(opt.pop('default', '(null)')) + "]"
+ default = "[Default: %s" % str(opt.pop('default', '(null)')) + "]"
+
text.append(textwrap.fill(CLI.tty_ify(aliases + choices + default), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
if 'options' in opt:
- text.append(opt_indent + "options:\n")
- self.add_fields(text, opt['options'], limit, opt_indent + opt_indent)
- text.append('')
- del opt['options']
+ text.append("%soptions:\n" % opt_indent)
+ self.add_fields(text, opt.pop('options'), limit, opt_indent + opt_indent)
if 'spec' in opt:
- text.append(opt_indent + "spec:\n")
- self.add_fields(text, opt['spec'], limit, opt_indent + opt_indent)
- text.append('')
- del opt['spec']
-
- for conf in ('config', 'env_vars', 'host_vars'):
- if conf in opt:
- text.append(textwrap.fill(CLI.tty_ify("%s: " % conf), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
- for entry in opt[conf]:
- if isinstance(entry, dict):
- pre = " -"
- for key in entry:
- text.append(textwrap.fill(CLI.tty_ify("%s %s: %s" % (pre, key, entry[key])),
- limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
- pre = " "
- else:
- text.append(textwrap.fill(CLI.tty_ify(" - %s" % entry), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
- del opt[conf]
+ text.append("%sspec:\n" % opt_indent)
+ self.add_fields(text, opt.pop('spec'), limit, opt_indent + opt_indent)
+
+ conf = {}
+ for config in ('env', 'ini', 'yaml', 'vars'):
+ if config in opt and opt[config]:
+ conf[config] = opt.pop(config)
- # unspecified keys
- for k in opt:
+ if conf:
+ text.append(self._dump_yaml({'set_via': conf}, opt_indent))
+
+ for k in sorted(opt):
if k.startswith('_'):
continue
if isinstance(opt[k], string_types):
- text.append(textwrap.fill(CLI.tty_ify("%s: %s" % (k, opt[k])), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
- elif isinstance(opt[k], (list, dict)):
- text.append(textwrap.fill(CLI.tty_ify("%s: %s" % (k, yaml.dump(opt[k], Dumper=AnsibleDumper, default_flow_style=False))),
- limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
+ text.append('%s%s: %s' % (opt_indent, k, textwrap.fill(CLI.tty_ify(opt[k]), limit - (len(k) + 2), subsequent_indent=opt_indent)))
+ elif isinstance(opt[k], (list, tuple)):
+ text.append(CLI.tty_ify('%s%s: %s' % (opt_indent, k, ', '.join(opt[k]))))
else:
- display.vv("Skipping %s key cuase we don't know how to handle eet" % k)
+ text.append(self._dump_yaml({k: opt[k]}, opt_indent))
+ text.append('')
def get_man_text(self, doc):
+
+ IGNORE = frozenset(['module', 'docuri', 'version_added', 'short_description', 'now_date'])
opt_indent = " "
text = []
- text.append("> %s (%s)\n" % (doc[self.options.type].upper(), doc['filename']))
+
+ text.append("> %s (%s)\n" % (doc[self.options.type].upper(), doc.pop('filename')))
pad = display.columns * 0.20
limit = max(display.columns - int(pad), 70)
if isinstance(doc['description'], list):
- desc = " ".join(doc['description'])
+ desc = " ".join(doc.pop('description'))
else:
- desc = doc['description']
+ desc = doc.pop('description')
- text.append("%s\n" % textwrap.fill(CLI.tty_ify(desc), limit, initial_indent=" ", subsequent_indent=" "))
+ text.append("%s\n" % textwrap.fill(CLI.tty_ify(desc), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
if 'deprecated' in doc and doc['deprecated'] is not None and len(doc['deprecated']) > 0:
- text.append("DEPRECATED: \n%s\n" % doc['deprecated'])
+ text.append("DEPRECATED: \n%s\n" % doc.pop('deprecated'))
- if 'action' in doc and doc['action']:
+ if doc.pop('action', False):
text.append(" * note: %s\n" % "This module has a corresponding action plugin.")
if 'options' in doc and doc['options']:
- text.append("Options (= is mandatory):\n")
- self.add_fields(text, doc['options'], limit, opt_indent)
+ text.append("OPTIONS (= is mandatory):\n")
+ self.add_fields(text, doc.pop('options'), limit, opt_indent)
text.append('')
if 'notes' in doc and doc['notes'] and len(doc['notes']) > 0:
- text.append("Notes:")
+ text.append("NOTES:")
for note in doc['notes']:
- text.append(textwrap.fill(CLI.tty_ify(note), limit - 6, initial_indent=" * ", subsequent_indent=opt_indent))
+ text.append(textwrap.fill(CLI.tty_ify(note), limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent))
+ text.append('')
+ del doc['notes']
if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
- req = ", ".join(doc['requirements'])
- text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit - 16, initial_indent=" ", subsequent_indent=opt_indent))
-
- if 'examples' in doc and len(doc['examples']) > 0:
- text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
- for ex in doc['examples']:
- text.append("%s\n" % (ex['code']))
+ req = ", ".join(doc.pop('requirements'))
+ text.append("REQUIREMENTS:%s\n" % textwrap.fill(CLI.tty_ify(req), limit - 16, initial_indent=" ", subsequent_indent=opt_indent))
if 'plainexamples' in doc and doc['plainexamples'] is not None:
- text.append("EXAMPLES:\n")
+ text.append("EXAMPLES:")
if isinstance(doc['plainexamples'], string_types):
- text.append(doc['plainexamples'])
+ text.append(doc.pop('plainexamples').strip())
else:
- text.append(yaml.dump(doc['plainexamples'], indent=2, default_flow_style=False))
+ text.append(yaml.dump(doc.pop('plainexamples'), indent=2, default_flow_style=False))
+ text.append('')
if 'returndocs' in doc and doc['returndocs'] is not None:
text.append("RETURN VALUES:\n")
if isinstance(doc['returndocs'], string_types):
- text.append(doc['returndocs'])
+ text.append(doc.pop('returndocs'))
else:
- text.append(yaml.dump(doc['returndocs'], indent=2, default_flow_style=False))
+ text.append(yaml.dump(doc.pop('returndocs'), indent=2, default_flow_style=False))
text.append('')
- maintainers = set()
- if 'author' in doc:
- if isinstance(doc['author'], string_types):
- maintainers.add(doc['author'])
- else:
- maintainers.update(doc['author'])
+ # Control rest of keys on verbosity (3 == full, 0 only adds small list)
+ rest = []
+ if self.options.verbosity >= 3:
+ rest = doc
+ elif 'author' in doc:
+ rest = ['author']
- if 'maintainers' in doc:
- if isinstance(doc['maintainers'], string_types):
- maintainers.add(doc['author'])
+ # Generic handler
+ for k in sorted(rest):
+ if k in IGNORE or not doc[k]:
+ continue
+ if isinstance(doc[k], string_types):
+ text.append('%s: %s' % (k.upper(), textwrap.fill(CLI.tty_ify(doc[k]), limit - (len(k) + 2), subsequent_indent=opt_indent)))
+ elif isinstance(doc[k], (list, tuple)):
+ text.append('%s: %s' % (k.upper(), ', '.join(doc[k])))
else:
- maintainers.update(doc['author'])
-
- text.append('MAINTAINERS: ' + ', '.join(maintainers))
- text.append('')
-
- if 'metadata' in doc and doc['metadata']:
- text.append("METADATA:")
- for k in doc['metadata']:
- if isinstance(k, list):
- text.append("\t%s: %s" % (k.capitalize(), ", ".join(doc['metadata'][k])))
- else:
- text.append("\t%s: %s" % (k.capitalize(), doc['metadata'][k]))
+ text.append(self._dump_yaml({k.upper(): doc[k]}, opt_indent))
text.append('')
+
return "\n".join(text)
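The practical effect of the new _dump_yaml() helper and the set_via grouping is that ansible-doc now prints an option's env/ini/vars sources as an indented YAML fragment. A rough preview using plain PyYAML (the real code routes through AnsibleDumper):

    # Rough preview of the 'set_via' rendering added above.
    import yaml

    conf = {'set_via': {
        'env': [{'name': 'ANSIBLE_SSH_ARGS'}],
        'ini': [{'key': 'ssh_args', 'section': 'ssh_connection'}],
    }}
    indent = ' ' * 8
    print('\n'.join(indent + line for line in
                    yaml.dump(conf, default_flow_style=False).split('\n')))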
diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py
index 94103edaa0..38130fab7f 100644
--- a/lib/ansible/cli/pull.py
+++ b/lib/ansible/cli/pull.py
@@ -31,7 +31,7 @@ import time
from ansible.cli import CLI
from ansible.errors import AnsibleOptionsError
from ansible.module_utils._text import to_native
-from ansible.plugins import module_loader
+from ansible.plugins.loader import module_loader
from ansible.utils.cmd_functions import run_cmd
try:
diff --git a/lib/ansible/config/data/config.yml b/lib/ansible/config/base.yml
index 82f9c478a0..89282c799f 100644
--- a/lib/ansible/config/data/config.yml
+++ b/lib/ansible/config/base.yml
@@ -1,6 +1,9 @@
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+---
ACCELERATE_CONNECT_TIMEOUT:
default: 1.0
- desc:
+ description:
- This setting controls the timeout for the socket connect call, and should be kept relatively low.
The connection to the accelerate_port will be attempted 3 times before Ansible will fall back to ssh or paramiko
(depending on your default connection setting) to try and start the accelerate daemon remotely.
@@ -9,152 +12,167 @@ ACCELERATE_CONNECT_TIMEOUT:
env: [{name: ACCELERATE_CONNECT_TIMEOUT }]
ini:
- {key: accelerate_connect_timeout, section: accelerate}
- value_type: float
- deprecated: True
- vars: []
- yaml: {key: accelerate.accelerate_connect_timeout}
+ type: float
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
version_added: "1.4"
ACCELERATE_DAEMON_TIMEOUT:
default: 30
- desc:
+ description:
- This setting controls the timeout for the accelerated daemon, as measured in minutes. The default daemon timeout is 30 minutes.
- Prior to 1.6, the timeout was hard-coded from the time of the daemon’s launch.
- For version 1.6+, the timeout is now based on the last activity to the daemon and is configurable via this option.
env: [{name: ACCELERATE_DAEMON_TIMEOUT}]
ini:
- {key: accelerate_daemon_timeout, section: accelerate}
- value_type: integer
- deprecated: True
- vars: []
- yaml: {key: accelerate.accelerate_daemon_timeout}
+ type: integer
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
version_added: "1.6"
ACCELERATE_KEYS_DIR:
default: ~/.fireball.keys
- desc: ''
- deprecated: True
+ description: ''
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
env: [{name: ACCELERATE_KEYS_DIR}]
ini:
- {key: accelerate_keys_dir, section: accelerate}
- vars: []
- yaml: {key: accelerate.accelerate_keys_dir}
ACCELERATE_KEYS_DIR_PERMS:
default: '700'
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ACCELERATE_KEYS_DIR_PERMS}]
ini:
- {key: accelerate_keys_dir_perms, section: accelerate}
- deprecated: True
- vars: []
- yaml: {key: accelerate.accelerate_keys_dir_perms}
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
ACCELERATE_KEYS_FILE_PERMS:
default: '600'
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ACCELERATE_KEYS_FILE_PERMS}]
ini:
- {key: accelerate_keys_file_perms, section: accelerate}
- deprecated: True
- vars: []
- yaml: {key: accelerate.accelerate_keys_file_perms}
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
ACCELERATE_MULTI_KEY:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ACCELERATE_MULTI_KEY}]
ini:
- {key: accelerate_multi_key, section: accelerate}
- value_type: boolean
- deprecated: True
- vars: []
- yaml: {key: accelerate.accelerate_multi_key}
+ type: boolean
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
ACCELERATE_PORT:
default: 5099
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ACCELERATE_PORT}]
ini:
- {key: accelerate_port, section: accelerate}
- value_type: integer
- deprecated: True
- vars: []
- yaml: {key: accelerate.accelerate_port}
+ type: integer
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
ACCELERATE_TIMEOUT:
default: 30
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ACCELERATE_TIMEOUT}]
ini:
- {key: accelerate_timeout, section: accelerate}
- value_type: integer
- deprecated: True
- vars: []
- yaml: {key: accelerate.accelerate_timeout}
+ type: integer
+ deprecated:
+ why: Removing accelerate as a connection method, settings not needed either.
+ version: "2.5"
+ alternatives: ssh and paramiko
ALLOW_WORLD_READABLE_TMPFILES:
default: False
- desc:
+ description:
- This makes the temporary files created on the machine to be world readable and will issue a warning instead of failing the task.
- It is useful when becoming an unprivileged user.
env: []
ini:
- {key: allow_world_readable_tmpfiles, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.allow_world_readable_tmpfiles}
version_added: "2.1"
ANSIBLE_COW_SELECTION:
default: default
- desc: This allows you to chose a specific cowsay stencil for the banners or use 'random' to cycle through them.
+ description: This allows you to choose a specific cowsay stencil for the banners or use 'random' to cycle through them.
env: [{name: ANSIBLE_COW_SELECTION}]
ini:
- {key: cow_selection, section: defaults}
- vars: []
yaml: {key: defaults.cow_selection}
ANSIBLE_COW_WHITELIST:
default: ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant', 'eyes', 'hellokitty', 'kitty', 'luke-koala', 'meow', 'milk', 'moofasa', 'moose', 'ren', 'sheep', 'small', 'stegosaurus', 'stimpy', 'supermilker', 'three-eyes', 'turkey', 'turtle', 'tux', 'udder', 'vader-koala', 'vader', 'www']
- desc: White list of cowsay templates that are 'safe' to use, set to empty list if you want to enable all installed templates.
+ description: White list of cowsay templates that are 'safe' to use, set to empty list if you want to enable all installed templates.
env: [{name: ANSIBLE_COW_WHITELIST}]
ini:
- {key: cow_whitelist, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: defaults.cow_whitelist}
ANSIBLE_FORCE_COLOR:
default: False
- desc: This options forces color mode even when running without a TTY
+ description: This option forces color mode even when running without a TTY
env: [{name: ANSIBLE_FORCE_COLOR}]
ini:
- {key: force_color, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.force_color}
ANSIBLE_NOCOLOR:
default: False
- desc: This setting allows suppressing colorizing output, which is used to give a better indication of failure and status information.
+ description: This setting allows suppressing colorizing output, which is used to give a better indication of failure and status information.
env: [{name: ANSIBLE_NOCOLOR}]
ini:
- {key: nocolor, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.nocolor}
ANSIBLE_NOCOWS:
default: False
- desc: If you have cowsay installed but want to avoid the 'cows' (why????), use this.
+ description: If you have cowsay installed but want to avoid the 'cows' (why????), use this.
env: [{name: ANSIBLE_NOCOWS}]
ini:
- {key: nocows, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.nocows}
+ANSIBLE_PIPELINING:
+ default: False
+ description:
+ - Pipelining, if supported by the connection plugin, reduces the number of network operations required to execute a module on the remote server,
+ by executing many Ansible modules without actual file transfer.
+ - This can result in a very significant performance improvement when enabled.
+ - However this conflicts with privilege escalation (become). For example, when using “sudo:” operations you must first
+ disable ‘requiretty’ in /etc/sudoers on all managed hosts, which is why it is disabled by default.
+ env: [{name: ANSIBLE_PIPELINING}]
+ ini:
+ - {key: pipelining, section: connection}
+ type: boolean
+ yaml: {key: plugins.connection.pipelining}
ANSIBLE_SSH_ARGS:
default: -C -o ControlMaster=auto -o ControlPersist=60s
- desc:
+ description:
- If set, this will override the Ansible default ssh arguments.
- In particular, users may wish to raise the ControlPersist time to encourage performance. A value of 30 minutes may be appropriate.
- Be aware that if `-o ControlPath` is set in ssh_args, the control path setting is not used.
env: [{name: ANSIBLE_SSH_ARGS}]
ini:
- {key: ssh_args, section: ssh_connection}
- vars: []
yaml: {key: ssh_connection.ssh_args}
ANSIBLE_SSH_CONTROL_PATH:
+ # TODO: move to ssh plugin
default: null
- desc:
+ description:
- This is the location to save ssh's ControlPath sockets, it uses ssh's variable substitution.
- Since 2.3, if null, ansible will generate a unique hash. Use `%(directory)s` to indicate where to use the control dir path setting.
- Before 2.3 it defaulted to `control_path=%(directory)s/ansible-ssh-%%h-%%p-%%r`.
@@ -162,33 +180,31 @@ ANSIBLE_SSH_CONTROL_PATH:
env: [{name: ANSIBLE_SSH_CONTROL_PATH}]
ini:
- {key: control_path, section: ssh_connection}
- vars: []
yaml: {key: ssh_connection.control_path}
ANSIBLE_SSH_CONTROL_PATH_DIR:
+ # TODO: move to ssh plugin
default: ~/.ansible/cp
- desc:
+ description:
- This sets the directory to use for ssh control path if the control path setting is null.
- Also, provides the `%(directory)s` variable for the control path setting.
env: [{name: ANSIBLE_SSH_CONTROL_PATH_DIR}]
ini:
- {key: control_path_dir, section: ssh_connection}
- vars: []
yaml: {key: ssh_connection.control_path_dir}
ANSIBLE_SSH_EXECUTABLE:
default: ssh
- desc:
+ description:
- This defines the location of the ssh binary. It defaults to `ssh` which will use the first ssh binary available in $PATH.
- This option is usually not required, it might be useful when access to system ssh is restricted,
or when using ssh wrappers to connect to remote hosts.
env: [{name: ANSIBLE_SSH_EXECUTABLE}]
ini:
- {key: ssh_executable, section: ssh_connection}
- vars: []
yaml: {key: ssh_connection.ssh_executable}
version_added: "2.2"
ANSIBLE_SSH_PIPELINING:
default: False
- desc:
+ description:
- Pipelining reduces the number of SSH operations required to execute a module on the remote server,
by executing many Ansible modules without actual file transfer.
- This can result in a very significant performance improvement when enabled.
@@ -196,654 +212,592 @@ ANSIBLE_SSH_PIPELINING:
env: [{name: ANSIBLE_SSH_PIPELINING}]
ini:
- {key: pipelining, section: ssh_connection}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: ssh_connection.pipelining}
ANSIBLE_SSH_RETRIES:
default: 0
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_SSH_RETRIES}]
ini:
- {key: retries, section: ssh_connection}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: ssh_connection.retries}
ANY_ERRORS_FATAL:
default: False
- desc: Sets the default value for the any_errors_fatal keyword
+ description: Sets the default value for the any_errors_fatal keyword
env:
- name: ANSIBLE_ANY_ERRORS_FATAL
ini:
- section: defaults
key: any_errors_fatal
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: errors.anyerrors_fatal}
version_added: "2.4"
BECOME_ALLOW_SAME_USER:
default: False
- desc: This setting controls if become is skipped when remote user and become user are the same.
+ description: This setting controls if become is skipped when remote user and become user are the same.
env: [{name: ANSIBLE_BECOME_ALLOW_SAME_USER}]
ini:
- {key: become_allow_same_user, section: privilege_escalation}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: privilege_escalation.become_allow_same_user}
CACHE_PLUGIN:
default: memory
- desc: Chooses which cache plugin to use
+ description: Chooses which cache plugin to use
env: [{name: ANSIBLE_CACHE_PLUGIN}]
ini:
- {key: fact_caching, section: defaults}
- vars: []
yaml: {key: defaults.fact_caching}
CACHE_PLUGIN_CONNECTION:
default:
- desc: Defines connection or path information for the cache plugin
+ description: Defines connection or path information for the cache plugin
env: [{name: ANSIBLE_CACHE_PLUGIN_CONNECTION}]
ini:
- {key: fact_caching_connection, section: defaults}
- vars: []
yaml: {key: defaults.fact_caching_connection}
CACHE_PLUGIN_PREFIX:
default: ansible_facts
- desc: Prefix to use for cache plugin files/tables
+ description: Prefix to use for cache plugin files/tables
env: [{name: ANSIBLE_CACHE_PLUGIN_PREFIX}]
ini:
- {key: fact_caching_prefix, section: defaults}
- vars: []
yaml: {key: defaults.fact_caching_prefix}
CACHE_PLUGIN_TIMEOUT:
default: 86400
- desc: Expiration timeout for the cache plugin data
+ description: Expiration timeout for the cache plugin data
env: [{name: ANSIBLE_CACHE_PLUGIN_TIMEOUT}]
ini:
- {key: fact_caching_timeout, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.fact_caching_timeout}
COLOR_CHANGED:
default: yellow
- desc: Defines the color to use on 'Changed' status
+ description: Defines the color to use on 'Changed' status
env: [{name: ANSIBLE_COLOR_CHANGED}]
ini:
- {key: changed, section: colors}
- vars: []
yaml: {key: display.colors.changed}
COLOR_DEBUG:
default: dark gray
- desc: Defines the color to use when emitting debug messages
+ description: Defines the color to use when emitting debug messages
env: [{name: ANSIBLE_COLOR_DEBUG}]
ini:
- {key: debug, section: colors}
- vars: []
yaml: {key: colors.debug}
COLOR_DEPRECATE:
default: purple
- desc: Defines the color to use when emitting deprecation messages
+ description: Defines the color to use when emitting deprecation messages
env: [{name: ANSIBLE_COLOR_DEPRECATE}]
ini:
- {key: deprecate, section: colors}
- vars: []
yaml: {key: colors.deprecate}
COLOR_DIFF_ADD:
default: green
- desc: Defines the color to use when showing added lines in diffs
+ description: Defines the color to use when showing added lines in diffs
env: [{name: ANSIBLE_COLOR_DIFF_ADD}]
ini:
- {key: diff_add, section: colors}
- vars: []
yaml: {key: colors.diff_add}
COLOR_DIFF_LINES:
default: cyan
- desc: Defines the color to use when showing diffs
+ description: Defines the color to use when showing diffs
env: [{name: ANSIBLE_COLOR_DIFF_LINES}]
ini:
- {key: diff_lines, section: colors}
- vars: []
yaml: {key: colors.diff_lines}
COLOR_DIFF_REMOVE:
default: red
- desc: Defines the color to use when showing removed lines in diffs
+ description: Defines the color to use when showing removed lines in diffs
env: [{name: ANSIBLE_COLOR_DIFF_REMOVE}]
ini:
- {key: diff_remove, section: colors}
- vars: []
yaml: {key: colors.diff_remove}
COLOR_ERROR:
default: red
- desc: Defines the color to use when emitting error messages
+ description: Defines the color to use when emitting error messages
env: [{name: ANSIBLE_COLOR_ERROR}]
ini:
- {key: error, section: colors}
- vars: []
yaml: {key: colors.error}
COLOR_HIGHLIGHT:
default: white
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_COLOR_HIGHLIGHT}]
ini:
- {key: highlight, section: colors}
- vars: []
yaml: {key: colors.highlight}
COLOR_OK:
default: green
- desc: Defines the color to use when showing 'OK' status
+ description: Defines the color to use when showing 'OK' status
env: [{name: ANSIBLE_COLOR_OK}]
ini:
- {key: ok, section: colors}
- vars: []
yaml: {key: colors.ok}
COLOR_SKIP:
default: cyan
- desc: Defines the color to use when showing 'Skipped' status
+ description: Defines the color to use when showing 'Skipped' status
env: [{name: ANSIBLE_COLOR_SKIP}]
ini:
- {key: skip, section: colors}
- vars: []
yaml: {key: colors.skip}
COLOR_UNREACHABLE:
default: bright red
- desc: Defines the color to use on 'Unreachable' status
+ description: Defines the color to use on 'Unreachable' status
env: [{name: ANSIBLE_COLOR_UNREACHABLE}]
ini:
- {key: unreachable, section: colors}
- vars: []
yaml: {key: colors.unreachable}
COLOR_VERBOSE:
default: blue
- desc: Defines the color to use when emitting verbose messages
+ description: Defines the color to use when emitting verbose messages
env: [{name: ANSIBLE_COLOR_VERBOSE}]
ini:
- {key: verbose, section: colors}
- vars: []
yaml: {key: colors.verbose}
COLOR_WARN:
default: bright purple
- desc: Defines the color to use when emitting warning messages
+ description: Defines the color to use when emitting warning messages
env: [{name: ANSIBLE_COLOR_WARN}]
ini:
- {key: warn, section: colors}
- vars: []
yaml: {key: colors.warn}
COMMAND_WARNINGS:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_COMMAND_WARNINGS}]
ini:
- {key: command_warnings, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.command_warnings}
DEFAULT_ACTION_PLUGIN_PATH:
default: ~/.ansible/plugins/action:/usr/share/ansible/plugins/action
- desc: 'TODO: write it'
+ description: Colon separated paths in which Ansible will search for Action Plugins.
env: [{name: ANSIBLE_ACTION_PLUGINS}]
ini:
- {key: action_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.action_plugins}
DEFAULT_ALLOW_UNSAFE_LOOKUPS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: []
ini:
- {key: allow_unsafe_lookups, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.allow_unsafe_lookups}
DEFAULT_ASK_PASS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_ASK_PASS}]
ini:
- {key: ask_pass, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.ask_pass}
DEFAULT_ASK_SUDO_PASS:
default: False
- desc: 'TODO: write it'
+ deprecated:
+ why: In favor of become which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'TODO: write it'
env: [{name: ANSIBLE_ASK_SUDO_PASS}]
ini:
- {key: ask_sudo_pass, section: defaults}
- value_type: boolean
- vars: []
- yaml: {key: defaults.ask_sudo_pass}
+ type: boolean
DEFAULT_ASK_SU_PASS:
default: False
- desc: 'TODO: write it'
+ deprecated:
+ why: In favor of become which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'TODO: write it'
env: [{name: ANSIBLE_ASK_SU_PASS}]
ini:
- {key: ask_su_pass, section: defaults}
- value_type: boolean
- vars: []
- yaml: {key: defaults.ask_su_pass}
+ type: boolean
DEFAULT_ASK_VAULT_PASS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_ASK_VAULT_PASS}]
ini:
- {key: ask_vault_pass, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.ask_vault_pass}
DEFAULT_BECOME:
default: False
- desc: 'TODO: write it'
+ description: Toggles the use of privilege escalation, allowing you to 'become' another user after login.
env: [{name: ANSIBLE_BECOME}]
ini:
- {key: become, section: privilege_escalation}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: privilege_escalation.become}
DEFAULT_BECOME_ASK_PASS:
default: False
- desc: 'TODO: write it'
+ description: Toggle to prompt for privilege escalation password.
env: [{name: ANSIBLE_BECOME_ASK_PASS}]
ini:
- {key: become_ask_pass, section: privilege_escalation}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: privilege_escalation.become_ask_pass}
+DEFAULT_BECOME_METHOD:
+ default: 'sudo'
+ description: Privilege escalation method to use when `become` is enabled.
+ env: [{name: ANSIBLE_BECOME_METHOD}]
+ ini:
+ - {section: privilege_escalation, key: become_method}
+ yaml: {key: privilege_escalation.become_method}
DEFAULT_BECOME_EXE:
- default:
- desc: 'TODO: write it'
+ default: ~
+ description: 'Executable to use for privilege escalation; otherwise Ansible will depend on PATH.'
env: [{name: ANSIBLE_BECOME_EXE}]
ini:
- {key: become_exe, section: privilege_escalation}
- vars: []
yaml: {key: privilege_escalation.become_exe}
DEFAULT_BECOME_FLAGS:
- default:
- desc: 'TODO: write it'
+ default: ~
+ description: Flags to pass to the privilege escalation executable.
env: [{name: ANSIBLE_BECOME_FLAGS}]
ini:
- {key: become_flags, section: privilege_escalation}
- vars: []
yaml: {key: privilege_escalation.become_flags}
-DEFAULT_BECOME_METHOD:
- default: 'sudo'
- desc:
- env: [{name: ANSIBLE_BECOME_METHOD}]
- ini:
- - {section: privilege_escalation, key: become_method}
- vars: []
- yaml: {key: privilege_escalation.become_method}
DEFAULT_BECOME_USER:
default: root
- desc: 'TODO: write it'
+ description: User you become when using privilege escalation; most systems will use 'root' when no user is specified.
env: [{name: ANSIBLE_BECOME_USER}]
ini:
- {key: become_user, section: privilege_escalation}
- vars: []
yaml: {key: privilege_escalation.become_user}
DEFAULT_CACHE_PLUGIN_PATH:
default: ~/.ansible/plugins/cache:/usr/share/ansible/plugins/cache
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_CACHE_PLUGINS}]
ini:
- {key: cache_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.cache_plugins}
DEFAULT_CALLABLE_WHITELIST:
default: []
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_CALLABLE_WHITELIST}]
ini:
- {key: callable_whitelist, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: defaults.callable_whitelist}
DEFAULT_CALLBACK_PLUGIN_PATH:
default: ~/.ansible/plugins/callback:/usr/share/ansible/plugins/callback
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_CALLBACK_PLUGINS}]
ini:
- {key: callback_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.callback_plugins}
DEFAULT_CALLBACK_WHITELIST:
default: []
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_CALLBACK_WHITELIST}]
ini:
- {key: callback_whitelist, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: defaults.callback_whitelist}
DEFAULT_CONNECTION_PLUGIN_PATH:
default: ~/.ansible/plugins/connection:/usr/share/ansible/plugins/connection
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_CONNECTION_PLUGINS}]
ini:
- {key: connection_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.connection_plugins}
DEFAULT_DEBUG:
default: False
- desc: 'TODO: write it'
+ description: Toggles debug output in Ansible; VERY verbose and can hinder multiprocessing.
env: [{name: ANSIBLE_DEBUG}]
ini:
- {key: debug, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.debug}
DEFAULT_EXECUTABLE:
default: /bin/sh
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_EXECUTABLE}]
ini:
- {key: executable, section: defaults}
- vars: []
yaml: {key: defaults.executable}
DEFAULT_FACT_PATH:
default:
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_FACT_PATH}]
ini:
- {key: fact_path, section: defaults}
- value_type: path
- vars: []
+ type: path
yaml: {key: defaults.fact_path}
DEFAULT_FILTER_PLUGIN_PATH:
default: ~/.ansible/plugins/filter:/usr/share/ansible/plugins/filter
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_FILTER_PLUGINS}]
ini:
- {key: filter_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.filter_plugins}
DEFAULT_FORCE_HANDLERS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_FORCE_HANDLERS}]
ini:
- {key: force_handlers, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.force_handlers}
DEFAULT_FORKS:
default: 5
- desc: 'TODO: write it'
+ description: Maximum number of forks Ansible will use to execute tasks on target hosts.
env: [{name: ANSIBLE_FORKS}]
ini:
- {key: forks, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.forks}
DEFAULT_GATHERING:
default: 'implicit'
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_GATHERING}]
ini:
- key: gathering
section: defaults
- vars: []
yaml: {key: defaults.gathering}
DEFAULT_GATHER_SUBSET:
default: 'all'
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_GATHER_SUBSET}]
ini:
- key: gather_subset
section: defaults
- vars: []
yaml: {key: defaults.gather_subset}
DEFAULT_GATHER_TIMEOUT:
default: 10
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_GATHER_TIMEOUT}]
ini:
- {key: gather_timeout, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.gather_timeout}
DEFAULT_HANDLER_INCLUDES_STATIC:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_HANDLER_INCLUDES_STATIC}]
ini:
- {key: handler_includes_static, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.handler_includes_static}
+ deprecated:
+ why: include itself is deprecated and this setting will not matter in the future
+ version: "2.8"
+ alternatives: none as its already built into the decision between include_tasks and import_tasks
DEFAULT_HASH_BEHAVIOUR:
default: replace
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_HASH_BEHAVIOUR}]
ini:
- {key: hash_behaviour, section: defaults}
- vars: []
yaml: {key: defaults.hash_behaviour}
DEFAULT_HOST_LIST:
default: /etc/ansible/hosts
- desc: 'TODO: write it'
+ description: Location of the Ansible inventory source.
env: [{name: ANSIBLE_INVENTORY}]
expand_relative_paths: True
ini:
- {key: inventory, section: defaults}
- value_type: path
- vars: []
+ type: path
yaml: {key: defaults.inventory}
DEFAULT_INTERNAL_POLL_INTERVAL:
default: 0.001
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: []
ini:
- {key: internal_poll_interval, section: defaults}
- value_type: float
- vars: []
+ type: float
yaml: {key: defaults.internal_poll_interval}
DEFAULT_INVENTORY_PLUGIN_PATH:
default: ~/.ansible/plugins/inventory:/usr/share/ansible/plugins/inventory
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_INVENTORY_PLUGINS}]
ini:
- {key: inventory_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.inventory_plugins}
DEFAULT_JINJA2_EXTENSIONS:
default:
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_JINJA2_EXTENSIONS}]
ini:
- {key: jinja2_extensions, section: defaults}
- vars: []
yaml: {key: defaults.jinja2_extensions}
DEFAULT_KEEP_REMOTE_FILES:
default: False
- desc: 'TODO: write it'
+ description: Enables/disables the cleaning up of the temporary files Ansible uses to execute tasks on the remote.
env: [{name: ANSIBLE_KEEP_REMOTE_FILES}]
ini:
- {key: keep_remote_files, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.keep_remote_files}
DEFAULT_LIBVIRT_LXC_NOSECLABEL:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: LIBVIRT_LXC_NOSECLABEL}]
ini:
- {key: libvirt_lxc_noseclabel, section: selinux}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: selinux.libvirt_lxc_noseclabel}
DEFAULT_LOAD_CALLBACK_PLUGINS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_LOAD_CALLBACK_PLUGINS}]
ini:
- {key: bin_ansible_callbacks, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.bin_ansible_callbacks}
DEFAULT_LOCAL_TMP:
default: ~/.ansible/tmp
- desc: 'TODO: write it'
+ description: Temporary directory for Ansible to use on the controller.
env: [{name: ANSIBLE_LOCAL_TEMP}]
ini:
- {key: local_tmp, section: defaults}
- value_type: tmppath
- vars: []
+ type: tmppath
yaml: {key: defaults.local_tmp}
DEFAULT_LOG_PATH:
default: ''
- desc: 'TODO: write it'
+ description: File to which Ansible will log on the controller. When empty logging is disabled.
env: [{name: ANSIBLE_LOG_PATH}]
ini:
- {key: log_path, section: defaults}
- value_type: path
- vars: []
+ type: path
yaml: {key: defaults.log_path}
DEFAULT_LOOKUP_PLUGIN_PATH:
default: ~/.ansible/plugins/lookup:/usr/share/ansible/plugins/lookup
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_LOOKUP_PLUGINS}]
ini:
- {key: lookup_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.lookup_plugins}
DEFAULT_MANAGED_STR:
default: Ansible managed
- desc: 'TODO: write it'
+ description: Sets the macro for the 'ansible_managed' variable available for 'template' tasks.
env: []
ini:
- {key: ansible_managed, section: defaults}
- vars: []
yaml: {key: defaults.ansible_managed}
DEFAULT_MODULE_ARGS:
default: ''
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_MODULE_ARGS}]
ini:
- {key: module_args, section: defaults}
- vars: []
yaml: {key: defaults.module_args}
DEFAULT_MODULE_COMPRESSION:
default: ZIP_DEFLATED
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: []
ini:
- {key: module_compression, section: defaults}
- vars: []
yaml: {key: defaults.module_compression}
DEFAULT_MODULE_LANG:
- default: os.getenv('LANG', 'en_US.UTF-8')
- desc: 'TODO: write it'
+ # TODO: allow setting to function: os.getenv('LANG', 'en_US.UTF-8')
+ default: eval(os.getenv('LANG', 'en_US.UTF-8'))
+ description: "Language locale setting to use for modules when they execute on the target; if empty it defaults to 'en_US.UTF-8'"
env: [{name: ANSIBLE_MODULE_LANG}]
ini:
- {key: module_lang, section: defaults}
- vars: []
yaml: {key: defaults.module_lang}
DEFAULT_MODULE_NAME:
default: command
- desc: 'TODO: write it'
+ description: Module to use with the `ansible` AdHoc command, if none is specified.
env: []
ini:
- {key: module_name, section: defaults}
- vars: []
yaml: {key: defaults.module_name}
DEFAULT_MODULE_PATH:
default: ~/.ansible/plugins/modules:/usr/share/ansible/plugins/modules
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_LIBRARY}]
ini:
- {key: library, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.library}
DEFAULT_MODULE_SET_LOCALE:
default: False
- desc: 'TODO: write it'
+ description: Controls if we set locale for modules when executing on the target.
env: [{name: ANSIBLE_MODULE_SET_LOCALE}]
ini:
- {key: module_set_locale, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.module_set_locale}
DEFAULT_MODULE_UTILS_PATH:
default: ~/.ansible/plugins/module_utils:/usr/share/ansible/plugins/module_utils
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_MODULE_UTILS}]
ini:
- {key: module_utils, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.module_utils}
DEFAULT_NO_LOG:
default: False
- desc: 'TODO: write it'
+ description: Toggle Ansible's display and logging of task details, mainly used to avoid security disclosures.
env: [{name: ANSIBLE_NO_LOG}]
ini:
- {key: no_log, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.no_log}
DEFAULT_NO_TARGET_SYSLOG:
default: False
- desc: 'TODO: write it'
+ description: Toggle Ansible logging to syslog on the target when it executes tasks.
env: [{name: ANSIBLE_NO_TARGET_SYSLOG}]
ini:
- {key: no_target_syslog, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.no_target_syslog}
DEFAULT_NULL_REPRESENTATION:
default:
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_NULL_REPRESENTATION}]
ini:
- {key: null_representation, section: defaults}
- value_type: none
- vars: []
+ type: none
yaml: {key: defaults.null_representation}
DEFAULT_POLL_INTERVAL:
default: 15
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_POLL_INTERVAL}]
ini:
- {key: poll_interval, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.poll_interval}
DEFAULT_PRIVATE_KEY_FILE:
default:
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PRIVATE_KEY_FILE}]
ini:
- {key: private_key_file, section: defaults}
- value_type: path
- vars: []
+ type: path
yaml: {key: defaults.private_key_file}
DEFAULT_PRIVATE_ROLE_VARS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PRIVATE_ROLE_VARS}]
ini:
- {key: private_role_vars, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.private_role_vars}
DEFAULT_REMOTE_PORT:
default:
- desc: 'TODO: write it'
+ description: Port to use in remote connections; when blank it will use the connection plugin default.
env: [{name: ANSIBLE_REMOTE_PORT}]
ini:
- {key: remote_port, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.remote_port}
DEFAULT_REMOTE_TMP:
default: ~/.ansible/tmp
- desc: 'TODO: write it'
+ description:
+ - Temporary directory to use on targets when executing tasks.
+ - In some cases Ansible may still choose to use a system temporary dir to avoid permission issues.
env: [{name: ANSIBLE_REMOTE_TEMP}]
ini:
- {key: remote_tmp, section: defaults}
@@ -852,225 +806,244 @@ DEFAULT_REMOTE_TMP:
yaml: {key: defaults.remote_tmp}
DEFAULT_REMOTE_USER:
default:
- desc: 'TODO: write it'
+ description:
+ - Sets the login user for the target machines
+ - When blank it uses the connection plugin's default, normally the user currently executing Ansible.
env: [{name: ANSIBLE_REMOTE_USER}]
ini:
- {key: remote_user, section: defaults}
- vars: []
yaml: {key: defaults.remote_user}
DEFAULT_ROLES_PATH:
default: ~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_ROLES_PATH}]
expand_relative_paths: True
ini:
- {key: roles_path, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.roles_path}
DEFAULT_SCP_IF_SSH:
default: smart
- desc: 'TODO: write it'
+ description:
+ - "Prefered method to use when transfering files over ssh"
+ - When set to smart, Ansible will try them until one succeeds or they all fail
+ - If set to True, it will force 'scp', if False it will use 'sftp'
env: [{name: ANSIBLE_SCP_IF_SSH}]
ini:
- {key: scp_if_ssh, section: ssh_connection}
- vars: []
yaml: {key: ssh_connection.scp_if_ssh}
DEFAULT_SELINUX_SPECIAL_FS:
default: fuse, nfs, vboxsf, ramfs, 9p
- desc: 'TODO: write it'
+ description:
+ - "Some filesystems do not support safe operations and/or return inconsistent errors,
+ this setting makes Ansible 'tolerate' those in the list w/o causing fatal errors."
+ - Data corruption may occur and writes are not always verified when a filesystem is in the list.
+
env: []
ini:
- {key: special_context_filesystems, section: selinux}
- value_type: list
- vars: []
+ type: list
yaml: {key: selinux.special_context_filesystems}
DEFAULT_SFTP_BATCH_MODE:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_SFTP_BATCH_MODE}]
ini:
- {key: sftp_batch_mode, section: ssh_connection}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: ssh_connection.sftp_batch_mode}
DEFAULT_SQUASH_ACTIONS:
default: apk, apt, dnf, homebrew, openbsd_pkg, pacman, pkgng, yum, zypper
- desc: 'TODO: write it'
+ description:
+ - Ansible can optimise actions that call modules that support list parameters when using with_ looping.
+ Instead of calling the module once for each item, the module is called once with the full list.
+ - The default value for this setting is only for certain package managers, but it can be used for any module
+ - Currently, this is only supported for modules that have a name or pkg parameter, and only when the item is the only thing being passed to the parameter.
env: [{name: ANSIBLE_SQUASH_ACTIONS}]
ini:
- {key: squash_actions, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: defaults.squash_actions}
+ version_added: "2.0"
DEFAULT_SSH_TRANSFER_METHOD:
default:
- desc: 'TODO: write it'
+ description: 'unused?'
+ # - "Preferred method to use when transferring files over ssh"
+ # - Setting to smart will try them until one succeeds or they all fail
+ #choices: ['sftp', 'scp', 'dd', 'smart']
env: [{name: ANSIBLE_SSH_TRANSFER_METHOD}]
ini:
- {key: transfer_method, section: ssh_connection}
- vars: []
yaml: {key: ssh_connection.transfer_method}
DEFAULT_STDOUT_CALLBACK:
default: default
- desc: 'TODO: write it'
+ description:
+ - "Set the main callback used to display Ansible output, you can only have one at a time."
+ - You can have many other callbacks, but just one can be in charge of stdout.
env: [{name: ANSIBLE_STDOUT_CALLBACK}]
ini:
- {key: stdout_callback, section: defaults}
- vars: []
yaml: {key: defaults.stdout_callback}
DEFAULT_STRATEGY:
- default: linear
- desc: 'TODO: write it'
+ default: 'linear'
+ description: Set the default strategy used for plays.
env: [{name: ANSIBLE_STRATEGY}]
ini:
- {key: strategy, section: defaults}
- vars: []
yaml: {key: defaults.strategy}
+ version_added: "2.3"
DEFAULT_STRATEGY_PLUGIN_PATH:
default: ~/.ansible/plugins/strategy:/usr/share/ansible/plugins/strategy
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_STRATEGY_PLUGINS}]
ini:
- {key: strategy_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.strategy_plugins}
DEFAULT_SU:
default: False
- desc: 'TODO: write it'
+ description: 'Toggle the use of "su" for tasks.'
env: [{name: ANSIBLE_SU}]
ini:
- {key: su, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.su}
DEFAULT_SUDO:
default: False
- desc: 'TODO: write it'
+ deprecated:
+ why: In favor of become, which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'Toggle the use of "sudo" for tasks.'
env: [{name: ANSIBLE_SUDO}]
ini:
- {key: sudo, section: defaults}
- value_type: boolean
- vars: []
- yaml: {key: defaults.sudo}
+ type: boolean
DEFAULT_SUDO_EXE:
- default:
- desc: 'TODO: write it'
+ default: sudo
+ deprecated:
+ why: In favor of become, which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'specify an "sudo" executable, otherwise it relies on PATH.'
env: [{name: ANSIBLE_SUDO_EXE}]
ini:
- {key: sudo_exe, section: defaults}
- vars: []
- yaml: {key: defaults.sudo_exe}
DEFAULT_SUDO_FLAGS:
- default: -H -S -n
- desc: 'TODO: write it'
+ default: '-H -S -n'
+ deprecated:
+ why: In favor of become, which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'Flags to pass to "sudo"'
env: [{name: ANSIBLE_SUDO_FLAGS}]
ini:
- {key: sudo_flags, section: defaults}
- vars: []
- yaml: {key: defaults.sudo_flags}
DEFAULT_SUDO_USER:
- default: root
- desc: 'TODO: write it'
+ default:
+ deprecated:
+ why: In favor of become, which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'User you become when using "sudo", leaving it blank will use the default configured on the target (normally root)'
env: [{name: ANSIBLE_SUDO_USER}]
ini:
- {key: sudo_user, section: defaults}
- vars: []
- yaml: {key: defaults.sudo_user}
DEFAULT_SU_EXE:
- default:
- desc: 'TODO: write it'
+ default: su
+ deprecated:
+ why: In favor of become, which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'specify an "su" executable, otherwise it relies on PATH.'
env: [{name: ANSIBLE_SU_EXE}]
ini:
- {key: su_exe, section: defaults}
- vars: []
- yaml: {key: defaults.su_exe}
DEFAULT_SU_FLAGS:
- default:
- desc: 'TODO: write it'
+ default: ~
+ deprecated:
+ why: In favor of become, which is a generic framework
+ version: "2.8"
+ alternatives: become
+ description: 'Flags to pass to su'
env: [{name: ANSIBLE_SU_FLAGS}]
ini:
- {key: su_flags, section: defaults}
- vars: []
- yaml: {key: defaults.su_flags}
DEFAULT_SU_USER:
- default: root
- desc: 'TODO: write it'
+ default:
+ description: 'User you become when using "su", leaving it blank will use the default configured on the target (normally root)'
env: [{name: ANSIBLE_SU_USER}]
ini:
- {key: su_user, section: defaults}
- vars: []
- yaml: {key: defaults.su_user}
+ deprecated:
+ why: In favor of become, which is a generic framework
+ version: "2.8"
+ alternatives: become
DEFAULT_SYSLOG_FACILITY:
default: LOG_USER
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_SYSLOG_FACILITY}]
ini:
- {key: syslog_facility, section: defaults}
- vars: []
yaml: {key: defaults.syslog_facility}
DEFAULT_TASK_INCLUDES_STATIC:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_TASK_INCLUDES_STATIC}]
ini:
- {key: task_includes_static, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.task_includes_static}
+ deprecated:
+ why: include itself is deprecated and this setting will not matter in the future
+ version: "2.8"
+ alternatives: None, as it's already built into the decision between include_tasks and import_tasks
DEFAULT_TEST_PLUGIN_PATH:
default: ~/.ansible/plugins/test:/usr/share/ansible/plugins/test
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_TEST_PLUGINS}]
ini:
- {key: test_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.test_plugins}
DEFAULT_TIMEOUT:
default: 10
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_TIMEOUT}]
ini:
- {key: timeout, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.timeout}
DEFAULT_TRANSPORT:
default: smart
- desc: 'TODO: write it'
+ description: "Default connection plugin to use, the 'smart' option will toggle between 'ssh' and 'paramiko' depending on controller OS and ssh versions"
env: [{name: ANSIBLE_TRANSPORT}]
ini:
- {key: transport, section: defaults}
- vars: []
yaml: {key: defaults.transport}
DEFAULT_UNDEFINED_VAR_BEHAVIOR:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_ERROR_ON_UNDEFINED_VARS}]
ini:
- {key: error_on_undefined_vars, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.error_on_undefined_vars}
DEFAULT_VARS_PLUGIN_PATH:
default: ~/.ansible/plugins/vars:/usr/share/ansible/plugins/vars
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_VARS_PLUGINS}]
ini:
- {key: vars_plugins, section: defaults}
- value_type: pathlist
- vars: []
+ type: pathlist
yaml: {key: defaults.vars_plugins}
DEFAULT_VAR_COMPRESSION_LEVEL:
default: 0
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_VAR_COMPRESSION_LEVEL}]
ini:
- {key: var_compression_level, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.var_compression_level}
DEFAULT_VAULT_ID_MATCH:
default: False
@@ -1098,256 +1071,244 @@ DEFAULT_VAULT_IDENTITY_LIST:
vars: []
yaml: {key: defaults.vault_identity_list}
DEFAULT_VAULT_PASSWORD_FILE:
- default:
- desc: 'TODO: write it'
+ default: ~
+ description: 'TODO: write it'
env: [{name: ANSIBLE_VAULT_PASSWORD_FILE}]
ini:
- {key: vault_password_file, section: defaults}
- value_type: path
- vars: []
+ type: path
yaml: {key: defaults.vault_password_file}
DEFAULT_VERBOSITY:
default: 0
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_VERBOSITY}]
ini:
- {key: verbosity, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.verbosity}
DEPRECATION_WARNINGS:
default: True
- desc: 'TODO: write it'
+ description: "Toggle to control the showing of deprecation warnings"
env: [{name: ANSIBLE_DEPRECATION_WARNINGS}]
ini:
- {key: deprecation_warnings, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.deprecation_warnings}
DIFF_ALWAYS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_DIFF_ALWAYS}]
ini:
- {key: always, section: diff}
- value_type: bool
- vars: []
+ type: bool
yaml: {key: diff.always}
DIFF_CONTEXT:
default: 3
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_DIFF_CONTEXT}]
ini:
- {key: context, section: diff}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: diff.context}
DISPLAY_ARGS_TO_STDOUT:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_DISPLAY_ARGS_TO_STDOUT}]
ini:
- {key: display_args_to_stdout, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.display_args_to_stdout}
DISPLAY_SKIPPED_HOSTS:
default: True
- desc: 'TODO: write it'
+ description: "Toggle to control displaying skipped host entries in a task in the default callback"
env: [{name: DISPLAY_SKIPPED_HOSTS}]
ini:
- {key: display_skipped_hosts, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.display_skipped_hosts}
ERROR_ON_MISSING_HANDLER:
default: True
- desc: 'TODO: write it'
+ description: "Toggle to allow missing handlers to become a warning instead of an error when notifying."
env: [{name: ANSIBLE_ERROR_ON_MISSING_HANDLER}]
ini:
- {key: error_on_missing_handler, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.error_on_missing_handler}
GALAXY_IGNORE_CERTS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_GALAXY_IGNORE}]
ini:
- {key: ignore_certs, section: galaxy}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: galaxy.ignore_certs}
GALAXY_ROLE_SKELETON:
default:
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON}]
ini:
- {key: role_skeleton, section: galaxy}
- value_type: path
- vars: []
+ type: path
yaml: {key: galaxy.role_skeleton}
GALAXY_ROLE_SKELETON_IGNORE:
default: [^.git$, ^.*/.git_keep$]
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_GALAXY_ROLE_SKELETON_IGNORE}]
ini:
- {key: role_skeleton_ignore, section: galaxy}
- value_type: list
- vars: []
+ type: list
yaml: {key: galaxy.role_skeleton_ignore}
GALAXY_SCMS:
default: git, hg
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_GALAXY_SCMS}]
ini:
- {key: scms, section: galaxy}
- value_type: list
- vars: []
+ type: list
yaml: {key: galaxy.scms}
GALAXY_SERVER:
default: https://galaxy.ansible.com
- desc: 'TODO: write it'
+ description: "URL to prepend when roles don't specify the full URI, assume they are referencing this server as the source."
env: [{name: ANSIBLE_GALAXY_SERVER}]
ini:
- {key: server, section: galaxy}
- vars: []
yaml: {key: galaxy.server}
HOST_KEY_CHECKING:
default: True
- desc: 'TODO: write it'
+ description: 'Set this to "False" if you want to avoid host key checking by the underlying tools Ansible uses to connect to the host'
env: [{name: ANSIBLE_HOST_KEY_CHECKING}]
ini:
- {key: host_key_checking, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.host_key_checking}
INVENTORY_ENABLED:
default: [host_list, script, yaml, ini]
- desc: List of enabled inventory plugins, it also determines the order in which they are used.
+ description: List of enabled inventory plugins; it also determines the order in which they are used.
env: [{name: ANSIBLE_INVENTORY_ENABLED}]
ini:
- {key: inventory_enabled, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: inventory.enabled_plugins}
INVENTORY_IGNORE_EXTS:
- default: BLACKLIST_EXTS + [ '~', '.orig', '.ini', '.cfg', '.retry']
- desc: List of extensions to ignore when using a directory as an inventory source
+ default: eval(BLACKLIST_EXTS + ( '~', '.orig', '.ini', '.cfg', '.retry'))
+ description: List of extensions to ignore when using a directory as an inventory source
env: [{name: ANSIBLE_INVENTORY_IGNORE}]
ini:
- {key: inventory_ignore_extensions, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: inventory.ignore_extensions}
INVENTORY_IGNORE_PATTERNS:
default: []
- desc: List of patterns to ignore when using a directory as an inventory source
+ description: List of patterns to ignore when using a directory as an inventory source
env: [{name: ANSIBLE_INVENTORY_IGNORE_REGEX}]
ini:
- {key: inventory_ignore_patterns, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: inventory.ignore_patterns}
MAX_FILE_SIZE_FOR_DIFF:
default: 104448
- desc: Maximum size of files to be considered for diff display
+ description: Maximum size of files to be considered for diff display
env: [{name: ANSIBLE_MAX_DIFF_SIZE}]
ini:
- {key: max_diff_size, section: defaults}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: defaults.max_diff_size}
MERGE_MULTIPLE_CLI_TAGS:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_MERGE_MULTIPLE_CLI_TAGS}]
ini:
- - {key: merge_multiple_cli_tags, section: defaults}
- value_type: boolean
- vars: []
+ - {key: merge_multiple_cli_tags, section: defaults}
+ type: boolean
yaml: {key: defaults.merge_multiple_cli_tags}
NETWORK_GROUP_MODULES:
default: [eos, nxos, ios, iosxr, junos, ce, vyos, sros, dellos9, dellos10, dellos6, asa, aruba, aireos]
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: NETWORK_GROUP_MODULES}]
ini:
- {key: network_group_modules, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: defaults.network_group_modules}
ONLY_NAMESPACE_FACTS:
default: False
- desc:
+ description:
- Facts normally get injected as top-level variables; this setting prevents that.
- Facts are still available in the `ansible_facts` variable without the `ansible_` prefix.
env: [{name: ANSIBLE_RESTRICT_FACTS}]
ini:
- {key: restrict_facts_namespace, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.restrict_facts_namespace}
version_added: "2.4"
PARAMIKO_HOST_KEY_AUTO_ADD:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD}]
ini:
- {key: host_key_auto_add, section: paramiko_connection}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: paramiko_connection.host_key_auto_add}
PARAMIKO_LOOK_FOR_KEYS:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS}]
ini:
- {key: look_for_keys, section: paramiko_connection}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: paramiko_connection.look_for_keys}
PARAMIKO_PROXY_COMMAND:
default:
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_PROXY_COMMAND}]
ini:
- {key: proxy_command, section: paramiko_connection}
- vars: []
yaml: {key: paramiko_connection.proxy_command}
PARAMIKO_PTY:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_PTY}]
ini:
- {key: pty, section: paramiko_connection}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: paramiko_connection.pty}
PARAMIKO_RECORD_HOST_KEYS:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PARAMIKO_RECORD_HOST_KEYS}]
ini:
- {key: record_host_keys, section: paramiko_connection}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: paramiko_connection.record_host_keys}
+PERSISTENT_CONNECT_INTERVAL:
+ default: 1
+ description: 'TODO: write it'
+ env: [{name: ANSIBLE_PERSISTENT_CONNECT_INTERVAL}]
+ ini:
+ - {key: connect_interval, section: persistent_connection}
+ type: integer
+ yaml: {key: persistent_connection.connect_interval}
PERSISTENT_CONTROL_PATH_DIR:
default: ~/.ansible/pc
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PERSISTENT_CONTROL_PATH_DIR}]
ini:
- {key: control_path_dir, section: persistent_connection}
- vars: []
yaml: {key: persistent_connection.control_path_dir}
+PERSISTENT_CONNECT_RETRIES:
+ default: 30
+ description: 'TODO: write it'
+ env: [{name: ANSIBLE_PERSISTENT_CONNECT_RETRIES}]
+ ini:
+ - {key: connect_retries, section: persistent_connection}
+ type: integer
+ yaml: {key: persistent_connection.connect_retries}
PERSISTENT_CONNECT_TIMEOUT:
default: 30
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PERSISTENT_CONNECT_TIMEOUT}]
ini:
- {key: connect_timeout, section: persistent_connection}
- value_type: integer
- vars: []
+ type: integer
yaml: {key: persistent_connection.connect_timeout}
PERSISTENT_CONNECT_RETRY_TIMEOUT:
default: 15
@@ -1369,74 +1330,72 @@ PERSISTENT_COMMAND_TIMEOUT:
yaml: {key: persistent_connection.command_timeout}
RETRY_FILES_ENABLED:
default: True
- desc: This controls whether a failed Ansible playbook should create a .retry file.
+ description: This controls whether a failed Ansible playbook should create a .retry file.
env: [{name: ANSIBLE_RETRY_FILES_ENABLED}]
ini:
- {key: retry_files_enabled, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: errors.retry.enabled}
RETRY_FILES_SAVE_PATH:
default: ~
- desc: This sets the path in which Ansible will save .retry files when a playbook fails and retry files are enabled.
+ description: This sets the path in which Ansible will save .retry files when a playbook fails and retry files are enabled.
env: [{name: ANSIBLE_RETRY_FILES_SAVE_PATH}]
ini:
- {key: retry_files_save_path, section: defaults}
- value_type: path
- vars: []
+ type: path
yaml: {key: errors.retry.path}
SHOW_CUSTOM_STATS:
default: False
- desc: 'TODO: write it'
+ description: 'This adds the custom stats set via the set_stats plugin to the default output'
env: [{name: ANSIBLE_SHOW_CUSTOM_STATS}]
ini:
- {key: show_custom_stats, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.show_custom_stats}
STRING_TYPE_FILTERS:
default: [string, to_json, to_nice_json, to_yaml, ppretty, json]
- desc: 'TODO: write it'
+ description:
+ - "This list of filters avoids 'type conversion' when templating variables"
+ - Useful when you want to avoid conversion into lists or dictionaries for JSON strings, for example.
env: [{name: ANSIBLE_STRING_TYPE_FILTERS}]
ini:
- {key: dont_type_filters, section: jinja2}
- value_type: list
- vars: []
+ type: list
yaml: {key: jinja2.dont_type_filters}
SYSTEM_WARNINGS:
default: True
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_SYSTEM_WARNINGS}]
ini:
- {key: system_warnings, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.system_warnings}
USE_PERSISTENT_CONNECTIONS:
default: False
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_USE_PERSISTENT_CONNECTIONS}]
ini:
- {key: use_persistent_connections, section: defaults}
- value_type: boolean
- vars: []
+ type: boolean
yaml: {key: defaults.use_persistent_connections}
VARIABLE_PRECEDENCE:
default: [all_inventory, groups_inventory, all_plugins_inventory, all_plugins_play, groups_plugins_inventory, groups_plugins_play]
- desc: 'TODO: write it'
+ description: 'TODO: write it'
env: [{name: ANSIBLE_PRECEDENCE}]
ini:
- {key: precedence, section: defaults}
- value_type: list
- vars: []
+ type: list
yaml: {key: defaults.precedence}
YAML_FILENAME_EXTENSIONS:
default: [".yml", ".yaml", ".json"]
- desc: "check all of these extensions when looking for 'variable' files which should be YAML or JSON or vaulted versions of theses."
+ description:
+ - "Check all of these extensions when looking for 'variable' files which should be YAML or JSON or vaulted versions of these."
+ - 'This affects vars_files, include_vars, inventory and vars plugins among others.'
env:
- name: ANSIBLE_YAML_FILENAME_EXT
ini:
- - {key: defaults, section: yaml_valid_extensions}
- value_type: list
- vars: []
+ - section: yaml_valid_extensions
+ key: defaults
+ type: list
yaml: {key: defaults.yaml_valid_extensions}
+...
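
Every entry above follows the same shape: a default, a description, optional env/ini/vars sources, a type used for casting, and optional version_added/deprecated metadata. As a rough illustration of how one definition resolves (a hand-rolled sketch of the precedence rules, not the real code path, which is get_config_value_and_origin() in manager.py below; the definition dict is abbreviated from DEFAULT_TIMEOUT above):

    import os
    from ansible.module_utils.six.moves import configparser

    timeout_def = {  # abbreviated from the DEFAULT_TIMEOUT entry above
        'default': 10,
        'env': [{'name': 'ANSIBLE_TIMEOUT'}],
        'ini': [{'key': 'timeout', 'section': 'defaults'}],
        'type': 'integer',
    }

    def resolve(defn, parser):
        # env beats ini beats default; within a source list the last defined entry wins
        value, origin = defn['default'], 'default'
        for entry in defn.get('ini', []):
            try:
                value, origin = parser.get(entry['section'], entry['key'], raw=True), 'ini'
            except configparser.Error:
                pass
        for env in defn.get('env', []):
            if os.environ.get(env['name']) is not None:
                value, origin = os.environ[env['name']], 'env: %s' % env['name']
        return int(value), origin  # 'type: integer' casting, cf. ensure_type()
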
diff --git a/lib/ansible/config/data.py b/lib/ansible/config/data.py
index 903b9cf00c..fb947e88c1 100644
--- a/lib/ansible/config/data.py
+++ b/lib/ansible/config/data.py
@@ -1,27 +1,10 @@
-# (c) 2017, Ansible by Red Hat, inc
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from collections import namedtuple
-
-Setting = namedtuple('Setting','name value origin')
class ConfigData(object):
@@ -59,3 +42,4 @@ class ConfigData(object):
if plugin.name not in self._plugins[plugin.type]:
self._plugins[plugin.type][plugin.name] = {}
self._plugins[plugin.type][plugin.name][setting.name] = setting
+
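
With the namedtuple gone, ConfigData is a plain container for resolved settings, keyed globally and per plugin. A minimal usage sketch, assuming only the update_setting()/get_settings() methods that the manager and constants.py are shown calling elsewhere in this commit:

    from collections import namedtuple

    from ansible.config.data import ConfigData

    Setting = namedtuple('Setting', 'name value origin')  # as redefined in manager.py below

    data = ConfigData()
    data.update_setting(Setting('DEFAULT_TIMEOUT', 10, 'default'))
    for setting in data.get_settings():
        print(setting.name, setting.value, setting.origin)
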
diff --git a/lib/ansible/config/manager.py b/lib/ansible/config/manager.py
index 6bef96f7af..77b4d17a4d 100644
--- a/lib/ansible/config/manager.py
+++ b/lib/ansible/config/manager.py
@@ -1,19 +1,5 @@
-# (c) 2017, Ansible by Red Hat, inc
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
@@ -24,7 +10,9 @@ import sys
import tempfile
import yaml
-from ansible.config.data import ConfigData, Setting
+from collections import namedtuple
+
+from ansible.config.data import ConfigData
from ansible.errors import AnsibleOptionsError, AnsibleError
from ansible.module_utils.six import string_types
from ansible.module_utils.six.moves import configparser
@@ -34,131 +22,169 @@ from ansible.parsing.quoting import unquote
from ansible.utils.path import unfrackpath
from ansible.utils.path import makedirs_safe
-
+Plugin = namedtuple('Plugin','name type')
+Setting = namedtuple('Setting','name value origin')
+
+# FIXME: see if we can unify in module_utils with similar function used by argspec
+def ensure_type(value, value_type):
+ ''' return a configuration variable with casting
+ :arg value: The value to ensure correct typing of
+ :kwarg value_type: The type of the value. This can be any of the following strings:
+ :boolean: sets the value to a True or False value
+ :integer: Sets the value to an integer or raises a ValueError
+ :float: Sets the value to a float or raises a ValueError
+ :list: Treats the value as a comma separated list. Split the value
+ and return it as a python list.
+ :none: Sets the value to None
+ :path: Expands any environment variables and tildes in the value.
+ :tmp_path: Create a unique temporary directory inside of the directory
+ specified by value and return its path.
+ :pathlist: Treat the value as a typical PATH string. (On POSIX, this
+ means colon separated strings.) Split the value and then expand
+ each part for environment variables and tildes.
+ '''
+ if value_type:
+ value_type = value_type.lower()
+
+ if value_type in ('boolean', 'bool'):
+ value = boolean(value, strict=False)
+
+ elif value:
+ if value_type in ('integer', 'int'):
+ value = int(value)
+
+ elif value_type == 'float':
+ value = float(value)
+
+ elif value_type == 'list':
+ if isinstance(value, string_types):
+ value = [x.strip() for x in value.split(',')]
+
+ elif value_type == 'none':
+ if value == "None":
+ value = None
+
+ elif value_type == 'path':
+ value = resolve_path(value)
+
+ elif value_type in ('tmp', 'temppath', 'tmppath'):
+ value = resolve_path(value)
+ if not os.path.exists(value):
+ makedirs_safe(value, 0o700)
+ prefix = 'ansible-local-%s' % os.getpid()
+ value = tempfile.mkdtemp(prefix=prefix, dir=value)
+
+ elif value_type == 'pathlist':
+ if isinstance(value, string_types):
+ value = [resolve_path(x) for x in value.split(os.pathsep)]
+
+ # defaults to string types
+ elif isinstance(value, string_types):
+ value = unquote(value)
+
+ return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
+
+# FIXME: see if this can live in utils/path
def resolve_path(path):
-
+ ''' resolve relative or 'variable' paths '''
if '{{CWD}}' in path: # allow users to force CWD using 'magic' {{CWD}}
path = path.replace('{{CWD}}', os.getcwd())
return unfrackpath(path, follow=False)
+# FIXME: generic file type?
+def get_config_type(cfile):
+
+ ftype = None
+ if cfile is not None:
+ ext = os.path.splitext(cfile)[-1]
+ if ext in ('.ini', '.cfg'):
+ ftype = 'ini'
+ elif ext in ('.yaml', '.yml'):
+ ftype = 'yaml'
+ else:
+ raise AnsibleOptionsError("Unsupported configuration file extension for %s: %s" % (cfile, to_native(ext)))
-def get_ini_config(p, entries):
+ return ftype
+
+# FIXME: can move to module_utils for use for ini plugins also?
+def get_ini_config_value(p, entry):
''' returns the value of last ini entry found '''
value = None
if p is not None:
- for entry in entries:
- try:
- value = p.get(entry.get('section','defaults'), entry.get('key',''), raw=True)
- except:
- pass
-
+ try:
+ value = p.get(entry.get('section','defaults'), entry.get('key',''), raw=True)
+ except: # FIXME: actually report issues here
+ pass
return value
class ConfigManager(object):
+ UNABLE = []
+ DEPRECATED = []
+
def __init__(self, conf_file=None):
+ self._base_defs = {}
+ self._plugins = {}
+ self._parser = None
+
+ self._config_file = conf_file
self.data = ConfigData()
- #FIXME: make dynamic?
- bconfig_def = to_bytes('%s/data/config.yml' % os.path.dirname(__file__))
+
+ #FIXME: make dynamic? scan for more? make its own method?
+ # Create configuration definitions from source
+ bconfig_def = to_bytes('%s/base.yml' % os.path.dirname(__file__))
if os.path.exists(bconfig_def):
with open(bconfig_def, 'rb') as config_def:
- self.initial_defs = yaml.safe_load(config_def)
+ self._base_defs = yaml.safe_load(config_def)
else:
raise AnsibleError("Missing base configuration definition file (bad install?): %s" % to_native(bconfig_def))
- ftype = None
- if conf_file is None:
+ if self._config_file is None:
# set config using ini
- conf_file = self.find_ini_config_file()
- ftype = 'ini'
- else:
- ext = os.path.splitext(conf_file)[-1]
- if ext in ('.ini', '.cfg'):
- ftype = 'ini'
- elif ext in ('.yaml', '.yml'):
- ftype = 'yaml'
- else:
- raise AnsibleOptionsError("Unsupported configuration file extension: \n{0}".format(ext))
+ self._config_file = self._find_ini_config_file()
- self.parse_config(conf_file, ftype)
+ if self._config_file:
+ if os.path.exists(self._config_file):
+ # initialize parser and read config
+ self._parse_config_file()
- def parse_config(self, cfile, ftype):
+ # update constants
+ self.update_config_data()
+
+ def _parse_config_file(self, cfile=None):
+ ''' return flat configuration settings from file(s) '''
# TODO: take list of files with merge/nomerge
- parser = None
- if cfile:
+ if cfile is None:
+ cfile = self._config_file
+
+ ftype = get_config_type(cfile)
+ if cfile is not None:
if ftype == 'ini':
- parser = configparser.ConfigParser()
+ self._parser = configparser.ConfigParser()
try:
- parser.read(cfile)
+ self._parser.read(cfile)
except configparser.Error as e:
- raise AnsibleOptionsError("Error reading config file: \n{0}".format(e))
- elif ftype == 'yaml':
- with open(cfile, 'rb') as config_stream:
- parser = yaml.safe_load(config_stream)
+ raise AnsibleOptionsError("Error reading config file (%s): %s" % (cfile, to_native(e)))
+ # FIXME: this should eventually handle yaml config files
+ #elif ftype == 'yaml':
+ # with open(cfile, 'rb') as config_stream:
+ # self._parser = yaml.safe_load(config_stream)
else:
- raise AnsibleOptionsError("Unsupported configuration file type: \n{0}".format(ftype))
+ raise AnsibleOptionsError("Unsupported configuration file type: %s" % to_native(ftype))
- self.update_config(cfile, self.initial_defs, parser, ftype)
- def update_config(self, configfile, defs, parser, ftype):
-
- # update the constant for config file
- self.data.update_setting(Setting('CONFIG_FILE', configfile, ''))
+ def _find_yaml_config_files(self):
+ ''' Load YAML Config Files in order, check merge flags, keep origin of settings'''
+ pass
- origin = None
- # env and config defs can have several entries, ordered in list from lowest to highest precedence
- for config in self.initial_defs:
-
- value = None
- # env vars are highest precedence
- if defs[config].get('env'):
- try:
- for env_var in defs[config]['env']:
- env_value = os.environ.get(env_var.get('name'), None)
- if env_value is not None: # only set if env var is defined
- value = env_value
- origin = 'env: %s' % env_var.get('name')
- except:
- sys.stderr.write("Error while loading environment configs for %s\n" % config)
-
- # try config file entries next
- if value is None and defs[config].get(ftype):
- if ftype == 'ini':
- # load from ini config
- try:
- value = get_ini_config(parser, defs[config]['ini'])
- origin = configfile
- except Exception as e:
- sys.stderr.write("Error while loading ini config %s: %s" % (configfile, str(e)))
- elif ftype == 'yaml':
- # FIXME: break down key from defs (. notation???)
- key = 'name'
- value = parser.get(key)
- origin = configfile
-
- # set default if we got here w/o a value
- if value is None:
- value = defs[config].get('default')
- origin = 'default'
-
- # ensure correct type
- try:
- value = self.ensure_type(value, defs[config].get('value_type'))
- except:
- sys.stderr.write("Unable to set correct type for %s, skipping" % config)
- continue
-
- # set the constant
- self.data.update_setting(Setting(config, value, origin))
-
-
- def find_ini_config_file(self):
- ''' Load Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
+ def _find_ini_config_file(self):
+ ''' Load INI Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
+ # FIXME: eventually deprecate ini configs
path0 = os.getenv("ANSIBLE_CONFIG", None)
if path0 is not None:
@@ -180,57 +206,163 @@ class ConfigManager(object):
return path
- def ensure_type(self, value, value_type):
- ''' return a configuration variable with casting
- :arg value: The value to ensure correct typing of
- :kwarg value_type: The type of the value. This can be any of the following strings:
- :boolean: sets the value to a True or False value
- :integer: Sets the value to an integer or raises a ValueType error
- :float: Sets the value to a float or raises a ValueType error
- :list: Treats the value as a comma separated list. Split the value
- and return it as a python list.
- :none: Sets the value to None
- :path: Expands any environment variables and tilde's in the value.
- :tmp_path: Create a unique temporary directory inside of the directory
- specified by value and return its path.
- :pathlist: Treat the value as a typical PATH string. (On POSIX, this
- means colon separated strings.) Split the value and then expand
- each part for environment variables and tildes.
+ def get_configuration_definitions(self, plugin_type=None, name=None):
+ ''' just list the possible settings, either base or for specific plugins or plugin '''
+
+ ret = {}
+ if plugin_type is None:
+ ret = self._base_defs
+ elif name is None:
+ ret = self._plugins.get(plugin_type, {})
+ else:
+ ret = {name: self._plugins.get(plugin_type, {}).get(name, {})}
+
+ return ret
+
+ def _loop_entries(self, container, entry_list):
+ ''' repeat code for value entry assignment '''
+
+ value = None
+ origin = None
+ for entry in entry_list:
+ name = entry.get('name')
+ temp_value = container.get(name, None)
+ if temp_value is not None: # only set if entry is defined in the container
+ value = temp_value
+ origin = name
+
+ # deal with deprecation of setting source, if used
+ #FIXME: if entry.get('deprecated'):
+
+ return value, origin
+
+ def get_config_value(self, config, cfile=None, plugin_type=None, plugin_name=None, variables=None):
+ ''' wrapper '''
+ value, _drop = self.get_config_value_and_origin(config, cfile=cfile, plugin_type=plugin_type, plugin_name=plugin_name, variables=variables)
+ return value
+
+ def get_config_value_and_origin(self, config, cfile=None, plugin_type=None, plugin_name=None, variables=None):
+ ''' Given a config key, figure out the actual value and report the origin of the setting '''
+
+ if cfile is None:
+ cfile = self._config_file
+
+ # Note: list-valued sources are ordered from lowest to highest precedence (last one wins)
+ value = None
+ defs = {}
+ if plugin_type is None:
+ defs = self._base_defs
+ elif plugin_name is None:
+ defs = self._plugins[plugin_type]
+ else:
+ defs = self._plugins[plugin_type][plugin_name]
+
+ # Use 'variable overrides' if present, highest precedence, but only present when querying running play
+ if variables:
+ value, origin = self._loop_entries(variables, defs[config]['vars'])
+ origin = 'var: %s' % origin
+
+ # env vars are next precedence
+ if value is None and defs[config].get('env'):
+ value, origin = self._loop_entries(os.environ, defs[config]['env'])
+ origin = 'env: %s' % origin
+
+ # try config file entries next, if we have one
+ if value is None and cfile is not None:
+ ftype = get_config_type(cfile)
+ if ftype and defs[config].get(ftype):
+ if ftype == 'ini':
+ # load from ini config
+ try: # FIXME: generalize _loop_entries to allow for files also
+ for ini_entry in defs[config]['ini']:
+ value = get_ini_config_value(self._parser, ini_entry)
+ origin = cfile
+ #FIXME: if ini_entry.get('deprecated'):
+ except Exception as e:
+ sys.stderr.write("Error while loading ini config %s: %s" % (cfile, to_native(e)))
+ elif ftype == 'yaml':
+ pass # FIXME: implement, also , break down key from defs (. notation???)
+ origin = cfile
+
'''
- if value_type == 'boolean':
- value = boolean(value, strict=False)
+ # for plugins, try using existing constants; this is for backwards compatibility
+ if plugin_name and defs[config].get('constants'):
+ value, origin = self._loop_entries(self.data, defs[config]['constants'])
+ origin = 'constant: %s' % origin
+ '''
+
+ # set default if we got here w/o a value
+ if value is None:
+ value = defs[config].get('default')
+ origin = 'default'
+ # FIXME: moved eval to constants as this does not have access to previous vars
+ if plugin_type is None and isinstance(value, string_types) and (value.startswith('eval(') and value.endswith(')')):
+ return value, origin
+ #default_value = defs[config].get('default')
+ #if plugin_type is None and isinstance(default_value, string_types) and (default_value.startswith('eval(') and default_value.endswith(')')):
+ # try:
+ # eval_string = default_value.replace('eval(', '', 1)[:-1]
+ # value = eval(eval_string) # FIXME: safe eval?
+ # except:
+ # value = default_value
+ #else:
+ # value = default_value
+
+ # ensure correct type
+ try:
+ value = ensure_type(value, defs[config].get('type'))
+ except Exception as e:
+ self.UNABLE.append(config)
+
+ # deal with deprecation of the setting
+ if defs[config].get('deprecated') and origin != 'default':
+ self.DEPRECATED.append((config, defs[config].get('deprecated')))
+
+ return value, origin
+
+ def update_plugin_config(self, plugin_type, name, defs):
+ ''' really: update constants '''
+ # no sense?
+ self.initialize_plugin_configuration_definitions(plugin_type, name, defs)
+ self.update_config_data(defs)
+
+ def initialize_plugin_configuration_definitions(self, plugin_type, name, defs):
- elif value:
- if value_type == 'integer':
- value = int(value)
+ if plugin_type not in self._plugins:
+ self._plugins[plugin_type] = {}
- elif value_type == 'float':
- value = float(value)
+ self._plugins[plugin_type][name] = defs
- elif value_type == 'list':
- if isinstance(value, string_types):
- value = [x.strip() for x in value.split(',')]
+ def update_config_data(self, defs=None, configfile=None):
+ ''' really: update constants '''
- elif value_type == 'none':
- if value == "None":
- value = None
+ if defs is None:
+ defs = self._base_defs
- elif value_type == 'path':
- value = resolve_path(value)
+ if configfile is None:
+ configfile = self._config_file
- elif value_type == 'tmppath':
- value = resolve_path(value)
- if not os.path.exists(value):
- makedirs_safe(value, 0o700)
- prefix = 'ansible-local-%s' % os.getpid()
- value = tempfile.mkdtemp(prefix=prefix, dir=value)
+ if not isinstance(defs, dict):
+ raise AnsibleOptionsError("Invalid configuration definition type: %s for %s" % (type(defs), defs))
- elif value_type == 'pathlist':
- if isinstance(value, string_types):
- value = [resolve_path(x) for x in value.split(os.pathsep)]
+ # update the constant for config file
+ self.data.update_setting(Setting('CONFIG_FILE', configfile, ''))
- elif isinstance(value, string_types):
- value = unquote(value)
+ origin = None
+ # env and config defs can have several entries, ordered in list from lowest to highest precedence
+ for config in defs:
+ if not isinstance(defs[config], dict):
+ raise AnsibleOptionsError("Invalid configuration definition '%s': type is %s" % (to_native(config), type(defs[config])))
- return to_text(value, errors='surrogate_or_strict', nonstring='passthru')
+ # get value and origin
+ value, origin = self.get_config_value_and_origin(config, configfile)
+
+ # set the constant
+ self.data.update_setting(Setting(config, value, origin))
+ # FIXME: find better way to do this by passing back to where display is available
+ if self.UNABLE:
+ sys.stderr.write("Unable to set correct type for:\n\t%s\n" % '\n\t'.join(self.UNABLE))
+ if self.DEPRECATED:
+ for k, reason in self.DEPRECATED:
+ sys.stderr.write("[DEPRECATED] %s: %(why)s. It will be removed in %(version)s. As alternative %(alternative)s", (k, reason))
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 3bf4a24ea8..25f1a00d8e 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -1,46 +1,58 @@
-# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+# Copyright (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import os # used to set lang
+
from string import ascii_letters, digits
from ansible.module_utils._text import to_text
from ansible.module_utils.parsing.convert_bool import boolean, BOOLEANS_TRUE
+from ansible.module_utils.six import string_types
from ansible.config.manager import ConfigManager
-_config = ConfigManager()
-
-# Generate constants from config
-for setting in _config.data.get_settings():
- vars()[setting.name] = setting.value
-
+def _deprecated(msg):
+ ''' display is not guaranteed here, nor is it guaranteed to be the full class, but try anyway and fall back to sys.stderr.write '''
+ try:
+ from __main__ import display
+ display.deprecated(msg, version='2.8')
+ except:
+ import sys
+ sys.stderr.write('[DEPRECATED] %s, to be removed in 2.8\n' % msg)
def mk_boolean(value):
''' moved to module_utils'''
- # We don't have a display here so we can't call deprecated
- # display.deprecated('ansible.constants.mk_boolean() is deprecated. Use ansible.module_utils.parsing.convert_bool.boolean() instead', version='2.8')
+ _deprecated('ansible.constants.mk_boolean() is deprecated. Use ansible.module_utils.parsing.convert_bool.boolean() instead')
return boolean(value, strict=False)
+def get_config(parser, section, key, env_var, default_value, value_type=None, expand_relative_paths=False):
+ ''' kept for backwards compatibility, but deprecated '''
+ _deprecated('ansible.constants.get_config() is deprecated. There is new config API, see porting docs.')
+
+ from ansible.config.manager import ensure_type, get_ini_config_value
-# ### CONSTANTS ### yes, actual ones
+ value = None
+ # small reconstruction of the old code env/ini/default
+ value = os.environ.get(env_var, None)
+ if value is None:
+ try:
+ value = get_ini_config_value(parser, {'key': key, 'section': section})
+ except:
+ pass
+ if value is None:
+ value = default_value
+ try:
+ value = ensure_type(value, value_type)
+ except:
+ pass
+ return value
+
+### CONSTANTS ### yes, actual ones
BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
BECOME_METHODS = ['sudo', 'su', 'pbrun', 'pfexec', 'doas', 'dzdo', 'ksu', 'runas', 'pmrun']
BECOME_ERROR_STRINGS = {
@@ -79,3 +91,22 @@ RESTRICTED_RESULT_KEYS = ['ansible_rsync_path', 'ansible_playbook_python']
TREE_DIR = None
VAULT_VERSION_MIN = 1.0
VAULT_VERSION_MAX = 1.0
+
+### POPULATE SETTINGS FROM CONFIG ###
+config = ConfigManager()
+
+# Generate constants from config
+for setting in config.data.get_settings():
+
+ # FIXME: find better way to do in manager class and/or ensure types
+ if isinstance(setting.value, string_types) and (setting.value.startswith('eval(') and setting.value.endswith(')')):
+ try:
+ eval_string = setting.value.replace('eval(', '', 1)[:-1]
+ vars()[setting.name] = eval(eval_string) # FIXME: safe eval?
+ continue
+ except:
+ pass
+
+ vars()[setting.name] = setting.value
+
+
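
The eval() escape hatch is what lets a base.yml default reference another constant, as INVENTORY_IGNORE_EXTS does above. Reduced to its core (a standalone sketch of the loop above, not additional constants.py code):

    BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
    value = "eval(BLACKLIST_EXTS + ('~', '.orig', '.ini', '.cfg', '.retry'))"

    if value.startswith('eval(') and value.endswith(')'):
        eval_string = value.replace('eval(', '', 1)[:-1]
        value = eval(eval_string)  # the FIXME above stands: this is not a safe eval

    # value is now the full tuple of extensions to ignore
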
diff --git a/lib/ansible/errors/__init__.py b/lib/ansible/errors/__init__.py
index c3249d9679..18119d6d95 100644
--- a/lib/ansible/errors/__init__.py
+++ b/lib/ansible/errors/__init__.py
@@ -20,6 +20,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from collections import Sequence
+import traceback
+import sys
from ansible.errors.yaml_strings import (
YAML_COMMON_DICT_ERROR,
@@ -68,6 +70,8 @@ class AnsibleError(Exception):
self.message += '\nexception type: %s' % to_native(type(orig_exc))
self.message += '\nexception: %s' % to_native(orig_exc)
+ self.tb = ''.join(traceback.format_tb(sys.exc_info()[2]))
+
def __str__(self):
return self.message
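
Capturing sys.exc_info() in the constructor only yields a useful traceback when the error is raised from inside an except block, which is how the parsers use it. A small sketch of the pattern (hypothetical error message; the real call sites are in the inventory code below):

    from ansible.errors import AnsibleParserError

    try:
        raise ValueError('malformed host line')
    except ValueError:
        err = AnsibleParserError('Failed to parse source')
        # err.tb now holds the formatted ValueError traceback, which
        # inventory/manager.py surfaces at -vvv via display.vvv(fail['exc'].tb)
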
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index d13d37d800..474c71dcbf 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -36,7 +36,7 @@ from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_text
-from ansible.plugins import module_utils_loader, ps_module_utils_loader
+from ansible.plugins.loader import module_utils_loader, ps_module_utils_loader
from ansible.plugins.shell.powershell import async_watchdog, async_wrapper, become_wrapper, leaf_exec, exec_wrapper
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
@@ -579,7 +579,7 @@ def recursive_finder(name, data, py_module_names, py_module_cache, zf):
zf.writestr(os.path.join("ansible/module_utils",
py_module_file_name), py_module_cache[py_module_name][0])
- display.vvv("Using module_utils file %s" % py_module_cache[py_module_name][1])
+ display.vvvvv("Using module_utils file %s" % py_module_cache[py_module_name][1])
# Add the names of the files we're scheduling to examine in the loop to
# py_module_names so that we don't re-examine them in the next pass
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index f4e96e2065..ae1a9fdebf 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -731,6 +731,7 @@ class TaskExecutor:
conn_type = self._play_context.connection
connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin)
+ self._play_context.set_options_from_plugin(connection)
if not connection:
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py
index 49b84364bb..6dbf51c716 100644
--- a/lib/ansible/executor/task_queue_manager.py
+++ b/lib/ansible/executor/task_queue_manager.py
@@ -31,7 +31,7 @@ from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_text
from ansible.playbook.block import Block
from ansible.playbook.play_context import PlayContext
-from ansible.plugins import callback_loader, strategy_loader, module_loader
+from ansible.plugins.loader import callback_loader, strategy_loader, module_loader
from ansible.plugins.callback import CallbackBase
from ansible.template import Templar
from ansible.utils.helpers import pct_to_int
diff --git a/lib/ansible/inventory/manager.py b/lib/ansible/inventory/manager.py
index 50a42ccbc5..245cade032 100644
--- a/lib/ansible/inventory/manager.py
+++ b/lib/ansible/inventory/manager.py
@@ -30,7 +30,7 @@ from ansible.inventory.data import InventoryData
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
-from ansible.plugins import PluginLoader
+from ansible.plugins.loader import PluginLoader
from ansible.utils.path import unfrackpath
try:
@@ -260,14 +260,15 @@ class InventoryManager(object):
display.vvv(u'Parsed %s inventory source with %s plugin' % (to_text(source), plugin_name))
break
except AnsibleParserError as e:
- failures.append(u'\n* Failed to parse %s with %s inventory plugin: %s\n' % (to_text(source), plugin_name, to_text(e)))
+ failures.append({'src': source, 'plugin': plugin_name, 'exc': e})
else:
display.debug(u'%s did not meet %s requirements' % (to_text(source), plugin_name))
else:
if failures:
# only if no plugin processed files should we show errors.
for fail in failures:
- display.warning(fail)
+ display.warning(u'\n* Failed to parse %s with %s inventory plugin: %s' % (to_text(fail['src']), fail['plugin'], to_text(fail['exc'])))
+ display.vvv(fail['exc'].tb)
if not parsed:
display.warning(u"Unable to parse %s as an inventory source" % to_text(source))
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
index 39d9ff25dc..632911b5dc 100644
--- a/lib/ansible/parsing/mod_args.py
+++ b/lib/ansible/parsing/mod_args.py
@@ -23,7 +23,7 @@ from ansible.errors import AnsibleParserError, AnsibleError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv, split_args
-from ansible.plugins import module_loader, action_loader
+from ansible.plugins.loader import module_loader, action_loader
from ansible.template import Templar
diff --git a/lib/ansible/parsing/plugin_docs.py b/lib/ansible/parsing/plugin_docs.py
new file mode 100644
index 0000000000..0f48b0080f
--- /dev/null
+++ b/lib/ansible/parsing/plugin_docs.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2017 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import ast
+import yaml
+
+from ansible.parsing.yaml.loader import AnsibleLoader
+
+try:
+ from __main__ import display
+except ImportError:
+ from ansible.utils.display import Display
+ display = Display()
+
+
+def read_docstring(filename, verbose=True, ignore_errors=True):
+ """
+ Search for assignment of the DOCUMENTATION and EXAMPLES variables in the given file.
+ Parse DOCUMENTATION from YAML and return the YAML doc or None together with EXAMPLES, as plain text.
+ """
+
+ data = {
+ 'doc': None,
+ 'plainexamples': None,
+ 'returndocs': None,
+ 'metadata': None
+ }
+
+ string_to_vars = {
+ 'DOCUMENTATION': 'doc',
+ 'EXAMPLES': 'plainexamples',
+ 'RETURN': 'returndocs',
+ 'ANSIBLE_METADATA': 'metadata'
+ }
+
+ try:
+ M = ast.parse(''.join(open(filename)))
+ try:
+ display.debug('Attempting to parse the first docstring as YAML docs')
+ docstring = yaml.load(M.body[0].value.s)
+ for string in string_to_vars.keys():
+ if string in docstring:
+ data[string_to_vars[string]] = docstring[string]
+ display.debug('assigned :%s' % string_to_vars[string])
+ except Exception as e:
+ display.debug('failed docstring parsing: %s' % str(e))
+
+ if 'doc' not in data or not data['doc']:
+ display.debug('Fallback to vars parsing')
+ for child in M.body:
+ if isinstance(child, ast.Assign):
+ for t in child.targets:
+ try:
+ theid = t.id
+ except AttributeError:
+ # skip; AttributeError can happen when the target is not a simple name
+ display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
+ continue
+
+ if theid in string_to_vars:
+ varkey = string_to_vars[theid]
+ if isinstance(child.value, ast.Dict):
+ data[varkey] = ast.literal_eval(child.value)
+ else:
+ if theid in ['DOCUMENTATION', 'ANSIBLE_METADATA']:
+ # string should be yaml
+ data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
+ else:
+ # not yaml, should be a simple string
+ data[varkey] = child.value.s
+ display.debug('assigned :%s' % varkey)
+
+ except:
+ if verbose:
+ display.error("unable to parse %s" % filename)
+ if not ignore_errors:
+ raise
+
+ return data
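
read_docstring() is the hook for the 'draft config from plugin docs' part of this change: a plugin's DOCUMENTATION block is parsed into data['doc'], and its options entries can then be registered as configuration definitions. A hedged usage sketch (the path is illustrative):

    from ansible.parsing.plugin_docs import read_docstring

    data = read_docstring('lib/ansible/plugins/connection/ssh.py')
    if data['doc']:
        # option names declared in the plugin's DOCUMENTATION; these feed
        # initialize_plugin_configuration_definitions() in the config manager
        print(list(data['doc'].get('options', {})))
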
diff --git a/lib/ansible/playbook/__init__.py b/lib/ansible/playbook/__init__.py
index cc0451a2f8..7c20843925 100644
--- a/lib/ansible/playbook/__init__.py
+++ b/lib/ansible/playbook/__init__.py
@@ -26,7 +26,7 @@ from ansible.errors import AnsibleParserError
from ansible.module_utils._text import to_text
from ansible.playbook.play import Play
from ansible.playbook.playbook_include import PlaybookInclude
-from ansible.plugins import get_all_plugin_loaders
+from ansible.plugins.loader import get_all_plugin_loaders
try:
from __main__ import display
diff --git a/lib/ansible/playbook/base.py b/lib/ansible/playbook/base.py
index 775fd9878e..66caf73bcf 100644
--- a/lib/ansible/playbook/base.py
+++ b/lib/ansible/playbook/base.py
@@ -197,9 +197,10 @@ class Base(with_metaclass(BaseMeta, object)):
self.vars = dict()
def dump_me(self, depth=0):
+ ''' this is never called from production code; it is here to be used when debugging, as a 'complex print' '''
if depth == 0:
- print("DUMPING OBJECT ------------------------------------------------------")
- print("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
+ display.debug("DUMPING OBJECT ------------------------------------------------------")
+ display.debug("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
if hasattr(self, '_parent') and self._parent:
self._parent.dump_me(depth + 2)
dep_chain = self._parent.get_dep_chain()
diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py
index d3f738fffe..f86709f202 100644
--- a/lib/ansible/playbook/play_context.py
+++ b/lib/ansible/playbook/play_context.py
@@ -36,6 +36,7 @@ from ansible.module_utils._text import to_bytes
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
+from ansible.plugins import get_plugin_class
from ansible.utils.ssh_functions import check_for_controlpersist
@@ -54,31 +55,47 @@ __all__ = ['PlayContext']
# in variable names.
MAGIC_VARIABLE_MAPPING = dict(
+ accelerate_port=('ansible_accelerate_port', ),
+
+ # base
connection=('ansible_connection', ),
+ module_compression=('ansible_module_compression', ),
+ shell=('ansible_shell_type', ),
+ executable=('ansible_shell_executable', ),
+ remote_tmp_dir=('ansible_remote_tmp', ),
+
+ # connection common
remote_addr=('ansible_ssh_host', 'ansible_host'),
remote_user=('ansible_ssh_user', 'ansible_user'),
- remote_tmp_dir=('ansible_remote_tmp', ),
+ password=('ansible_ssh_pass', 'ansible_password'),
port=('ansible_ssh_port', 'ansible_port'),
+ pipelining=('ansible_ssh_pipelining', 'ansible_pipelining'),
timeout=('ansible_ssh_timeout', 'ansible_timeout'),
- ssh_executable=('ansible_ssh_executable', ),
- accelerate_port=('ansible_accelerate_port', ),
- password=('ansible_ssh_pass', 'ansible_password'),
private_key_file=('ansible_ssh_private_key_file', 'ansible_private_key_file'),
- pipelining=('ansible_ssh_pipelining', 'ansible_pipelining'),
- shell=('ansible_shell_type', ),
+
+ # networking modules
network_os=('ansible_network_os', ),
+
+ # ssh TODO: remove
+ ssh_executable=('ansible_ssh_executable', ),
+ ssh_common_args=('ansible_ssh_common_args', ),
+ sftp_extra_args=('ansible_sftp_extra_args', ),
+ scp_extra_args=('ansible_scp_extra_args', ),
+ ssh_extra_args=('ansible_ssh_extra_args', ),
+ ssh_transfer_method=('ansible_ssh_transfer_method', ),
+
+ # docker TODO: remove
+ docker_extra_args=('ansible_docker_extra_args', ),
+
+ # become
become=('ansible_become', ),
become_method=('ansible_become_method', ),
become_user=('ansible_become_user', ),
become_pass=('ansible_become_password', 'ansible_become_pass'),
become_exe=('ansible_become_exe', ),
become_flags=('ansible_become_flags', ),
- ssh_common_args=('ansible_ssh_common_args', ),
- docker_extra_args=('ansible_docker_extra_args', ),
- sftp_extra_args=('ansible_sftp_extra_args', ),
- scp_extra_args=('ansible_scp_extra_args', ),
- ssh_extra_args=('ansible_ssh_extra_args', ),
- ssh_transfer_method=('ansible_ssh_transfer_method', ),
+
+ # deprecated
sudo=('ansible_sudo', ),
sudo_user=('ansible_sudo_user', ),
sudo_pass=('ansible_sudo_password', 'ansible_sudo_pass'),
@@ -89,10 +106,9 @@ MAGIC_VARIABLE_MAPPING = dict(
su_pass=('ansible_su_password', 'ansible_su_pass'),
su_exe=('ansible_su_exe', ),
su_flags=('ansible_su_flags', ),
- executable=('ansible_shell_executable', ),
- module_compression=('ansible_module_compression', ),
)
+# TODO: needs to be configurable
b_SU_PROMPT_LOCALIZATIONS = [
to_bytes('Password'),
to_bytes('암호'),
@@ -135,7 +151,7 @@ TASK_ATTRIBUTE_OVERRIDES = (
'become_method',
'become_flags',
'connection',
- 'docker_extra_args',
+ 'docker_extra_args', # TODO: remove
'delegate_to',
'no_log',
'remote_user',
@@ -143,6 +159,11 @@ TASK_ATTRIBUTE_OVERRIDES = (
RESET_VARS = (
'ansible_connection',
+ 'ansible_user',
+ 'ansible_host',
+ 'ansible_port',
+
+ # TODO: ???
'ansible_docker_extra_args',
'ansible_ssh_host',
'ansible_ssh_pass',
@@ -151,9 +172,6 @@ RESET_VARS = (
'ansible_ssh_private_key_file',
'ansible_ssh_pipelining',
'ansible_ssh_executable',
- 'ansible_user',
- 'ansible_host',
- 'ansible_port',
)
@@ -165,47 +183,59 @@ class PlayContext(Base):
connection/authentication information.
'''
+ # base
+ _module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION)
+ _shell = FieldAttribute(isa='string')
+ _executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
+
# connection fields, some are inherited from Base:
# (connection, port, remote_user, environment, no_log)
- _docker_extra_args = FieldAttribute(isa='string')
_remote_addr = FieldAttribute(isa='string')
_remote_tmp_dir = FieldAttribute(isa='string', default=C.DEFAULT_REMOTE_TMP)
_password = FieldAttribute(isa='string')
- _private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
_timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
- _shell = FieldAttribute(isa='string')
- _network_os = FieldAttribute(isa='string')
_connection_user = FieldAttribute(isa='string')
+ _private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
+ _pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_PIPELINING)
+
+ # networking modules
+ _network_os = FieldAttribute(isa='string')
+
+ # docker FIXME: remove these
+ _docker_extra_args = FieldAttribute(isa='string')
+
+ # ssh # FIXME: remove these
+ _ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
_ssh_args = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_ARGS)
_ssh_common_args = FieldAttribute(isa='string')
_sftp_extra_args = FieldAttribute(isa='string')
_scp_extra_args = FieldAttribute(isa='string')
_ssh_extra_args = FieldAttribute(isa='string')
- _ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
_ssh_transfer_method = FieldAttribute(isa='string', default=C.DEFAULT_SSH_TRANSFER_METHOD)
+
+ # ???
_connection_lockfd = FieldAttribute(isa='int')
- _pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_SSH_PIPELINING)
+
+ # accelerate FIXME: remove as soon as deprecation period expires
_accelerate = FieldAttribute(isa='bool', default=False)
_accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
_accelerate_port = FieldAttribute(isa='int', default=C.ACCELERATE_PORT, always_post_validate=True)
- _executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
- _module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION)
# privilege escalation fields
_become = FieldAttribute(isa='bool')
_become_method = FieldAttribute(isa='string')
_become_user = FieldAttribute(isa='string')
_become_pass = FieldAttribute(isa='string')
- _become_exe = FieldAttribute(isa='string')
- _become_flags = FieldAttribute(isa='string')
+ _become_exe = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_EXE)
+ _become_flags = FieldAttribute(isa='string', default=C.DEFAULT_BECOME_FLAGS)
_prompt = FieldAttribute(isa='string')
- # backwards compatibility fields for sudo/su
- _sudo_exe = FieldAttribute(isa='string')
- _sudo_flags = FieldAttribute(isa='string')
+ # DEPRECATED: backwards compatibility fields for sudo/su
+ _sudo_exe = FieldAttribute(isa='string', default=C.DEFAULT_SUDO_EXE)
+ _sudo_flags = FieldAttribute(isa='string', default=C.DEFAULT_SUDO_FLAGS)
_sudo_pass = FieldAttribute(isa='string')
- _su_exe = FieldAttribute(isa='string')
- _su_flags = FieldAttribute(isa='string')
+ _su_exe = FieldAttribute(isa='string', default=C.DEFAULT_SU_EXE)
+ _su_flags = FieldAttribute(isa='string', default=C.DEFAULT_SU_FLAGS)
_su_pass = FieldAttribute(isa='string')
# general flags
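
The regrouping above organizes PlayContext's fields by concern (base, networking, docker, ssh, accelerate, privilege escalation) and restores config-backed defaults on the deprecated sudo/su fields. Each FieldAttribute is a declarative field with a type and a default; the toy stand-in below is for illustration only, the real descriptor lives in ansible.playbook.attribute and does considerably more:

    class FieldAttribute:
        # toy stand-in: the real class also handles type coercion,
        # inheritance between objects, and post-validation
        def __init__(self, isa='string', default=None, always_post_validate=False):
            self.isa = isa
            self.default = default
            self.always_post_validate = always_post_validate

    _pipelining = FieldAttribute(isa='bool', default=False)
    print(_pipelining.isa, _pipelining.default)  # -> bool False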
@@ -277,6 +307,22 @@ class PlayContext(Base):
if play.force_handlers is not None:
self.force_handlers = play.force_handlers
+ def set_options_from_plugin(self, plugin):
+ # generic derived from connection plugin
+
+ # get options for plugins
+ options = C.config.get_configuration_definitions(get_plugin_class(plugin), plugin._load_name)
+ for option in options:
+ if option:
+ flag = options[option].get('name')
+ if flag:
+ setattr(self, flag, self.connection.get_option(flag))
+
+ # TODO: made irrelevant by the above
+ # get ssh options FIXME: make these common to all connections
+ # for flag in ('ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args'):
+ # setattr(self, flag, getattr(options, flag, ''))
+
def set_options(self, options):
'''
Configures this connection information instance with data from
@@ -291,12 +337,10 @@ class PlayContext(Base):
self.check_mode = boolean(options.check, strict=False)
- # get ssh options FIXME: make these common to all connections
- for flag in ['ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args']:
- setattr(self, flag, getattr(options, flag, ''))
-
- # general flags (should we move out?)
- for flag in ['connection', 'remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
+ # general flags (should we move out?)
+ # for flag in ('connection', 'remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff'):
+ # should only be 'non plugin' flags
+ for flag in ('connection', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff'):
attribute = getattr(options, flag, False)
if attribute:
setattr(self, flag, attribute)
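
The new set_options_from_plugin() above is the bridge between per-plugin config definitions and PlayContext: every option the loaded connection plugin declared is resolved through the plugin and copied onto the context under the name from its definition. A self-contained sketch of that copy loop, with hypothetical definitions standing in for what C.config.get_configuration_definitions() returns:

    # hypothetical option definitions; the real ones come from the plugin's
    # DOCUMENTATION block, registered at plugin load time
    definitions = {
        'remote_user': {'name': 'remote_user'},
        'ssh_args': {'name': 'ssh_args'},
    }

    class FakeConnection(object):
        _load_name = 'ssh'

        def get_option(self, flag):
            # a real plugin resolves this through the config manager
            return {'remote_user': 'deploy', 'ssh_args': '-C'}[flag]

    class FakePlayContext(object):
        pass

    ctx, conn = FakePlayContext(), FakeConnection()
    for option in definitions:
        flag = definitions[option].get('name')
        if flag:
            setattr(ctx, flag, conn.get_option(flag))

    print(ctx.remote_user, ctx.ssh_args)  # -> deploy -C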
@@ -492,22 +536,18 @@ class PlayContext(Base):
command = success_cmd
# set executable to use for the privilege escalation method, with various overrides
- exe = (
- self.become_exe or
- getattr(self, '%s_exe' % self.become_method, None) or
- C.DEFAULT_BECOME_EXE or
- getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or
- self.become_method
- )
+ exe = self.become_method
+ for myexe in (getattr(self, '%s_exe' % self.become_method, None), self.become_exe):
+ if myexe:
+ exe = myexe
+ break
# set flags to use for the privilege escalation method, with various overrides
- flags = (
- self.become_flags or
- getattr(self, '%s_flags' % self.become_method, None) or
- C.DEFAULT_BECOME_FLAGS or
- getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or
- ''
- )
+ flags = ''
+ for myflag in (getattr(self, '%s_flags' % self.become_method, None), self.become_flags):
+ if myflag is not None:
+ flags = myflag
+ break
if self.become_method == 'sudo':
# If we have a password, we run sudo with a randomly-generated
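
The rewritten lookups above replace the long `or` chains with explicit loops: the method-specific field (e.g. sudo_exe, which now carries its config default) is consulted before the generic become_exe/become_flags, with the method name or an empty string as the final fallback. Note the flags loop tests `is not None`, so an explicitly empty flags string is honored. A worked example with hypothetical values:

    become_method = 'sudo'
    sudo_exe = '/usr/local/bin/sudo'  # method-specific field, defaulted from config
    become_exe = None                 # generic override, unset here

    exe = become_method               # final fallback: the method name itself
    for myexe in (sudo_exe, become_exe):
        if myexe:
            exe = myexe
            break

    print(exe)  # -> /usr/local/bin/sudo; with sudo_exe=None it would print 'sudo'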
diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py
index d9805b00fa..b0101d700f 100644
--- a/lib/ansible/playbook/role/__init__.py
+++ b/lib/ansible/playbook/role/__init__.py
@@ -31,7 +31,7 @@ from ansible.playbook.conditional import Conditional
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
-from ansible.plugins import get_all_plugin_loaders
+from ansible.plugins.loader import get_all_plugin_loaders
from ansible.utils.vars import combine_vars
diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py
index 35979b68a3..a1db1d1beb 100644
--- a/lib/ansible/playbook/task.py
+++ b/lib/ansible/playbook/task.py
@@ -26,7 +26,7 @@ from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping, AnsibleUnicode
-from ansible.plugins import lookup_loader
+from ansible.plugins.loader import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
diff --git a/lib/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
index 29d54519c2..b9e7a299e0 100644
--- a/lib/ansible/plugins/__init__.py
+++ b/lib/ansible/plugins/__init__.py
@@ -21,18 +21,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import glob
-import imp
-import os
-import os.path
-import sys
-import warnings
-
-from collections import defaultdict
+from abc import ABCMeta
from ansible import constants as C
-from ansible.module_utils._text import to_text
-
+from ansible.module_utils.six import with_metaclass
try:
from __main__ import display
@@ -46,537 +38,11 @@ PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
-def get_all_plugin_loaders():
- return [(name, obj) for (name, obj) in globals().items() if isinstance(obj, PluginLoader)]
-
-
-class PluginLoader:
-
- '''
- PluginLoader loads plugins from the configured plugin directories.
-
- It searches for plugins by iterating through the combined list of
- play basedirs, configured paths, and the python path.
- The first match is used.
- '''
-
- def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
-
- self.class_name = class_name
- self.base_class = required_base_class
- self.package = package
- self.subdir = subdir
- self.aliases = aliases
-
- if config and not isinstance(config, list):
- config = [config]
- elif not config:
- config = []
-
- self.config = config
-
- if class_name not in MODULE_CACHE:
- MODULE_CACHE[class_name] = {}
- if class_name not in PATH_CACHE:
- PATH_CACHE[class_name] = None
- if class_name not in PLUGIN_PATH_CACHE:
- PLUGIN_PATH_CACHE[class_name] = defaultdict(dict)
-
- self._module_cache = MODULE_CACHE[class_name]
- self._paths = PATH_CACHE[class_name]
- self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
-
- self._extra_dirs = []
- self._searched_paths = set()
-
- def __setstate__(self, data):
- '''
- Deserializer.
- '''
-
- class_name = data.get('class_name')
- package = data.get('package')
- config = data.get('config')
- subdir = data.get('subdir')
- aliases = data.get('aliases')
- base_class = data.get('base_class')
-
- PATH_CACHE[class_name] = data.get('PATH_CACHE')
- PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
-
- self.__init__(class_name, package, config, subdir, aliases, base_class)
- self._extra_dirs = data.get('_extra_dirs', [])
- self._searched_paths = data.get('_searched_paths', set())
-
- def __getstate__(self):
- '''
- Serializer.
- '''
-
- return dict(
- class_name=self.class_name,
- base_class=self.base_class,
- package=self.package,
- config=self.config,
- subdir=self.subdir,
- aliases=self.aliases,
- _extra_dirs=self._extra_dirs,
- _searched_paths=self._searched_paths,
- PATH_CACHE=PATH_CACHE[self.class_name],
- PLUGIN_PATH_CACHE=PLUGIN_PATH_CACHE[self.class_name],
- )
-
- def format_paths(self, paths):
- ''' Returns a string suitable for printing of the search path '''
-
- # Uses a list to get the order right
- ret = []
- for i in paths:
- if i not in ret:
- ret.append(i)
- return os.pathsep.join(ret)
-
- def print_paths(self):
- return self.format_paths(self._get_paths())
-
- def _all_directories(self, dir):
- results = []
- results.append(dir)
- for root, subdirs, files in os.walk(dir, followlinks=True):
- if '__init__.py' in files:
- for x in subdirs:
- results.append(os.path.join(root, x))
- return results
-
- def _get_package_paths(self, subdirs=True):
- ''' Gets the path of a Python package '''
-
- if not self.package:
- return []
- if not hasattr(self, 'package_path'):
- m = __import__(self.package)
- parts = self.package.split('.')[1:]
- for parent_mod in parts:
- m = getattr(m, parent_mod)
- self.package_path = os.path.dirname(m.__file__)
- if subdirs:
- return self._all_directories(self.package_path)
- return [self.package_path]
-
- def _get_paths(self, subdirs=True):
- ''' Return a list of paths to search for plugins in '''
-
- # FIXME: This is potentially buggy if subdirs is sometimes True and
- # sometimes False. In current usage, everything calls this with
- # subdirs=True except for module_utils_loader which always calls it
- # with subdirs=False. So there currently isn't a problem with this
- # caching.
- if self._paths is not None:
- return self._paths
-
- ret = self._extra_dirs[:]
-
- # look in any configured plugin paths, allow one level deep for subcategories
- if self.config is not None:
- for path in self.config:
- path = os.path.realpath(os.path.expanduser(path))
- if subdirs:
- contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
- for c in contents:
- if os.path.isdir(c) and c not in ret:
- ret.append(c)
- if path not in ret:
- ret.append(path)
-
- # look for any plugins installed in the package subtree
- # Note package path always gets added last so that every other type of
- # path is searched before it.
- ret.extend(self._get_package_paths(subdirs=subdirs))
-
- # HACK: because powershell modules are in the same directory
- # hierarchy as other modules we have to process them last. This is
- # because powershell only works on windows but the other modules work
- # anywhere (possibly including windows if the correct language
- # interpreter is installed). the non-powershell modules can have any
- # file extension and thus powershell modules are picked up in that.
- # The non-hack way to fix this is to have powershell modules be
- # a different PluginLoader/ModuleLoader. But that requires changing
- # other things too (known thing to change would be PATHS_CACHE,
- # PLUGIN_PATHS_CACHE, and MODULE_CACHE. Since those three dicts key
- # on the class_name and neither regular modules nor powershell modules
- # would have class_names, they would not work as written.
- reordered_paths = []
- win_dirs = []
-
- for path in ret:
- if path.endswith('windows'):
- win_dirs.append(path)
- else:
- reordered_paths.append(path)
- reordered_paths.extend(win_dirs)
-
- # cache and return the result
- self._paths = reordered_paths
- return reordered_paths
-
- def add_directory(self, directory, with_subdir=False):
- ''' Adds an additional directory to the search path '''
-
- directory = os.path.realpath(directory)
-
- if directory is not None:
- if with_subdir:
- directory = os.path.join(directory, self.subdir)
- if directory not in self._extra_dirs:
- # append the directory and invalidate the path cache
- self._extra_dirs.append(directory)
- self._paths = None
-
- def find_plugin(self, name, mod_type='', ignore_deprecated=False):
- ''' Find a plugin named name '''
-
- if mod_type:
- suffix = mod_type
- elif self.class_name:
- # Ansible plugins that run in the controller process (most plugins)
- suffix = '.py'
- else:
- # Only Ansible Modules. Ansible modules can be any executable so
- # they can have any suffix
- suffix = ''
-
- # The particular cache to look for modules within. This matches the
- # requested mod_type
- pull_cache = self._plugin_path_cache[suffix]
- try:
- return pull_cache[name]
- except KeyError:
- # Cache miss. Now let's find the plugin
- pass
-
- # TODO: Instead of using the self._paths cache (PATH_CACHE) and
- # self._searched_paths we could use an iterator. Before enabling that
- # we need to make sure we don't want to add additional directories
- # (add_directory()) once we start using the iterator. Currently, it
- # looks like _get_paths() never forces a cache refresh so if we expect
- # additional directories to be added later, it is buggy.
- for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
- try:
- full_paths = (os.path.join(path, f) for f in os.listdir(path))
- except OSError as e:
- display.warning("Error accessing plugin paths: %s" % to_text(e))
-
- for full_path in (f for f in full_paths if os.path.isfile(f) and not f.endswith('__init__.py')):
- full_name = os.path.basename(full_path)
-
- # HACK: We have no way of executing python byte
- # compiled files as ansible modules so specifically exclude them
- # FIXME: I believe this is only correct for modules and
- # module_utils. For all other plugins we want .pyc and .pyo should
- # bew valid
- if full_path.endswith(('.pyc', '.pyo')):
- continue
-
- splitname = os.path.splitext(full_name)
- base_name = splitname[0]
- try:
- extension = splitname[1]
- except IndexError:
- extension = ''
-
- # Module found, now enter it into the caches that match
- # this file
- if base_name not in self._plugin_path_cache['']:
- self._plugin_path_cache[''][base_name] = full_path
-
- if full_name not in self._plugin_path_cache['']:
- self._plugin_path_cache[''][full_name] = full_path
-
- if base_name not in self._plugin_path_cache[extension]:
- self._plugin_path_cache[extension][base_name] = full_path
-
- if full_name not in self._plugin_path_cache[extension]:
- self._plugin_path_cache[extension][full_name] = full_path
-
- self._searched_paths.add(path)
- try:
- return pull_cache[name]
- except KeyError:
- # Didn't find the plugin in this directory. Load modules from
- # the next one
- pass
-
- # if nothing is found, try finding alias/deprecated
- if not name.startswith('_'):
- alias_name = '_' + name
- # We've already cached all the paths at this point
- if alias_name in pull_cache:
- if not ignore_deprecated and not os.path.islink(pull_cache[alias_name]):
- display.deprecated('%s is kept for backwards compatibility '
- 'but usage is discouraged. The module '
- 'documentation details page may explain '
- 'more about this rationale.' %
- name.lstrip('_'))
- return pull_cache[alias_name]
-
- return None
-
- def has_plugin(self, name):
- ''' Checks if a plugin named name exists '''
-
- return self.find_plugin(name) is not None
-
- __contains__ = has_plugin
-
- def _load_module_source(self, name, path):
-
- # avoid collisions across plugins
- full_name = '.'.join([self.package, name])
-
- if full_name in sys.modules:
- # Avoids double loading, See https://github.com/ansible/ansible/issues/13110
- return sys.modules[full_name]
-
- with warnings.catch_warnings():
- warnings.simplefilter("ignore", RuntimeWarning)
- with open(path, 'rb') as module_file:
- module = imp.load_source(full_name, path, module_file)
- return module
-
- def get(self, name, *args, **kwargs):
- ''' instantiates a plugin of the given name using arguments '''
-
- found_in_cache = True
- class_only = kwargs.pop('class_only', False)
- if name in self.aliases:
- name = self.aliases[name]
- path = self.find_plugin(name)
- if path is None:
- return None
-
- if path not in self._module_cache:
- self._module_cache[path] = self._load_module_source(name, path)
- found_in_cache = False
-
- obj = getattr(self._module_cache[path], self.class_name)
- if self.base_class:
- # The import path is hardcoded and should be the right place,
- # so we are not expecting an ImportError.
- module = __import__(self.package, fromlist=[self.base_class])
- # Check whether this obj has the required base class.
- try:
- plugin_class = getattr(module, self.base_class)
- except AttributeError:
- return None
- if not issubclass(obj, plugin_class):
- return None
-
- self._display_plugin_load(self.class_name, name, self._searched_paths, path,
- found_in_cache=found_in_cache, class_only=class_only)
- if not class_only:
- try:
- obj = obj(*args, **kwargs)
- except TypeError as e:
- if "abstract" in e.args[0]:
- # Abstract Base Class. The found plugin file does not
- # fully implement the defined interface.
- return None
- raise
-
- # set extra info on the module, in case we want it later
- setattr(obj, '_original_path', path)
- setattr(obj, '_load_name', name)
- return obj
-
- def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
- msg = 'Loading %s \'%s\' from %s' % (class_name, os.path.basename(name), path)
-
- if len(searched_paths) > 1:
- msg = '%s (searched paths: %s)' % (msg, self.format_paths(searched_paths))
-
- if found_in_cache or class_only:
- msg = '%s (found_in_cache=%s, class_only=%s)' % (msg, found_in_cache, class_only)
-
- display.debug(msg)
-
- def all(self, *args, **kwargs):
- ''' instantiates all plugins with the same arguments '''
-
- path_only = kwargs.pop('path_only', False)
- class_only = kwargs.pop('class_only', False)
- all_matches = []
- found_in_cache = True
-
- for i in self._get_paths():
- all_matches.extend(glob.glob(os.path.join(i, "*.py")))
-
- for path in sorted(all_matches, key=lambda match: os.path.basename(match)):
- name, _ = os.path.splitext(path)
- if '__init__' in name:
- continue
-
- if path_only:
- yield path
- continue
-
- if path not in self._module_cache:
- self._module_cache[path] = self._load_module_source(name, path)
- found_in_cache = False
-
- try:
- obj = getattr(self._module_cache[path], self.class_name)
- except AttributeError as e:
- display.warning("Skipping plugin (%s) as it seems to be invalid: %s" % (path, to_text(e)))
- continue
-
- if self.base_class:
- # The import path is hardcoded and should be the right place,
- # so we are not expecting an ImportError.
- module = __import__(self.package, fromlist=[self.base_class])
- # Check whether this obj has the required base class.
- try:
- plugin_class = getattr(module, self.base_class)
- except AttributeError:
- continue
- if not issubclass(obj, plugin_class):
- continue
-
- self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
- if not class_only:
- try:
- obj = obj(*args, **kwargs)
- except TypeError as e:
- display.warning("Skipping plugin (%s) as it seems to be incomplete: %s" % (path, to_text(e)))
-
- # set extra info on the module, in case we want it later
- setattr(obj, '_original_path', path)
- setattr(obj, '_load_name', name)
- yield obj
-
-action_loader = PluginLoader(
- 'ActionModule',
- 'ansible.plugins.action',
- C.DEFAULT_ACTION_PLUGIN_PATH,
- 'action_plugins',
- required_base_class='ActionBase',
-)
-
-cache_loader = PluginLoader(
- 'CacheModule',
- 'ansible.plugins.cache',
- C.DEFAULT_CACHE_PLUGIN_PATH,
- 'cache_plugins',
-)
-
-callback_loader = PluginLoader(
- 'CallbackModule',
- 'ansible.plugins.callback',
- C.DEFAULT_CALLBACK_PLUGIN_PATH,
- 'callback_plugins',
-)
-
-connection_loader = PluginLoader(
- 'Connection',
- 'ansible.plugins.connection',
- C.DEFAULT_CONNECTION_PLUGIN_PATH,
- 'connection_plugins',
- aliases={'paramiko': 'paramiko_ssh'},
- required_base_class='ConnectionBase',
-)
-
-shell_loader = PluginLoader(
- 'ShellModule',
- 'ansible.plugins.shell',
- 'shell_plugins',
- 'shell_plugins',
-)
-
-module_loader = PluginLoader(
- '',
- 'ansible.modules',
- C.DEFAULT_MODULE_PATH,
- 'library',
-)
-
-module_utils_loader = PluginLoader(
- '',
- 'ansible.module_utils',
- C.DEFAULT_MODULE_UTILS_PATH,
- 'module_utils',
-)
-
-# NB: dedicated loader is currently necessary because PS module_utils expects "with subdir" lookup where
-# regular module_utils doesn't. This can be revisited once we have more granular loaders.
-ps_module_utils_loader = PluginLoader(
- '',
- 'ansible.module_utils',
- C.DEFAULT_MODULE_UTILS_PATH,
- 'module_utils',
-)
-
-lookup_loader = PluginLoader(
- 'LookupModule',
- 'ansible.plugins.lookup',
- C.DEFAULT_LOOKUP_PLUGIN_PATH,
- 'lookup_plugins',
- required_base_class='LookupBase',
-)
-
-filter_loader = PluginLoader(
- 'FilterModule',
- 'ansible.plugins.filter',
- C.DEFAULT_FILTER_PLUGIN_PATH,
- 'filter_plugins',
-)
-
-test_loader = PluginLoader(
- 'TestModule',
- 'ansible.plugins.test',
- C.DEFAULT_TEST_PLUGIN_PATH,
- 'test_plugins'
-)
-
-fragment_loader = PluginLoader(
- 'ModuleDocFragment',
- 'ansible.utils.module_docs_fragments',
- os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
- '',
-)
-
-strategy_loader = PluginLoader(
- 'StrategyModule',
- 'ansible.plugins.strategy',
- C.DEFAULT_STRATEGY_PLUGIN_PATH,
- 'strategy_plugins',
- required_base_class='StrategyBase',
-)
-
-terminal_loader = PluginLoader(
- 'TerminalModule',
- 'ansible.plugins.terminal',
- 'terminal_plugins',
- 'terminal_plugins'
-)
+def get_plugin_class(obj):
+ return obj.__class__.__name__.lower().replace('module', '')
-vars_loader = PluginLoader(
- 'VarsModule',
- 'ansible.plugins.vars',
- C.DEFAULT_VARS_PLUGIN_PATH,
- 'vars_plugins',
-)
-cliconf_loader = PluginLoader(
- 'Cliconf',
- 'ansible.plugins.cliconf',
- 'cliconf_plugins',
- 'cliconf_plugins',
- required_base_class='CliconfBase'
-)
+class AnsiblePlugin(with_metaclass(ABCMeta, object)):
-netconf_loader = PluginLoader(
- 'Netconf',
- 'ansible.plugins.netconf',
- 'netconf_plugins',
- 'netconf_plugins',
- required_base_class='NetconfBase'
-)
+ def get_option(self, option):
+ return C.get_plugin_option(get_plugin_class(self), self.name, option)
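
What remains of ansible/plugins/__init__.py is the shared base: get_plugin_class() derives a plugin's config namespace from its class name, and AnsiblePlugin.get_option() uses that namespace to look options up in the config manager. The derivation is simple enough to show directly:

    def get_plugin_class(obj):
        return obj.__class__.__name__.lower().replace('module', '')

    class ShellModule(object):
        pass

    class Connection(object):
        pass

    print(get_plugin_class(ShellModule()))  # -> 'shell'
    print(get_plugin_class(Connection()))   # -> 'connection'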
diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
index c7ff4c1bd7..1a38d05b32 100644
--- a/lib/ansible/plugins/action/__init__.py
+++ b/lib/ansible/plugins/action/__init__.py
@@ -40,6 +40,7 @@ from ansible.parsing.utils.jsonify import jsonify
from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
from ansible.release import __version__
from ansible.utils.unsafe_proxy import wrap_var
+from ansible.vars.manager import remove_internal_keys
try:
@@ -743,7 +744,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)
# remove internal keys
- self._remove_internal_keys(data)
+ remove_internal_keys(data)
# cleanup tmp?
if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete:
@@ -766,17 +767,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data
- def _remove_internal_keys(self, data):
- for key in list(data.keys()):
- if key.startswith('_ansible_') and key != '_ansible_parsed' or key in C.INTERNAL_RESULT_KEYS:
- display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
- del data[key]
-
- # remove bad/empty internal keys
- for key in ['warnings', 'deprecations']:
- if key in data and not data[key]:
- del data[key]
-
def _clean_returned_data(self, data):
remove_keys = set()
fact_keys = set(data.keys())
@@ -817,7 +807,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.warning("Removed restricted key from module data: %s = %s" % (r_key, r_val))
del data[r_key]
- self._remove_internal_keys(data)
+ remove_internal_keys(data)
def _parse_returned_data(self, res):
try:
diff --git a/lib/ansible/plugins/action/junos.py b/lib/ansible/plugins/action/junos.py
index d55908e40a..15d993d77d 100644
--- a/lib/ansible/plugins/action/junos.py
+++ b/lib/ansible/plugins/action/junos.py
@@ -26,7 +26,7 @@ from ansible import constants as C
from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils.junos import junos_argument_spec
from ansible.module_utils.six import iteritems
-from ansible.plugins import connection_loader, module_loader
+from ansible.plugins.loader import connection_loader, module_loader
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils.connection import Connection
diff --git a/lib/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
index be142fe770..d4d3589cc1 100644
--- a/lib/ansible/plugins/action/synchronize.py
+++ b/lib/ansible/plugins/action/synchronize.py
@@ -26,7 +26,7 @@ from ansible.module_utils._text import to_text
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.playbook.play_context import MAGIC_VARIABLE_MAPPING
from ansible.plugins.action import ActionBase
-from ansible.plugins import connection_loader
+from ansible.plugins.loader import connection_loader
class ActionModule(ActionBase):
diff --git a/lib/ansible/plugins/cache/__init__.py b/lib/ansible/plugins/cache/__init__.py
index c36f00bba2..16e3107e58 100644
--- a/lib/ansible/plugins/cache/__init__.py
+++ b/lib/ansible/plugins/cache/__init__.py
@@ -27,7 +27,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six import with_metaclass
from ansible.module_utils._text import to_bytes
-from ansible.plugins import cache_loader
+from ansible.plugins.loader import cache_loader
try:
from __main__ import display
diff --git a/lib/ansible/plugins/callback/json.py b/lib/ansible/plugins/callback/json.py
index f1221d9351..1dd886ab2c 100644
--- a/lib/ansible/plugins/callback/json.py
+++ b/lib/ansible/plugins/callback/json.py
@@ -15,6 +15,17 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+'''
+DOCUMENTATION:
+ callback: json
+ short_description: Ansible screen output as json
+ version_added: "2.2"
+ description:
+ - This callback converts all events into JSON output
+ type: stdout
+ plugin_api_version: "2.0"
+'''
+
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
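
With the DOCUMENTATION block above, the json callback becomes self-describing like other plugin types, so ansible-doc and the config machinery can read it. A sketch of pulling such a block back out of a module (assumes PyYAML, which Ansible itself requires; the real pipeline goes through ansible.parsing.plugin_docs.read_docstring):

    import yaml

    DOCUMENTATION = '''
    callback: json
    short_description: Ansible screen output as json
    type: stdout
    '''

    doc = yaml.safe_load(DOCUMENTATION)
    print(doc['callback'], doc['type'])  # -> json stdout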
diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py
index 813cf9dabe..8a30cbbbdc 100644
--- a/lib/ansible/plugins/connection/__init__.py
+++ b/lib/ansible/plugins/connection/__init__.py
@@ -23,14 +23,15 @@ import fcntl
import gettext
import os
import shlex
-from abc import ABCMeta, abstractmethod, abstractproperty
+from abc import abstractmethod, abstractproperty
from functools import wraps
from ansible import constants as C
from ansible.errors import AnsibleError
-from ansible.module_utils.six import string_types, with_metaclass
+from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
-from ansible.plugins import shell_loader
+from ansible.plugins import AnsiblePlugin
+from ansible.plugins.loader import shell_loader
try:
from __main__ import display
@@ -53,7 +54,7 @@ def ensure_connect(func):
return wrapped
-class ConnectionBase(with_metaclass(ABCMeta, object)):
+class ConnectionBase(AnsiblePlugin):
'''
A base class for connections to contain common code.
'''
diff --git a/lib/ansible/plugins/connection/buildah.py b/lib/ansible/plugins/connection/buildah.py
index 1901a28af2..41fc26786d 100644
--- a/lib/ansible/plugins/connection/buildah.py
+++ b/lib/ansible/plugins/connection/buildah.py
@@ -29,16 +29,24 @@ DOCUMENTATION:
author: Tomas Tomecek (ttomecek@redhat.com)
version_added: 2.4
options:
+ remote_addr:
+ description:
+ - The ID of the container you want to access.
+ default: inventory_hostname
+ config:
+ vars:
+ - name: ansible_host
remote_user:
description:
- User specified via name or ID which is used to execute commands inside the container.
config:
- - section: defaults
- key: remote_user
- env_vars:
- - ANSIBLE_REMOTE_USER
- host_vars:
- - ansible_user
+ ini:
+ - section: defaults
+ key: remote_user
+ env:
+ - name: ANSIBLE_REMOTE_USER
+ vars:
+ - name: ansible_user
"""
from __future__ import (absolute_import, division, print_function)
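
The buildah hunk above migrates the plugin's option metadata to the unified schema: `ini` entries are section/key pairs, while `env` and `vars` are lists of dicts keyed by `name`. Once the YAML is parsed, the config manager receives a plain mapping; a hypothetical Python equivalent of the remote_user entry above:

    options = {
        'remote_user': {
            'description': ['User used to execute commands inside the container.'],
            'ini': [{'section': 'defaults', 'key': 'remote_user'}],
            'env': [{'name': 'ANSIBLE_REMOTE_USER'}],
            'vars': [{'name': 'ansible_user'}],
        },
    }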
diff --git a/lib/ansible/plugins/connection/netconf.py b/lib/ansible/plugins/connection/netconf.py
index e176282887..55d87e5d07 100644
--- a/lib/ansible/plugins/connection/netconf.py
+++ b/lib/ansible/plugins/connection/netconf.py
@@ -25,7 +25,7 @@ import json
from ansible import constants as C
from ansible.errors import AnsibleConnectionFailure, AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
-from ansible.plugins import netconf_loader
+from ansible.plugins.loader import netconf_loader
from ansible.plugins.connection import ConnectionBase, ensure_connect
from ansible.utils.jsonrpc import Rpc
diff --git a/lib/ansible/plugins/connection/network_cli.py b/lib/ansible/plugins/connection/network_cli.py
index bec126210b..f506298385 100644
--- a/lib/ansible/plugins/connection/network_cli.py
+++ b/lib/ansible/plugins/connection/network_cli.py
@@ -31,8 +31,7 @@ from ansible import constants as C
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils.six import BytesIO, binary_type
from ansible.module_utils._text import to_bytes, to_text
-from ansible.plugins import cliconf_loader
-from ansible.plugins import terminal_loader
+from ansible.plugins.loader import cliconf_loader, terminal_loader
from ansible.plugins.connection.paramiko_ssh import Connection as _Connection
from ansible.utils.jsonrpc import Rpc
diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py
index b526905ff3..85669ff4ce 100644
--- a/lib/ansible/plugins/connection/ssh.py
+++ b/lib/ansible/plugins/connection/ssh.py
@@ -26,73 +26,122 @@ DOCUMENTATION:
author: ansible (@core)
version_added: historical
options:
- _host:
+ host:
description: Hostname/ip to connect to.
default: inventory_hostname
- host_vars:
- - ansible_host
- - ansible_ssh_host
- _host_key_checking:
- type: bool
+ vars:
+ - name: ansible_host
+ - name: ansible_ssh_host
+ host_key_checking:
+ constants:
+ - name: HOST_KEY_CHECKING
description: Determines if ssh should check host keys
- config:
+ type: boolean
+ ini:
- section: defaults
key: 'host_key_checking'
- env_vars:
- - ANSIBLE_HOST_KEY_CHECKING
- _password:
+ env:
+ - name: ANSIBLE_HOST_KEY_CHECKING
+ password:
description: Authentication password for the C(remote_user). Can be supplied as CLI option.
- host_vars:
- - ansible_password
- - ansible_ssh_pass
- _ssh_args:
+ vars:
+ - name: ansible_password
+ - name: ansible_ssh_pass
+ ssh_args:
description: Arguments to pass to all ssh cli tools
default: '-C -o ControlMaster=auto -o ControlPersist=60s'
- config:
+ ini:
- section: 'ssh_connection'
key: 'ssh_args'
- env_vars:
- - ANSIBLE_SSH_ARGS
- _ssh_common_args:
- description: Common extra args for ssh CLI tools
- host_vars:
- - ansible_ssh_common_args
- _scp_extra_args:
+ env:
+ - name: ANSIBLE_SSH_ARGS
+ ssh_common_args:
+ description: Common extra args for all ssh CLI tools
+ vars:
+ - name: ansible_ssh_common_args
+ ssh_executable:
+ default: ssh
+ description:
+ - This defines the location of the ssh binary. It defaults to `ssh` which will use the first ssh binary available in $PATH.
+      - This option is usually not required; it might be useful when access to the system ssh is restricted,
+ or when using ssh wrappers to connect to remote hosts.
+ env: [{name: ANSIBLE_SSH_EXECUTABLE}]
+ ini:
+ - {key: ssh_executable, section: ssh_connection}
+ yaml: {key: ssh_connection.ssh_executable}
+ const:
+ - name: ANSIBLE_SSH_EXECUTABLE
+ version_added: "2.2"
+ scp_extra_args:
description: Extra exclusive to the 'scp' CLI
- host_vars:
- - ansible_scp_extra_args
- _sftp_extra_args:
+ vars:
+ - name: ansible_scp_extra_args
+ sftp_extra_args:
description: Extra exclusive to the 'sftp' CLI
- host_vars:
- - ansible_sftp_extra_args
- _ssh_extra_args:
+ vars:
+ - name: ansible_sftp_extra_args
+ ssh_extra_args:
description: Extra exclusive to the 'ssh' CLI
- host_vars:
- - ansible_ssh_extra_args
+ vars:
+ - name: ansible_ssh_extra_args
+ ssh_retries:
+ # constant: ANSIBLE_SSH_RETRIES
+ description: Number of attempts to connect.
+ default: 3
+ env:
+ - name: ANSIBLE_SSH_RETRIES
+ ini:
+ - section: connection
+ key: retries
+ - section: ssh_connection
+ key: retries
port:
description: Remote port to connect to.
type: int
- config:
- - section: defaults
- key: remote_port
- default: 22
- env_vars:
- - ANSIBLE_REMOTE_PORT
- host_vars:
- - ansible_port
- - ansible_ssh_port
+ default: 22
+ ini:
+ - section: defaults
+ key: remote_port
+ env:
+ - name: ANSIBLE_REMOTE_PORT
+ vars:
+ - name: ansible_port
+ - name: ansible_ssh_port
remote_user:
description:
- User name with which to login to the remote server, normally set by the remote_user keyword.
- If no user is supplied, Ansible will let the ssh client binary choose the user as it normally would.
- config:
- - section: defaults
- key: remote_user
- env_vars:
- - ANSIBLE_REMOTE_USER
- host_vars:
- - ansible_user
- - ansible_ssh_user
+ ini:
+ - section: defaults
+ key: remote_user
+ env:
+ - name: ANSIBLE_REMOTE_USER
+ vars:
+ - name: ansible_user
+ - name: ansible_ssh_user
+ pipelining:
+ default: ANSIBLE_PIPELINING
+ description:
+ - Pipelining reduces the number of SSH operations required to execute a module on the remote server,
+ by executing many Ansible modules without actual file transfer.
+ - This can result in a very significant performance improvement when enabled.
+ - However this conflicts with privilege escalation (become).
+ For example, when using sudo operations you must first disable 'requiretty' in the sudoers file for the target hosts,
+ which is why this feature is disabled by default.
+ env: [{name: ANSIBLE_SSH_PIPELINING}]
+ ini:
+ - {key: pipelining, section: ssh_connection}
+ type: boolean
+ vars: [{name: ansible_ssh_pipelining}]
+
+# TODO:
+# ANSIBLE_SSH_RETRIES
+
+# self._play_context.private_key_file
+# ANSIBLE_SSH_CONTROL_PATH
+# ANSIBLE_SSH_CONTROL_PATH_DIR
+# DEFAULT_SFTP_BATCH_MODE
+# DEFAULT_SCP_IF_SSH
'''
from __future__ import (absolute_import, division, print_function)
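
An option like `port` above can now be satisfied from a host variable (ansible_port/ansible_ssh_port), an environment variable, an ini entry, or the declared default. A rough sketch of that resolution, assuming (for illustration only, not a statement of the config manager's exact behavior) that vars beat env, which beats ini, which beats the default:

    import os

    def resolve_port(hostvars, ini_value=None, default=22):
        # assumed precedence for illustration: vars > env > ini > default
        for var in ('ansible_port', 'ansible_ssh_port'):
            if var in hostvars:
                return int(hostvars[var])
        if 'ANSIBLE_REMOTE_PORT' in os.environ:
            return int(os.environ['ANSIBLE_REMOTE_PORT'])
        if ini_value is not None:
            return int(ini_value)
        return default

    print(resolve_port({'ansible_port': 2222}))  # -> 2222
    print(resolve_port({}))                      # -> 22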
diff --git a/lib/ansible/plugins/inventory/constructed_groups.py b/lib/ansible/plugins/inventory/constructed_groups.py
index a7315b39f1..302b289bdb 100644
--- a/lib/ansible/plugins/inventory/constructed_groups.py
+++ b/lib/ansible/plugins/inventory/constructed_groups.py
@@ -25,21 +25,20 @@ DOCUMENTATION:
- Uses a YAML configuration file to identify groups and the Jinja2 expressions that qualify a host for membership.
- Only variables already in inventory are available for expressions (no facts).
- Failed expressions will be ignored (assumes vars were missing).
-EXAMPLES:
-# inventory.config file in YAML format
-plugin: constructed_groups
-groups:
- # simple name matching
- webservers: inventory_hostname.startswith('web')
+EXAMPLES: | # inventory.config file in YAML format
+ plugin: constructed_groups
+ groups:
+ # simple name matching
+ webservers: inventory_hostname.startswith('web')
- # using ec2 'tags' (assumes aws inventory)
- development: "'devel' in (ec2_tags|list)"
+ # using ec2 'tags' (assumes aws inventory)
+ development: "'devel' in (ec2_tags|list)"
- # using other host properties populated in inventory
- private_only: not (public_dns_name is defined or ip_address is defined)
+ # using other host properties populated in inventory
+ private_only: not (public_dns_name is defined or ip_address is defined)
- # complex group membership
- multi_group: (group_names|intersection(['alpha', 'beta', 'omega']))|length >= 2
+ # complex group membership
+ multi_group: (group_names|intersection(['alpha', 'beta', 'omega']))|length >= 2
'''
from __future__ import (absolute_import, division, print_function)
@@ -77,7 +76,7 @@ class InventoryModule(BaseInventoryPlugin):
def parse(self, inventory, loader, path, cache=False):
''' parses the inventory file '''
- super(InventoryModule, self).parse(inventory, loader, path)
+ super(InventoryModule, self).parse(inventory, loader, path, cache=True)
try:
data = self.loader.load_from_file(path)
@@ -94,19 +93,19 @@ class InventoryModule(BaseInventoryPlugin):
for host in inventory.hosts:
# get available variables to templar
- hostvars = host.get_vars()
- if host.name in inventory.cache: # adds facts if cache is active
- hostvars = combine_vars(hostvars, inventory.cache[host.name])
+ hostvars = inventory.hosts[host].get_vars()
+ if host in inventory.cache: # adds facts if cache is active
+ hostvars = combine_vars(hostvars, inventory.cache[host])
templar.set_available_variables(hostvars)
# process each 'group entry'
- for group_name, expression in data.get('groups', {}):
- conditional = u"{%% if %s %%} True {%% else %%} False {%% endif %%}" % expression
+ for group_name in data.get('groups', {}):
+ conditional = u"{%% if %s %%} True {%% else %%} False {%% endif %%}" % data['groups'][group_name]
result = templar.template(conditional)
if result and bool(result):
# ensure group exists
inventory.add_group(group_name)
# add host to group
- inventory.add_child(group_name, host.name)
+ inventory.add_child(group_name, host)
except Exception as e:
raise AnsibleParserError("failed to parse %s: %s " % (to_native(path), to_native(e)))
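
The parse() fix above is real behavior, not style: iterating a dict yields keys, not key/value pairs, so the old two-target loop could not have unpacked correctly; the new code indexes data['groups'][group_name] explicitly. Each expression is wrapped in a Jinja2 conditional and templated against the host's variables, and a truthy result adds the host to the group. A standalone sketch of that wrapping (plain jinja2 here, which Ansible depends on anyway; the plugin templates through Templar):

    from jinja2 import Template

    groups = {'webservers': "inventory_hostname.startswith('web')"}
    hostvars = {'inventory_hostname': 'web01'}

    for group_name in groups:
        # %% escapes to a literal % under string formatting
        conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[group_name]
        if Template(conditional).render(**hostvars).strip() == 'True':
            print('add web01 to %s' % group_name)  # -> add web01 to webservers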
diff --git a/lib/ansible/plugins/inventory/yaml.py b/lib/ansible/plugins/inventory/yaml.py
index 520d8543a3..a7ea874d85 100644
--- a/lib/ansible/plugins/inventory/yaml.py
+++ b/lib/ansible/plugins/inventory/yaml.py
@@ -21,17 +21,19 @@ DOCUMENTATION:
version_added: "2.4"
short_description: Uses a specifically YAML file as inventory source.
description:
- - YAML based inventory, starts with the 'all' group and has hosts/vars/children entries.
+ - "YAML based inventory, starts with the 'all' group and has hosts/vars/children entries."
- Host entries can have sub-entries defined, which will be treated as variables.
- Vars entries are normal group vars.
- - Children are 'child groups', which can also have their own vars/hosts/children and so on.
- - File MUST have a valid extension: yaml, yml, json.
+ - "Children are 'child groups', which can also have their own vars/hosts/children and so on."
+ - File MUST have a valid extension, defined in configuration
notes:
- It takes the place of the previously hardcoded YAML inventory.
- To function it requires being whitelisted in configuration.
options:
- _yaml_extensions:
+ yaml_extensions:
description: list of 'valid' extensions for files containing YAML
+ type: list
+ default: ['.yaml', '.yml', '.json']
EXAMPLES:
all: # keys must be unique, i.e. only one 'hosts' per group
hosts:
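
With yaml_extensions now a typed list option carrying the documented default, the plugin can gate parsing on the source file's suffix instead of a hardcoded list. A minimal sketch of that gate, option plumbing omitted; the default list is the one documented above:

    import os

    yaml_extensions = ['.yaml', '.yml', '.json']  # the documented default

    def verify_file(path):
        return os.path.splitext(path)[1] in yaml_extensions

    print(verify_file('inventory.yml'))  # -> True
    print(verify_file('inventory.ini'))  # -> False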
diff --git a/lib/ansible/plugins/loader.py b/lib/ansible/plugins/loader.py
new file mode 100644
index 0000000000..4e4ddd33cc
--- /dev/null
+++ b/lib/ansible/plugins/loader.py
@@ -0,0 +1,588 @@
+# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> and others
+# (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import glob
+import imp
+import os
+import os.path
+import sys
+import warnings
+
+from collections import defaultdict
+
+from ansible import constants as C
+from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
+from ansible.module_utils._text import to_text
+from ansible.parsing.plugin_docs import read_docstring
+
+try:
+ from __main__ import display
+except ImportError:
+ from ansible.utils.display import Display
+ display = Display()
+
+
+def get_all_plugin_loaders():
+ return [(name, obj) for (name, obj) in globals().items() if isinstance(obj, PluginLoader)]
+
+
+class PluginLoader:
+
+ '''
+ PluginLoader loads plugins from the configured plugin directories.
+
+ It searches for plugins by iterating through the combined list of
+ play basedirs, configured paths, and the python path.
+ The first match is used.
+ '''
+
+ def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
+
+ self.class_name = class_name
+ self.base_class = required_base_class
+ self.package = package
+ self.subdir = subdir
+ self.aliases = aliases
+
+ if config and not isinstance(config, list):
+ config = [config]
+ elif not config:
+ config = []
+
+ self.config = config
+
+ if class_name not in MODULE_CACHE:
+ MODULE_CACHE[class_name] = {}
+ if class_name not in PATH_CACHE:
+ PATH_CACHE[class_name] = None
+ if class_name not in PLUGIN_PATH_CACHE:
+ PLUGIN_PATH_CACHE[class_name] = defaultdict(dict)
+
+ self._module_cache = MODULE_CACHE[class_name]
+ self._paths = PATH_CACHE[class_name]
+ self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
+
+ self._extra_dirs = []
+ self._searched_paths = set()
+
+ def __setstate__(self, data):
+ '''
+ Deserializer.
+ '''
+
+ class_name = data.get('class_name')
+ package = data.get('package')
+ config = data.get('config')
+ subdir = data.get('subdir')
+ aliases = data.get('aliases')
+ base_class = data.get('base_class')
+
+ PATH_CACHE[class_name] = data.get('PATH_CACHE')
+ PLUGIN_PATH_CACHE[class_name] = data.get('PLUGIN_PATH_CACHE')
+
+ self.__init__(class_name, package, config, subdir, aliases, base_class)
+ self._extra_dirs = data.get('_extra_dirs', [])
+ self._searched_paths = data.get('_searched_paths', set())
+
+ def __getstate__(self):
+ '''
+ Serializer.
+ '''
+
+ return dict(
+ class_name=self.class_name,
+ base_class=self.base_class,
+ package=self.package,
+ config=self.config,
+ subdir=self.subdir,
+ aliases=self.aliases,
+ _extra_dirs=self._extra_dirs,
+ _searched_paths=self._searched_paths,
+ PATH_CACHE=PATH_CACHE[self.class_name],
+ PLUGIN_PATH_CACHE=PLUGIN_PATH_CACHE[self.class_name],
+ )
+
+ def format_paths(self, paths):
+ ''' Returns a string suitable for printing of the search path '''
+
+ # Uses a list to get the order right
+ ret = []
+ for i in paths:
+ if i not in ret:
+ ret.append(i)
+ return os.pathsep.join(ret)
+
+ def print_paths(self):
+ return self.format_paths(self._get_paths(subdirs=False))
+
+ def _all_directories(self, dir):
+ results = []
+ results.append(dir)
+ for root, subdirs, files in os.walk(dir, followlinks=True):
+ if '__init__.py' in files:
+ for x in subdirs:
+ results.append(os.path.join(root, x))
+ return results
+
+ def _get_package_paths(self, subdirs=True):
+ ''' Gets the path of a Python package '''
+
+ if not self.package:
+ return []
+ if not hasattr(self, 'package_path'):
+ m = __import__(self.package)
+ parts = self.package.split('.')[1:]
+ for parent_mod in parts:
+ m = getattr(m, parent_mod)
+ self.package_path = os.path.dirname(m.__file__)
+ if subdirs:
+ return self._all_directories(self.package_path)
+ return [self.package_path]
+
+ def _get_paths(self, subdirs=True):
+ ''' Return a list of paths to search for plugins in '''
+
+ # FIXME: This is potentially buggy if subdirs is sometimes True and sometimes False.
+ # In current usage, everything calls this with subdirs=True except for module_utils_loader and ansible-doc
+ # which always calls it with subdirs=False. So there currently isn't a problem with this caching.
+ if self._paths is not None:
+ return self._paths
+
+ ret = self._extra_dirs[:]
+
+ # look in any configured plugin paths, allow one level deep for subcategories
+ if self.config is not None:
+ for path in self.config:
+ path = os.path.realpath(os.path.expanduser(path))
+ if subdirs:
+ contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
+ for c in contents:
+ if os.path.isdir(c) and c not in ret:
+ ret.append(c)
+ if path not in ret:
+ ret.append(path)
+
+ # look for any plugins installed in the package subtree
+ # Note package path always gets added last so that every other type of
+ # path is searched before it.
+ ret.extend(self._get_package_paths(subdirs=subdirs))
+
+ # HACK: because powershell modules are in the same directory
+ # hierarchy as other modules we have to process them last. This is
+ # because powershell only works on windows but the other modules work
+ # anywhere (possibly including windows if the correct language
+ # interpreter is installed). the non-powershell modules can have any
+ # file extension and thus powershell modules are picked up in that.
+ # The non-hack way to fix this is to have powershell modules be
+ # a different PluginLoader/ModuleLoader. But that requires changing
+ # other things too (known thing to change would be PATHS_CACHE,
+ # PLUGIN_PATHS_CACHE, and MODULE_CACHE. Since those three dicts key
+ # on the class_name and neither regular modules nor powershell modules
+ # would have class_names, they would not work as written.
+ reordered_paths = []
+ win_dirs = []
+
+ for path in ret:
+ if path.endswith('windows'):
+ win_dirs.append(path)
+ else:
+ reordered_paths.append(path)
+ reordered_paths.extend(win_dirs)
+
+ # cache and return the result
+ self._paths = reordered_paths
+ return reordered_paths
+
+ def _load_config_defs(self, name, path):
+ ''' Reads plugin docs to find configuration setting definitions, to push to config manager for later use '''
+
+ # plugins w/o class name don't support config
+        if self.class_name and self.class_name in ('Connection',):
+ # FIXME: expand from just connection
+ type_name = get_plugin_class(self)
+ dstring = read_docstring(path, verbose=False, ignore_errors=False)
+ if dstring.get('doc', False):
+ if 'options' in dstring['doc'] and isinstance(dstring['doc']['options'], dict):
+ C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['doc']['options'])
+ display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
+
+ def add_directory(self, directory, with_subdir=False):
+ ''' Adds an additional directory to the search path '''
+
+ directory = os.path.realpath(directory)
+
+ if directory is not None:
+ if with_subdir:
+ directory = os.path.join(directory, self.subdir)
+ if directory not in self._extra_dirs:
+ # append the directory and invalidate the path cache
+ self._extra_dirs.append(directory)
+ self._paths = None
+
+ def find_plugin(self, name, mod_type='', ignore_deprecated=False):
+ ''' Find a plugin named name '''
+
+ if mod_type:
+ suffix = mod_type
+ elif self.class_name:
+ # Ansible plugins that run in the controller process (most plugins)
+ suffix = '.py'
+ else:
+ # Only Ansible Modules. Ansible modules can be any executable so
+ # they can have any suffix
+ suffix = ''
+
+ # The particular cache to look for modules within. This matches the
+ # requested mod_type
+ pull_cache = self._plugin_path_cache[suffix]
+ try:
+ return pull_cache[name]
+ except KeyError:
+ # Cache miss. Now let's find the plugin
+ pass
+
+ # TODO: Instead of using the self._paths cache (PATH_CACHE) and
+ # self._searched_paths we could use an iterator. Before enabling that
+ # we need to make sure we don't want to add additional directories
+ # (add_directory()) once we start using the iterator. Currently, it
+ # looks like _get_paths() never forces a cache refresh so if we expect
+ # additional directories to be added later, it is buggy.
+ for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
+ try:
+ full_paths = (os.path.join(path, f) for f in os.listdir(path))
+ except OSError as e:
+ display.warning("Error accessing plugin paths: %s" % to_text(e))
+
+ for full_path in (f for f in full_paths if os.path.isfile(f) and not f.endswith('__init__.py')):
+ full_name = os.path.basename(full_path)
+
+ # HACK: We have no way of executing python byte compiled files as ansible modules so specifically exclude them
+ # FIXME: I believe this is only correct for modules and module_utils.
+                # For all other plugins, .pyc and .pyo should be valid
+ if full_path.endswith(('.pyc', '.pyo')):
+ continue
+
+ splitname = os.path.splitext(full_name)
+ base_name = splitname[0]
+ try:
+ extension = splitname[1]
+ except IndexError:
+ extension = ''
+
+ # Module found, now enter it into the caches that match this file
+ if base_name not in self._plugin_path_cache['']:
+ self._plugin_path_cache[''][base_name] = full_path
+
+ if full_name not in self._plugin_path_cache['']:
+ self._plugin_path_cache[''][full_name] = full_path
+
+ if base_name not in self._plugin_path_cache[extension]:
+ self._plugin_path_cache[extension][base_name] = full_path
+
+ if full_name not in self._plugin_path_cache[extension]:
+ self._plugin_path_cache[extension][full_name] = full_path
+
+ self._searched_paths.add(path)
+ try:
+ return pull_cache[name]
+ except KeyError:
+ # Didn't find the plugin in this directory. Load modules from the next one
+ pass
+
+ # if nothing is found, try finding alias/deprecated
+ if not name.startswith('_'):
+ alias_name = '_' + name
+ # We've already cached all the paths at this point
+ if alias_name in pull_cache:
+ if not ignore_deprecated and not os.path.islink(pull_cache[alias_name]):
+ # FIXME: this is not always the case, some are just aliases
+ display.deprecated('%s is kept for backwards compatibility but usage is discouraged. '
+ 'The module documentation details page may explain more about this rationale.' % name.lstrip('_'))
+ return pull_cache[alias_name]
+
+ return None
+
+ def has_plugin(self, name):
+ ''' Checks if a plugin named name exists '''
+
+ return self.find_plugin(name) is not None
+
+ __contains__ = has_plugin
+
+ def _load_module_source(self, name, path):
+
+ # avoid collisions across plugins
+ full_name = '.'.join([self.package, name])
+
+ if full_name in sys.modules:
+ # Avoids double loading, See https://github.com/ansible/ansible/issues/13110
+ return sys.modules[full_name]
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", RuntimeWarning)
+ with open(path, 'rb') as module_file:
+ module = imp.load_source(full_name, path, module_file)
+ return module
+
+ def _update_object(self, obj, name, path):
+
+ # load plugin config data
+ self._load_config_defs(name, path)
+
+ # set extra info on the module, in case we want it later
+ setattr(obj, '_original_path', path)
+ setattr(obj, '_load_name', name)
+
+ def get(self, name, *args, **kwargs):
+ ''' instantiates a plugin of the given name using arguments '''
+
+ found_in_cache = True
+ class_only = kwargs.pop('class_only', False)
+ if name in self.aliases:
+ name = self.aliases[name]
+ path = self.find_plugin(name)
+ if path is None:
+ return None
+
+ if path not in self._module_cache:
+ self._module_cache[path] = self._load_module_source(name, path)
+ found_in_cache = False
+
+ obj = getattr(self._module_cache[path], self.class_name)
+ if self.base_class:
+ # The import path is hardcoded and should be the right place,
+ # so we are not expecting an ImportError.
+ module = __import__(self.package, fromlist=[self.base_class])
+ # Check whether this obj has the required base class.
+ try:
+ plugin_class = getattr(module, self.base_class)
+ except AttributeError:
+ return None
+ if not issubclass(obj, plugin_class):
+ return None
+
+ self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
+ if not class_only:
+ try:
+ obj = obj(*args, **kwargs)
+ except TypeError as e:
+ if "abstract" in e.args[0]:
+ # Abstract Base Class. The found plugin file does not
+ # fully implement the defined interface.
+ return None
+ raise
+
+ self._update_object(obj, name, path)
+ return obj
+
+ def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
+ msg = 'Loading %s \'%s\' from %s' % (class_name, os.path.basename(name), path)
+
+ if len(searched_paths) > 1:
+ msg = '%s (searched paths: %s)' % (msg, self.format_paths(searched_paths))
+
+ if found_in_cache or class_only:
+ msg = '%s (found_in_cache=%s, class_only=%s)' % (msg, found_in_cache, class_only)
+
+ display.debug(msg)
+
+ def all(self, *args, **kwargs):
+ ''' instantiates all plugins with the same arguments '''
+
+ path_only = kwargs.pop('path_only', False)
+ class_only = kwargs.pop('class_only', False)
+ all_matches = []
+ found_in_cache = True
+
+ for i in self._get_paths():
+ all_matches.extend(glob.glob(os.path.join(i, "*.py")))
+
+ for path in sorted(all_matches, key=lambda match: os.path.basename(match)):
+ name = os.path.basename(os.path.splitext(path)[0])
+
+ if '__init__' in name:
+ continue
+
+ if path_only:
+ yield path
+ continue
+
+ if path not in self._module_cache:
+ self._module_cache[path] = self._load_module_source(name, path)
+ found_in_cache = False
+
+ try:
+ obj = getattr(self._module_cache[path], self.class_name)
+ except AttributeError as e:
+ display.warning("Skipping plugin (%s) as it seems to be invalid: %s" % (path, to_text(e)))
+ continue
+
+ if self.base_class:
+ # The import path is hardcoded and should be the right place,
+ # so we are not expecting an ImportError.
+ module = __import__(self.package, fromlist=[self.base_class])
+ # Check whether this obj has the required base class.
+ try:
+ plugin_class = getattr(module, self.base_class)
+ except AttributeError:
+ continue
+ if not issubclass(obj, plugin_class):
+ continue
+
+ self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
+ if not class_only:
+ try:
+ obj = obj(*args, **kwargs)
+ except TypeError as e:
+ display.warning("Skipping plugin (%s) as it seems to be incomplete: %s" % (path, to_text(e)))
+
+ self._update_object(obj, name, path)
+ yield obj
+
+action_loader = PluginLoader(
+ 'ActionModule',
+ 'ansible.plugins.action',
+ C.DEFAULT_ACTION_PLUGIN_PATH,
+ 'action_plugins',
+ required_base_class='ActionBase',
+)
+
+cache_loader = PluginLoader(
+ 'CacheModule',
+ 'ansible.plugins.cache',
+ C.DEFAULT_CACHE_PLUGIN_PATH,
+ 'cache_plugins',
+)
+
+callback_loader = PluginLoader(
+ 'CallbackModule',
+ 'ansible.plugins.callback',
+ C.DEFAULT_CALLBACK_PLUGIN_PATH,
+ 'callback_plugins',
+)
+
+connection_loader = PluginLoader(
+ 'Connection',
+ 'ansible.plugins.connection',
+ C.DEFAULT_CONNECTION_PLUGIN_PATH,
+ 'connection_plugins',
+ aliases={'paramiko': 'paramiko_ssh'},
+ required_base_class='ConnectionBase',
+)
+
+shell_loader = PluginLoader(
+ 'ShellModule',
+ 'ansible.plugins.shell',
+ 'shell_plugins',
+ 'shell_plugins',
+)
+
+module_loader = PluginLoader(
+ '',
+ 'ansible.modules',
+ C.DEFAULT_MODULE_PATH,
+ 'library',
+)
+
+module_utils_loader = PluginLoader(
+ '',
+ 'ansible.module_utils',
+ C.DEFAULT_MODULE_UTILS_PATH,
+ 'module_utils',
+)
+
+# NB: dedicated loader is currently necessary because PS module_utils expects "with subdir" lookup where
+# regular module_utils doesn't. This can be revisited once we have more granular loaders.
+ps_module_utils_loader = PluginLoader(
+ '',
+ 'ansible.module_utils',
+ C.DEFAULT_MODULE_UTILS_PATH,
+ 'module_utils',
+)
+
+lookup_loader = PluginLoader(
+ 'LookupModule',
+ 'ansible.plugins.lookup',
+ C.DEFAULT_LOOKUP_PLUGIN_PATH,
+ 'lookup_plugins',
+ required_base_class='LookupBase',
+)
+
+filter_loader = PluginLoader(
+ 'FilterModule',
+ 'ansible.plugins.filter',
+ C.DEFAULT_FILTER_PLUGIN_PATH,
+ 'filter_plugins',
+)
+
+test_loader = PluginLoader(
+ 'TestModule',
+ 'ansible.plugins.test',
+ C.DEFAULT_TEST_PLUGIN_PATH,
+ 'test_plugins'
+)
+
+fragment_loader = PluginLoader(
+ 'ModuleDocFragment',
+ 'ansible.utils.module_docs_fragments',
+ os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
+ '',
+)
+
+strategy_loader = PluginLoader(
+ 'StrategyModule',
+ 'ansible.plugins.strategy',
+ C.DEFAULT_STRATEGY_PLUGIN_PATH,
+ 'strategy_plugins',
+ required_base_class='StrategyBase',
+)
+
+terminal_loader = PluginLoader(
+ 'TerminalModule',
+ 'ansible.plugins.terminal',
+ 'terminal_plugins',
+ 'terminal_plugins'
+)
+
+vars_loader = PluginLoader(
+ 'VarsModule',
+ 'ansible.plugins.vars',
+ C.DEFAULT_VARS_PLUGIN_PATH,
+ 'vars_plugins',
+)
+
+cliconf_loader = PluginLoader(
+ 'Cliconf',
+ 'ansible.plugins.cliconf',
+ 'cliconf_plugins',
+ 'cliconf_plugins',
+ required_base_class='CliconfBase'
+)
+
+netconf_loader = PluginLoader(
+ 'Netconf',
+ 'ansible.plugins.netconf',
+ 'netconf_plugins',
+ 'netconf_plugins',
+ required_base_class='NetconfBase'
+)
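
Relative to the code removed from ansible/plugins/__init__.py, the functional addition in loader.py is the _load_config_defs()/_update_object() pair: when a plugin is loaded, its DOCUMENTATION options are read from the source file and registered with the config manager before the instance is handed back. A condensed, self-contained sketch of that sequence, with read_docstring and the config manager stubbed out, and covering only connection plugins to match the FIXME above:

    def read_docstring(path):
        # stub for ansible.parsing.plugin_docs.read_docstring(); content hypothetical
        return {'doc': {'options': {
            'ssh_args': {'ini': [{'section': 'ssh_connection', 'key': 'ssh_args'}]},
        }}}

    plugin_config = {}  # stands in for the config manager's definition store

    def load_config_defs(class_name, name, path):
        # mirrors PluginLoader._load_config_defs(): connection plugins only for now
        if class_name in ('Connection',):
            doc = read_docstring(path).get('doc') or {}
            options = doc.get('options')
            if isinstance(options, dict):
                plugin_config[('connection', name)] = options

    load_config_defs('Connection', 'ssh', '/dev/null')
    print(sorted(plugin_config[('connection', 'ssh')]))  # -> ['ssh_args']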
diff --git a/lib/ansible/plugins/lookup/etcd.py b/lib/ansible/plugins/lookup/etcd.py
index c0de93c3be..30b562fb30 100644
--- a/lib/ansible/plugins/lookup/etcd.py
+++ b/lib/ansible/plugins/lookup/etcd.py
@@ -29,20 +29,24 @@ DOCUMENTATION:
description:
- the list of keys to lookup on the etcd server
type: list
- element_type: string
+ elements: string
required: True
_etcd_url:
description:
- Environment variable with the url for the etcd server
default: 'http://127.0.0.1:4001'
- env_vars:
- - name: ANSIBLE_ETCD_URL
+ env:
+ - name: ANSIBLE_ETCD_URL
+ yaml:
+ - key: etcd.url
_etcd_version:
description:
- Environment variable with the etcd protocol version
default: 'v1'
- env_vars:
- - name: ANSIBLE_ETCD_VERSION
+ env:
+ - name: ANSIBLE_ETCD_VERSION
+ yaml:
+ - key: etcd.version
EXAMPLES:
- name: "a value from a locally running etcd"
debug: msg={{ lookup('etcd', 'foo/bar') }}
@@ -50,10 +54,11 @@ EXAMPLES:
- name: "a values from a folder on a locally running etcd"
debug: msg={{ lookup('etcd', 'foo') }}
RETURN:
- _list:
+ _raw:
description:
- list of values associated with input keys
- type: strings
+ type: list
+        elements: string
'''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
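With the source keys renamed from env_vars to env (and a yaml source added alongside), the lookup's two options can be fed from the environment. A hedged sketch (the values shown are the documented defaults):

    import os

    # these feed the _etcd_url and _etcd_version options declared above
    os.environ['ANSIBLE_ETCD_URL'] = 'http://127.0.0.1:4001'
    os.environ['ANSIBLE_ETCD_VERSION'] = 'v1'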
diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py
index a3698aabdf..4e54fcaf2a 100644
--- a/lib/ansible/plugins/strategy/__init__.py
+++ b/lib/ansible/plugins/strategy/__init__.py
@@ -40,7 +40,7 @@ from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
-from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
+from ansible.plugins.loader import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
from ansible.template import Templar
from ansible.utils.vars import combine_vars
from ansible.vars.manager import strip_internal_keys
@@ -899,6 +899,7 @@ class StrategyBase:
msg = "ending play"
elif meta_action == 'reset_connection':
connection = connection_loader.get(play_context.connection, play_context, os.devnull)
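+            # let the play context absorb any options resolved by the connection plugin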
+ play_context.set_options_from_plugin(connection)
if connection:
connection.reset()
msg = 'reset connection'
diff --git a/lib/ansible/plugins/strategy/free.py b/lib/ansible/plugins/strategy/free.py
index 9b009bc8f5..43e5c05ddd 100644
--- a/lib/ansible/plugins/strategy/free.py
+++ b/lib/ansible/plugins/strategy/free.py
@@ -34,7 +34,7 @@ import time
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.playbook.included_file import IncludedFile
-from ansible.plugins import action_loader
+from ansible.plugins.loader import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar
from ansible.module_utils._text import to_text
diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py
index 76bb946ab8..250be3d6e0 100644
--- a/lib/ansible/plugins/strategy/linear.py
+++ b/lib/ansible/plugins/strategy/linear.py
@@ -38,7 +38,7 @@ from ansible.module_utils._text import to_text
from ansible.playbook.block import Block
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task import Task
-from ansible.plugins import action_loader
+from ansible.plugins.loader import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 19741a997a..c23bdf43ce 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -45,7 +45,7 @@ from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils._text import to_native, to_text, to_bytes
-from ansible.plugins import filter_loader, lookup_loader, test_loader
+from ansible.plugins.loader import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
diff --git a/lib/ansible/template/safe_eval.py b/lib/ansible/template/safe_eval.py
index 2f819b40b1..d2ccc09683 100644
--- a/lib/ansible/template/safe_eval.py
+++ b/lib/ansible/template/safe_eval.py
@@ -24,7 +24,7 @@ import sys
from ansible import constants as C
from ansible.module_utils.six import string_types
from ansible.module_utils.six.moves import builtins
-from ansible.plugins import filter_loader, test_loader
+from ansible.plugins.loader import filter_loader, test_loader
def safe_eval(expr, locals={}, include_exceptions=False):
diff --git a/lib/ansible/utils/plugin_docs.py b/lib/ansible/utils/plugin_docs.py
index f59f115401..717f023f4d 100644
--- a/lib/ansible/utils/plugin_docs.py
+++ b/lib/ansible/utils/plugin_docs.py
@@ -20,15 +20,12 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import ast
-import yaml
-
from collections import MutableMapping, MutableSet, MutableSequence
from ansible.module_utils.six import string_types
-from ansible.parsing.metadata import extract_metadata
+from ansible.parsing.plugin_docs import read_docstring
from ansible.parsing.yaml.loader import AnsibleLoader
-from ansible.plugins import fragment_loader
+from ansible.plugins.loader import fragment_loader
try:
from __main__ import display
@@ -93,94 +90,13 @@ def add_fragments(doc, filename):
def get_docstring(filename, verbose=False):
"""
- Search for assignment of the DOCUMENTATION and EXAMPLES variables
- in the given file.
- Parse DOCUMENTATION from YAML and return the YAML doc or None
- together with EXAMPLES, as plain text.
-
- DOCUMENTATION can be extended using documentation fragments
- loaded by the PluginLoader from the module_docs_fragments
- directory.
+    DOCUMENTATION can be extended using documentation fragments
+    loaded by the PluginLoader from the module_docs_fragments directory.
"""
- # FIXME: Should refactor this so that we have a docstring parsing
- # function and a separate variable parsing function
- # Can have a function one higher that invokes whichever is needed
- #
- # Should look roughly like this:
- # get_plugin_doc(filename, verbose=False)
- # documentation = extract_docstring(plugin_ast, identifier, verbose=False)
- # if not documentation and not (filter or test):
- # documentation = extract_variables(plugin_ast)
- # documentation['metadata'] = extract_metadata(plugin_ast)
-
- data = {
- 'doc': None,
- 'plainexamples': None,
- 'returndocs': None,
- 'metadata': None
- }
-
- string_to_vars = {
- 'DOCUMENTATION': 'doc',
- 'EXAMPLES': 'plainexamples',
- 'RETURN': 'returndocs',
- }
-
- try:
- b_module_data = open(filename, 'rb').read()
- M = ast.parse(b_module_data)
- try:
- display.debug('Attempt first docstring is yaml docs')
- docstring = yaml.load(M.body[0].value.s)
- for string in string_to_vars.keys():
- if string in docstring:
- data[string_to_vars[string]] = docstring[string]
- display.debug('assigned :%s' % string_to_vars[string])
- except Exception as e:
- display.debug('failed docstring parsing: %s' % str(e))
-
- if 'docs' not in data or not data['docs']:
- display.debug('Fallback to vars parsing')
- for child in M.body:
- if isinstance(child, ast.Assign):
- for t in child.targets:
- try:
- theid = t.id
- except AttributeError:
- # skip errors can happen when trying to use the normal code
- display.warning("Failed to assign id for %s on %s, skipping" % (t, filename))
- continue
-
- if theid in string_to_vars:
- varkey = string_to_vars[theid]
- if isinstance(child.value, ast.Dict):
- data[varkey] = ast.literal_eval(child.value)
- else:
- if theid == 'DOCUMENTATION':
- # string should be yaml
- data[varkey] = AnsibleLoader(child.value.s, file_name=filename).get_single_data()
- else:
- # not yaml, should be a simple string
- data[varkey] = child.value.s
- display.debug('assigned :%s' % varkey)
-
- # Metadata is per-file rather than per-plugin/function
- data['metadata'] = extract_metadata(module_ast=M)[0]
-
- # add fragments to documentation
- if data['doc']:
- add_fragments(data['doc'], filename)
-
- # remove version
- if data['metadata']:
- for x in ('version', 'metadata_version'):
- if x in data['metadata']:
- del data['metadata'][x]
- except Exception as e:
- display.error("unable to parse %s" % filename)
- if verbose is True:
- display.display("unable to parse %s" % filename)
- raise
+ data = read_docstring(filename, verbose=verbose)
+
+ # add fragments to documentation
+ if data.get('doc', False):
+ add_fragments(data['doc'], filename)
return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
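get_docstring is now a thin wrapper over read_docstring, keeping only the fragment expansion here. A minimal sketch of the call side (the path and the doc key are placeholders):

    # the four sections parsed out of a plugin file, in this order
    doc, plainexamples, returndocs, metadata = get_docstring('/path/to/plugin.py')
    if doc:
        print(doc.get('short_description'))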
diff --git a/lib/ansible/vars/manager.py b/lib/ansible/vars/manager.py
index af0f4dcde6..0659b825e0 100644
--- a/lib/ansible/vars/manager.py
+++ b/lib/ansible/vars/manager.py
@@ -37,7 +37,7 @@ from ansible.inventory.host import Host
from ansible.inventory.helpers import sort_groups, get_group_vars
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems, string_types, text_type
-from ansible.plugins import lookup_loader, vars_loader
+from ansible.plugins.loader import lookup_loader, vars_loader
from ansible.plugins.cache import FactCache
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
@@ -86,6 +86,21 @@ def strip_internal_keys(dirty):
return clean
+def remove_internal_keys(data):
+    '''
+    More nuanced version of strip_internal_keys: removes unexpected internal
+    keys (warning about each) and drops empty 'warnings'/'deprecations' entries.
+    '''
+ for key in list(data.keys()):
+ if (key.startswith('_ansible_') and key != '_ansible_parsed') or key in C.INTERNAL_RESULT_KEYS:
+ display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
+ del data[key]
+
+ # remove bad/empty internal keys
+ for key in ['warnings', 'deprecations']:
+ if key in data and not data[key]:
+ del data[key]
+
+
class VariableManager:
def __init__(self, loader=None, inventory=None):
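remove_internal_keys mutates its argument in place; a quick illustration with made-up values:

    result = {'_ansible_no_log': False, '_ansible_parsed': True, 'warnings': [], 'rc': 0}
    remove_internal_keys(result)
    # '_ansible_no_log' is dropped (with a warning), the empty 'warnings' list is
    # removed silently, and '_ansible_parsed' plus ordinary keys like 'rc' survive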
diff --git a/setup.py b/setup.py
index 0262b7d3ef..4345e4206e 100644
--- a/setup.py
+++ b/setup.py
@@ -190,8 +190,7 @@ setup(
'galaxy/data/*/*/.*',
'galaxy/data/*/*/*.*',
'galaxy/data/*/tests/inventory',
- 'config/data/*.yaml',
- 'config/data/*.yml',
+ 'config/base.yml',
],
},
classifiers=[
diff --git a/test/runner/lib/ansible_util.py b/test/runner/lib/ansible_util.py
index 2cd7013fd8..c90772c8b3 100644
--- a/test/runner/lib/ansible_util.py
+++ b/test/runner/lib/ansible_util.py
@@ -21,20 +21,19 @@ def ansible_environment(args, color=True):
if not path.startswith(ansible_path + os.pathsep):
path = ansible_path + os.pathsep + path
- ansible_config = '/dev/null'
- if os.path.isfile('test/integration/%s.cfg' % args.command):
- ansible_config = os.path.abspath('test/integration/%s.cfg' % args.command)
-
ansible = dict(
ANSIBLE_FORCE_COLOR='%s' % 'true' if args.color and color else 'false',
ANSIBLE_DEPRECATION_WARNINGS='false',
- ANSIBLE_CONFIG=ansible_config,
ANSIBLE_HOST_KEY_CHECKING='false',
PYTHONPATH=os.path.abspath('lib'),
PAGER='/bin/cat',
PATH=path,
)
+ if os.path.isfile('test/integration/%s.cfg' % args.command):
+ ansible_config = os.path.abspath('test/integration/%s.cfg' % args.command)
+ ansible['ANSIBLE_CONFIG'] = ansible_config
+
env.update(ansible)
if args.debug:
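ANSIBLE_CONFIG is now only exported when a per-command integration config actually exists, so Ansible can fall back to its built-in defaults instead of being pinned to a /dev/null config. A sketch of the resulting contract (hypothetical assertion; assumes ansible_environment returns the merged env dict):

    env = ansible_environment(args)
    # either the key is absent, or it points at a real file
    assert 'ANSIBLE_CONFIG' not in env or os.path.isfile(env['ANSIBLE_CONFIG'])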
diff --git a/test/sanity/pep8/legacy-files.txt b/test/sanity/pep8/legacy-files.txt
index 6352547ef1..b6db79130e 100644
--- a/test/sanity/pep8/legacy-files.txt
+++ b/test/sanity/pep8/legacy-files.txt
@@ -1,4 +1,5 @@
lib/ansible/cli/config.py
+lib/ansible/constants.py
lib/ansible/config/data.py
lib/ansible/config/manager.py
lib/ansible/modules/cloud/amazon/_ec2_ami_search.py
diff --git a/test/units/executor/test_task_executor.py b/test/units/executor/test_task_executor.py
index 169e647db8..3b75636b82 100644
--- a/test/units/executor/test_task_executor.py
+++ b/test/units/executor/test_task_executor.py
@@ -24,7 +24,7 @@ from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.executor.task_executor import TaskExecutor
from ansible.playbook.play_context import PlayContext
-from ansible.plugins import action_loader, lookup_loader
+from ansible.plugins.loader import action_loader, lookup_loader
from ansible.parsing.yaml.objects import AnsibleUnicode
from units.mock.loader import DictDataLoader
diff --git a/test/units/modules/cloud/amazon/test_data_pipeline.py b/test/units/modules/cloud/amazon/test_data_pipeline.py
index 2ebc49ea4c..bf9ac230fa 100644
--- a/test/units/modules/cloud/amazon/test_data_pipeline.py
+++ b/test/units/modules/cloud/amazon/test_data_pipeline.py
@@ -16,14 +16,20 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import pytest
-import boto3
import os
import json
import collections
from . placebo_fixtures import placeboify, maybe_sleep
+from nose.plugins.skip import SkipTest
+
from ansible.modules.cloud.amazon import data_pipeline
from ansible.module_utils._text import to_text
+try:
+ import boto3
+except ImportError:
+    raise SkipTest("test_data_pipeline.py requires the `boto3` and `botocore` modules")
+
@pytest.fixture(scope='module')
def dp_setup():
diff --git a/test/units/modules/packaging/os/test_rhn_register.py b/test/units/modules/packaging/os/test_rhn_register.py
index 3ee82d46f3..d3f92f0fe4 100644
--- a/test/units/modules/packaging/os/test_rhn_register.py
+++ b/test/units/modules/packaging/os/test_rhn_register.py
@@ -3,6 +3,7 @@ import json
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import PropertyMock, patch, mock_open
from ansible.module_utils import basic
+from ansible.module_utils._text import to_native
from ansible.module_utils.six.moves import xmlrpc_client
from ansible.modules.packaging.os import rhn_register
@@ -96,7 +97,7 @@ class TestRhnRegister(unittest.TestCase):
orig_import = __import__
with patch('__builtin__.__import__', side_effect=mock_import):
rhn = self.module.Rhn()
- self.assertEqual('123456789', rhn.systemid)
+ self.assertEqual('123456789', to_native(rhn.systemid))
def test_without_required_parameters(self):
"""Failure must occurs when all parameters are missing"""
diff --git a/test/units/plugins/filter/test_ipaddr.py b/test/units/plugins/filter/test_ipaddr.py
index 5ef36db80f..9643a0d8bf 100644
--- a/test/units/plugins/filter/test_ipaddr.py
+++ b/test/units/plugins/filter/test_ipaddr.py
@@ -20,6 +20,11 @@ __metaclass__ = type
from ansible.compat.tests import unittest
from ansible.plugins.filter.ipaddr import (ipaddr, _netmask_query, nthhost, next_nth_usable,
previous_nth_usable, network_in_usable, network_in_network)
+try:
+ import netaddr
+except ImportError:
+ from nose.plugins.skip import SkipTest
+ raise SkipTest("This test requires the `netaddr` python library")
class TestIpFilter(unittest.TestCase):
diff --git a/test/units/plugins/lookup/test_password.py b/test/units/plugins/lookup/test_password.py
index 30f27a85fb..b19e3460fa 100644
--- a/test/units/plugins/lookup/test_password.py
+++ b/test/units/plugins/lookup/test_password.py
@@ -29,7 +29,7 @@ from ansible.compat.tests.mock import mock_open, patch
from ansible.errors import AnsibleError
from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import builtins
-from ansible.plugins import PluginLoader
+from ansible.plugins.loader import PluginLoader
from ansible.plugins.lookup import password
from ansible.utils import encrypt
diff --git a/test/units/plugins/test_plugins.py b/test/units/plugins/test_plugins.py
index 64c12246c5..def2544ea8 100644
--- a/test/units/plugins/test_plugins.py
+++ b/test/units/plugins/test_plugins.py
@@ -24,7 +24,7 @@ import os
from ansible.compat.tests import BUILTINS, unittest
from ansible.compat.tests.mock import mock_open, patch, MagicMock
-from ansible.plugins import MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, PluginLoader
+from ansible.plugins.loader import MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, PluginLoader
class TestErrors(unittest.TestCase):