Diffstat (limited to 'lib/ansible')
-rw-r--r--  lib/ansible/cli/__init__.py | 138
-rw-r--r--  lib/ansible/cli/adhoc.py | 22
-rw-r--r--  lib/ansible/cli/console.py | 22
-rw-r--r--  lib/ansible/cli/doc.py | 36
-rw-r--r--  lib/ansible/cli/galaxy.py | 64
-rw-r--r--  lib/ansible/cli/playbook.py | 20
-rw-r--r--  lib/ansible/cli/pull.py | 33
-rw-r--r--  lib/ansible/cli/vault.py | 10
-rw-r--r--  lib/ansible/constants.py | 98
-rw-r--r--  lib/ansible/inventory/data.py | 17
-rw-r--r--  lib/ansible/inventory/group.py | 10
-rw-r--r--  lib/ansible/inventory/host.py | 18
-rw-r--r--  lib/ansible/inventory/manager.py | 43
-rw-r--r--  lib/ansible/module_utils/_text.py | 4
-rw-r--r--  lib/ansible/module_utils/a10.py | 9
-rw-r--r--  lib/ansible/module_utils/ansible_tower.py | 10
-rw-r--r--  lib/ansible/module_utils/aos.py | 18
-rw-r--r--  lib/ansible/module_utils/api.py | 25
-rw-r--r--  lib/ansible/module_utils/avi.py | 4
-rw-r--r--  lib/ansible/module_utils/azure_rm_common.py | 14
-rw-r--r--  lib/ansible/module_utils/basic.py | 198
-rw-r--r--  lib/ansible/module_utils/bigswitch_utils.py | 3
-rw-r--r--  lib/ansible/module_utils/connection.py | 13
-rw-r--r--  lib/ansible/module_utils/database.py | 20
-rw-r--r--  lib/ansible/module_utils/docker_common.py | 15
-rw-r--r--  lib/ansible/module_utils/ec2.py | 17
-rw-r--r--  lib/ansible/module_utils/f5_utils.py | 20
-rw-r--r--  lib/ansible/module_utils/fortios.py | 63
-rw-r--r--  lib/ansible/module_utils/gcdns.py | 8
-rw-r--r--  lib/ansible/module_utils/gce.py | 9
-rw-r--r--  lib/ansible/module_utils/gcp.py | 5
-rw-r--r--  lib/ansible/module_utils/infinibox.py | 6
-rw-r--r--  lib/ansible/module_utils/json_utils.py | 1
-rw-r--r--  lib/ansible/module_utils/junos.py | 21
-rw-r--r--  lib/ansible/module_utils/known_hosts.py | 4
-rw-r--r--  lib/ansible/module_utils/lxd.py | 4
-rw-r--r--  lib/ansible/module_utils/mysql.py | 1
-rw-r--r--  lib/ansible/module_utils/netcfg.py | 15
-rw-r--r--  lib/ansible/module_utils/netcli.py | 14
-rw-r--r--  lib/ansible/module_utils/netconf.py | 13
-rw-r--r--  lib/ansible/module_utils/network.py | 7
-rw-r--r--  lib/ansible/module_utils/network_common.py | 14
-rw-r--r--  lib/ansible/module_utils/openstack.py | 21
-rw-r--r--  lib/ansible/module_utils/openswitch.py | 9
-rw-r--r--  lib/ansible/module_utils/ordnance.py | 1
-rw-r--r--  lib/ansible/module_utils/ovirt.py | 10
-rw-r--r--  lib/ansible/module_utils/postgres.py | 29
-rw-r--r--  lib/ansible/module_utils/pycompat24.py | 2
-rw-r--r--  lib/ansible/module_utils/redhat.py | 11
-rw-r--r--  lib/ansible/module_utils/service.py | 21
-rw-r--r--  lib/ansible/module_utils/shell.py | 7
-rw-r--r--  lib/ansible/module_utils/splitter.py | 20
-rw-r--r--  lib/ansible/module_utils/univention_umc.py | 18
-rw-r--r--  lib/ansible/module_utils/urls.py | 72
-rw-r--r--  lib/ansible/module_utils/vca.py | 26
-rw-r--r--  lib/ansible/playbook/__init__.py | 2
-rw-r--r--  lib/ansible/playbook/attribute.py | 1
-rw-r--r--  lib/ansible/playbook/base.py | 52
-rw-r--r--  lib/ansible/playbook/become.py | 14
-rw-r--r--  lib/ansible/playbook/block.py | 34
-rw-r--r--  lib/ansible/playbook/conditional.py | 8
-rw-r--r--  lib/ansible/playbook/handler.py | 3
-rw-r--r--  lib/ansible/playbook/handler_task_include.py | 6
-rw-r--r--  lib/ansible/playbook/helpers.py | 33
-rw-r--r--  lib/ansible/playbook/included_file.py | 9
-rw-r--r--  lib/ansible/playbook/loop_control.py | 5
-rw-r--r--  lib/ansible/playbook/play.py | 64
-rw-r--r--  lib/ansible/playbook/play_context.py | 217
-rw-r--r--  lib/ansible/playbook/playbook_include.py | 9
-rw-r--r--  lib/ansible/playbook/role/__init__.py | 75
-rw-r--r--  lib/ansible/playbook/role/definition.py | 16
-rw-r--r--  lib/ansible/playbook/role/include.py | 3
-rw-r--r--  lib/ansible/playbook/role/metadata.py | 8
-rw-r--r--  lib/ansible/playbook/role/requirement.py | 4
-rw-r--r--  lib/ansible/playbook/role_include.py | 11
-rw-r--r--  lib/ansible/playbook/taggable.py | 8
-rw-r--r--  lib/ansible/playbook/task.py | 56
-rw-r--r--  lib/ansible/playbook/task_include.py | 1
-rw-r--r--  lib/ansible/plugins/__init__.py | 55
-rw-r--r--  lib/ansible/plugins/action/__init__.py | 86
-rw-r--r--  lib/ansible/plugins/action/asa_config.py | 3
-rw-r--r--  lib/ansible/plugins/action/asa_template.py | 1
-rw-r--r--  lib/ansible/plugins/action/assemble.py | 14
-rw-r--r--  lib/ansible/plugins/action/copy.py | 18
-rw-r--r--  lib/ansible/plugins/action/eos_template.py | 1
-rw-r--r--  lib/ansible/plugins/action/fetch.py | 14
-rw-r--r--  lib/ansible/plugins/action/group_by.py | 4
-rw-r--r--  lib/ansible/plugins/action/ios_template.py | 1
-rw-r--r--  lib/ansible/plugins/action/iosxr_template.py | 2
-rw-r--r--  lib/ansible/plugins/action/junos.py | 2
-rw-r--r--  lib/ansible/plugins/action/junos_template.py | 1
-rw-r--r--  lib/ansible/plugins/action/normal.py | 2
-rw-r--r--  lib/ansible/plugins/action/nxos_template.py | 1
-rw-r--r--  lib/ansible/plugins/action/ops_config.py | 3
-rw-r--r--  lib/ansible/plugins/action/ops_template.py | 4
-rw-r--r--  lib/ansible/plugins/action/package.py | 6
-rw-r--r--  lib/ansible/plugins/action/patch.py | 4
-rw-r--r--  lib/ansible/plugins/action/pause.py | 15
-rw-r--r--  lib/ansible/plugins/action/script.py | 4
-rw-r--r--  lib/ansible/plugins/action/service.py | 6
-rw-r--r--  lib/ansible/plugins/action/set_stats.py | 2
-rw-r--r--  lib/ansible/plugins/action/sros_config.py | 7
-rw-r--r--  lib/ansible/plugins/action/synchronize.py | 7
-rw-r--r--  lib/ansible/plugins/action/template.py | 11
-rw-r--r--  lib/ansible/plugins/action/unarchive.py | 6
-rw-r--r--  lib/ansible/plugins/cache/__init__.py | 26
-rw-r--r--  lib/ansible/plugins/cache/base.py | 1
-rw-r--r--  lib/ansible/plugins/cache/jsonfile.py | 1
-rw-r--r--  lib/ansible/plugins/cache/memory.py | 1
-rw-r--r--  lib/ansible/plugins/cache/pickle.py | 1
-rw-r--r--  lib/ansible/plugins/cache/redis.py | 3
-rw-r--r--  lib/ansible/plugins/cache/yaml.py | 1
-rw-r--r--  lib/ansible/plugins/callback/__init__.py | 15
-rw-r--r--  lib/ansible/plugins/callback/actionable.py | 2
-rw-r--r--  lib/ansible/plugins/callback/context_demo.py | 1
-rw-r--r--  lib/ansible/plugins/callback/default.py | 21
-rw-r--r--  lib/ansible/plugins/callback/dense.py | 36
-rw-r--r--  lib/ansible/plugins/callback/foreman.py | 18
-rw-r--r--  lib/ansible/plugins/callback/hipchat.py | 6
-rw-r--r--  lib/ansible/plugins/callback/jabber.py | 15
-rw-r--r--  lib/ansible/plugins/callback/log_plays.py | 4
-rw-r--r--  lib/ansible/plugins/callback/logentries.py | 12
-rw-r--r--  lib/ansible/plugins/callback/logstash.py | 23
-rw-r--r--  lib/ansible/plugins/callback/mail.py | 6
-rw-r--r--  lib/ansible/plugins/callback/minimal.py | 6
-rw-r--r--  lib/ansible/plugins/callback/oneline.py | 19
-rw-r--r--  lib/ansible/plugins/callback/osx_say.py | 13
-rw-r--r--  lib/ansible/plugins/callback/profile_tasks.py | 10
-rw-r--r--  lib/ansible/plugins/callback/selective.py | 15
-rw-r--r--  lib/ansible/plugins/callback/skippy.py | 1
-rw-r--r--  lib/ansible/plugins/callback/slack.py | 1
-rw-r--r--  lib/ansible/plugins/callback/syslog_json.py | 23
-rw-r--r--  lib/ansible/plugins/connection/__init__.py | 4
-rw-r--r--  lib/ansible/plugins/connection/accelerate.py | 14
-rw-r--r--  lib/ansible/plugins/connection/chroot.py | 2
-rw-r--r--  lib/ansible/plugins/connection/docker.py | 4
-rw-r--r--  lib/ansible/plugins/connection/funcd.py | 1
-rw-r--r--  lib/ansible/plugins/connection/iocage.py | 1
-rw-r--r--  lib/ansible/plugins/connection/jail.py | 4
-rw-r--r--  lib/ansible/plugins/connection/libvirt_lxc.py | 2
-rw-r--r--  lib/ansible/plugins/connection/local.py | 2
-rw-r--r--  lib/ansible/plugins/connection/lxc.py | 4
-rw-r--r--  lib/ansible/plugins/connection/paramiko_ssh.py | 20
-rw-r--r--  lib/ansible/plugins/connection/ssh.py | 18
-rw-r--r--  lib/ansible/plugins/connection/winrm.py | 23
-rw-r--r--  lib/ansible/plugins/connection/zone.py | 18
-rw-r--r--  lib/ansible/plugins/filter/__init__.py | 2
-rw-r--r--  lib/ansible/plugins/filter/core.py | 71
-rw-r--r--  lib/ansible/plugins/filter/ipaddr.py | 92
-rw-r--r--  lib/ansible/plugins/filter/json_query.py | 1
-rw-r--r--  lib/ansible/plugins/filter/mathstuff.py | 34
-rw-r--r--  lib/ansible/plugins/inventory/__init__.py | 26
-rw-r--r--  lib/ansible/plugins/inventory/ini.py | 5
-rw-r--r--  lib/ansible/plugins/inventory/script.py | 9
-rw-r--r--  lib/ansible/plugins/inventory/yaml.py | 5
-rw-r--r--  lib/ansible/plugins/lookup/__init__.py | 2
-rw-r--r--  lib/ansible/plugins/lookup/cartesian.py | 2
-rw-r--r--  lib/ansible/plugins/lookup/csvfile.py | 12
-rw-r--r--  lib/ansible/plugins/lookup/dict.py | 1
-rw-r--r--  lib/ansible/plugins/lookup/dig.py | 65
-rw-r--r--  lib/ansible/plugins/lookup/dnstxt.py | 8
-rw-r--r--  lib/ansible/plugins/lookup/env.py | 1
-rw-r--r--  lib/ansible/plugins/lookup/etcd.py | 4
-rw-r--r--  lib/ansible/plugins/lookup/filetree.py | 7
-rw-r--r--  lib/ansible/plugins/lookup/first_found.py | 89
-rw-r--r--  lib/ansible/plugins/lookup/flattened.py | 8
-rw-r--r--  lib/ansible/plugins/lookup/hashi_vault.py | 7
-rw-r--r--  lib/ansible/plugins/lookup/indexed_items.py | 2
-rw-r--r--  lib/ansible/plugins/lookup/ini.py | 14
-rw-r--r--  lib/ansible/plugins/lookup/inventory_hostnames.py | 9
-rw-r--r--  lib/ansible/plugins/lookup/items.py | 2
-rw-r--r--  lib/ansible/plugins/lookup/keyring.py | 10
-rw-r--r--  lib/ansible/plugins/lookup/lines.py | 1
-rw-r--r--  lib/ansible/plugins/lookup/list.py | 1
-rw-r--r--  lib/ansible/plugins/lookup/mongodb.py | 15
-rw-r--r--  lib/ansible/plugins/lookup/nested.py | 5
-rw-r--r--  lib/ansible/plugins/lookup/password.py | 2
-rw-r--r--  lib/ansible/plugins/lookup/passwordstore.py | 31
-rw-r--r--  lib/ansible/plugins/lookup/pipe.py | 1
-rw-r--r--  lib/ansible/plugins/lookup/random_choice.py | 4
-rw-r--r--  lib/ansible/plugins/lookup/redis_kv.py | 8
-rw-r--r--  lib/ansible/plugins/lookup/sequence.py | 4
-rw-r--r--  lib/ansible/plugins/lookup/shelvefile.py | 2
-rw-r--r--  lib/ansible/plugins/lookup/subelements.py | 6
-rw-r--r--  lib/ansible/plugins/lookup/template.py | 2
-rw-r--r--  lib/ansible/plugins/lookup/together.py | 1
-rw-r--r--  lib/ansible/plugins/shell/__init__.py | 8
-rw-r--r--  lib/ansible/plugins/shell/csh.py | 1
-rw-r--r--  lib/ansible/plugins/shell/fish.py | 2
-rw-r--r--  lib/ansible/plugins/shell/powershell.py | 16
-rw-r--r--  lib/ansible/plugins/shell/sh.py | 2
-rw-r--r--  lib/ansible/plugins/strategy/__init__.py | 79
-rw-r--r--  lib/ansible/plugins/strategy/debug.py | 6
-rw-r--r--  lib/ansible/plugins/strategy/free.py | 6
-rw-r--r--  lib/ansible/plugins/strategy/linear.py | 10
-rw-r--r--  lib/ansible/plugins/test/core.py | 37
-rw-r--r--  lib/ansible/plugins/test/files.py | 17
-rw-r--r--  lib/ansible/plugins/test/mathstuff.py | 4
-rw-r--r--  lib/ansible/plugins/vars/__init__.py | 2
-rw-r--r--  lib/ansible/release.py | 2
-rw-r--r--  lib/ansible/template/__init__.py | 69
-rw-r--r--  lib/ansible/template/safe_eval.py | 3
-rw-r--r--  lib/ansible/template/template.py | 1
-rw-r--r--  lib/ansible/template/vars.py | 4
-rw-r--r--  lib/ansible/vars/manager.py | 34
205 files changed, 1856 insertions(+), 1668 deletions(-)
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index 40beee3192..86fa8edd07 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -20,25 +20,26 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import getpass
import operator
import optparse
import os
+import subprocess
+import re
import sys
import time
import yaml
-import re
-import getpass
-import subprocess
+
from abc import ABCMeta, abstractmethod
import ansible
-from ansible.release import __version__
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.inventory.manager import InventoryManager
from ansible.module_utils.six import with_metaclass, string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.dataloader import DataLoader
+from ansible.release import __version__
from ansible.utils.path import unfrackpath
from ansible.utils.vars import load_extra_vars, load_options_vars
from ansible.vars.manager import VariableManager
@@ -79,7 +80,7 @@ class InvalidOptsParser(SortedOptParser):
add_help_option=False,
prog=parser.prog,
epilog=parser.epilog)
- self.version=parser.version
+ self.version = parser.version
def _process_long_opt(self, rargs, values):
try:
@@ -93,18 +94,19 @@ class InvalidOptsParser(SortedOptParser):
except optparse.BadOptionError:
pass
+
class CLI(with_metaclass(ABCMeta, object)):
''' code behind bin/ansible* programs '''
VALID_ACTIONS = []
_ITALIC = re.compile(r"I\(([^)]+)\)")
- _BOLD = re.compile(r"B\(([^)]+)\)")
+ _BOLD = re.compile(r"B\(([^)]+)\)")
_MODULE = re.compile(r"M\(([^)]+)\)")
- _URL = re.compile(r"U\(([^)]+)\)")
- _CONST = re.compile(r"C\(([^)]+)\)")
+ _URL = re.compile(r"U\(([^)]+)\)")
+ _CONST = re.compile(r"C\(([^)]+)\)")
- PAGER = 'less'
+ PAGER = 'less'
# -F (quit-if-one-screen) -R (allow raw ansi control chars)
# -S (chop long lines) -X (disable termcap init and de-init)
@@ -229,7 +231,7 @@ class CLI(with_metaclass(ABCMeta, object)):
def normalize_become_options(self):
''' this keeps backwards compatibility with sudo/su self.options '''
self.options.become_ask_pass = self.options.become_ask_pass or self.options.ask_sudo_pass or self.options.ask_su_pass or C.DEFAULT_BECOME_ASK_PASS
- self.options.become_user = self.options.become_user or self.options.sudo_user or self.options.su_user or C.DEFAULT_BECOME_USER
+ self.options.become_user = self.options.become_user or self.options.sudo_user or self.options.su_user or C.DEFAULT_BECOME_USER
if self.options.become:
pass
@@ -287,78 +289,79 @@ class CLI(with_metaclass(ABCMeta, object)):
@staticmethod
def base_parser(usage="", output_opts=False, runas_opts=False, meta_opts=False, runtask_opts=False, vault_opts=False, module_opts=False,
- async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False,
- runas_prompt_opts=False, desc=None):
+ async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False,
+ runas_prompt_opts=False, desc=None):
''' create an options parser for most ansible scripts '''
# base opts
parser = SortedOptParser(usage, version=CLI.version("%prog"), description=desc, epilog=epilog)
- parser.add_option('-v','--verbose', dest='verbosity', default=C.DEFAULT_VERBOSITY, action="count",
- help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
+ parser.add_option('-v', '--verbose', dest='verbosity', default=C.DEFAULT_VERBOSITY, action="count",
+ help="verbose mode (-vvv for more, -vvvv to enable connection debugging)")
if inventory_opts:
parser.add_option('-i', '--inventory', '--inventory-file', dest='inventory', action="append",
- help="specify inventory host path (default=[%s]) or comma separated host list. --inventory-file is deprecated" % C.DEFAULT_HOST_LIST)
+ help="specify inventory host path (default=[%s]) or comma separated host list. "
+ "--inventory-file is deprecated" % C.DEFAULT_HOST_LIST)
parser.add_option('--list-hosts', dest='listhosts', action='store_true',
- help='outputs a list of matching hosts; does not execute anything else')
+ help='outputs a list of matching hosts; does not execute anything else')
parser.add_option('-l', '--limit', default=C.DEFAULT_SUBSET, dest='subset',
- help='further limit selected hosts to an additional pattern')
+ help='further limit selected hosts to an additional pattern')
if module_opts:
parser.add_option('-M', '--module-path', dest='module_path', default=None,
- help="prepend path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH,
- action="callback", callback=CLI.expand_tilde, type=str)
+ help="prepend path(s) to module library (default=%s)" % C.DEFAULT_MODULE_PATH,
+ action="callback", callback=CLI.expand_tilde, type=str)
if runtask_opts:
parser.add_option('-e', '--extra-vars', dest="extra_vars", action="append",
- help="set additional variables as key=value or YAML/JSON", default=[])
+ help="set additional variables as key=value or YAML/JSON", default=[])
if fork_opts:
- parser.add_option('-f','--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
- help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)
+ parser.add_option('-f', '--forks', dest='forks', default=C.DEFAULT_FORKS, type='int',
+ help="specify number of parallel processes to use (default=%s)" % C.DEFAULT_FORKS)
if vault_opts:
parser.add_option('--ask-vault-pass', default=C.DEFAULT_ASK_VAULT_PASS, dest='ask_vault_pass', action='store_true',
- help='ask for vault password')
+ help='ask for vault password')
parser.add_option('--vault-password-file', default=C.DEFAULT_VAULT_PASSWORD_FILE, dest='vault_password_file',
- help="vault password file", action="callback", callback=CLI.unfrack_path, type='string')
+ help="vault password file", action="callback", callback=CLI.unfrack_path, type='string')
parser.add_option('--new-vault-password-file', dest='new_vault_password_file',
- help="new vault password file for rekey", action="callback", callback=CLI.unfrack_path, type='string')
+ help="new vault password file for rekey", action="callback", callback=CLI.unfrack_path, type='string')
parser.add_option('--output', default=None, dest='output_file',
- help='output file name for encrypt or decrypt; use - for stdout',
- action="callback", callback=CLI.unfrack_path, type='string')
+ help='output file name for encrypt or decrypt; use - for stdout',
+ action="callback", callback=CLI.unfrack_path, type='string')
if subset_opts:
parser.add_option('-t', '--tags', dest='tags', default=[], action='append',
- help="only run plays and tasks tagged with these values")
+ help="only run plays and tasks tagged with these values")
parser.add_option('--skip-tags', dest='skip_tags', default=[], action='append',
- help="only run plays and tasks whose tags do not match these values")
+ help="only run plays and tasks whose tags do not match these values")
if output_opts:
parser.add_option('-o', '--one-line', dest='one_line', action='store_true',
- help='condense output')
+ help='condense output')
parser.add_option('-t', '--tree', dest='tree', default=None,
- help='log output to this directory')
+ help='log output to this directory')
if connect_opts:
connect_group = optparse.OptionGroup(parser, "Connection Options", "control as whom and how to connect to hosts")
connect_group.add_option('-k', '--ask-pass', default=C.DEFAULT_ASK_PASS, dest='ask_pass', action='store_true',
- help='ask for connection password')
- connect_group.add_option('--private-key','--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
- help='use this file to authenticate the connection', action="callback", callback=CLI.unfrack_path, type='string')
+ help='ask for connection password')
+ connect_group.add_option('--private-key', '--key-file', default=C.DEFAULT_PRIVATE_KEY_FILE, dest='private_key_file',
+ help='use this file to authenticate the connection', action="callback", callback=CLI.unfrack_path, type='string')
connect_group.add_option('-u', '--user', default=C.DEFAULT_REMOTE_USER, dest='remote_user',
- help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
+ help='connect as this user (default=%s)' % C.DEFAULT_REMOTE_USER)
connect_group.add_option('-c', '--connection', dest='connection', default=C.DEFAULT_TRANSPORT,
- help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
+ help="connection type to use (default=%s)" % C.DEFAULT_TRANSPORT)
connect_group.add_option('-T', '--timeout', default=C.DEFAULT_TIMEOUT, type='int', dest='timeout',
- help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
+ help="override the connection timeout in seconds (default=%s)" % C.DEFAULT_TIMEOUT)
connect_group.add_option('--ssh-common-args', default='', dest='ssh_common_args',
- help="specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)")
+ help="specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand)")
connect_group.add_option('--sftp-extra-args', default='', dest='sftp_extra_args',
- help="specify extra arguments to pass to sftp only (e.g. -f, -l)")
+ help="specify extra arguments to pass to sftp only (e.g. -f, -l)")
connect_group.add_option('--scp-extra-args', default='', dest='scp_extra_args',
- help="specify extra arguments to pass to scp only (e.g. -l)")
+ help="specify extra arguments to pass to scp only (e.g. -l)")
connect_group.add_option('--ssh-extra-args', default='', dest='ssh_extra_args',
- help="specify extra arguments to pass to ssh only (e.g. -R)")
+ help="specify extra arguments to pass to ssh only (e.g. -R)")
parser.add_option_group(connect_group)
@@ -368,54 +371,55 @@ class CLI(with_metaclass(ABCMeta, object)):
runas_group = rg
# priv user defaults to root later on to enable detecting when this option was given here
runas_group.add_option("-s", "--sudo", default=C.DEFAULT_SUDO, action="store_true", dest='sudo',
- help="run operations with sudo (nopasswd) (deprecated, use become)")
+ help="run operations with sudo (nopasswd) (deprecated, use become)")
runas_group.add_option('-U', '--sudo-user', dest='sudo_user', default=None,
- help='desired sudo user (default=root) (deprecated, use become)')
+ help='desired sudo user (default=root) (deprecated, use become)')
runas_group.add_option('-S', '--su', default=C.DEFAULT_SU, action='store_true',
- help='run operations with su (deprecated, use become)')
+ help='run operations with su (deprecated, use become)')
runas_group.add_option('-R', '--su-user', default=None,
- help='run operations with su as this user (default=%s) (deprecated, use become)' % C.DEFAULT_SU_USER)
+ help='run operations with su as this user (default=%s) (deprecated, use become)' % C.DEFAULT_SU_USER)
# consolidated privilege escalation (become)
runas_group.add_option("-b", "--become", default=C.DEFAULT_BECOME, action="store_true", dest='become',
- help="run operations with become (does not imply password prompting)")
+ help="run operations with become (does not imply password prompting)")
runas_group.add_option('--become-method', dest='become_method', default=C.DEFAULT_BECOME_METHOD, type='choice', choices=C.BECOME_METHODS,
- help="privilege escalation method to use (default=%s), valid choices: [ %s ]" % (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
+ help="privilege escalation method to use (default=%s), valid choices: [ %s ]" %
+ (C.DEFAULT_BECOME_METHOD, ' | '.join(C.BECOME_METHODS)))
runas_group.add_option('--become-user', default=None, dest='become_user', type='string',
- help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
+ help='run operations as this user (default=%s)' % C.DEFAULT_BECOME_USER)
if runas_opts or runas_prompt_opts:
if not runas_group:
runas_group = rg
runas_group.add_option('--ask-sudo-pass', default=C.DEFAULT_ASK_SUDO_PASS, dest='ask_sudo_pass', action='store_true',
- help='ask for sudo password (deprecated, use become)')
+ help='ask for sudo password (deprecated, use become)')
runas_group.add_option('--ask-su-pass', default=C.DEFAULT_ASK_SU_PASS, dest='ask_su_pass', action='store_true',
- help='ask for su password (deprecated, use become)')
+ help='ask for su password (deprecated, use become)')
runas_group.add_option('-K', '--ask-become-pass', default=False, dest='become_ask_pass', action='store_true',
- help='ask for privilege escalation password')
+ help='ask for privilege escalation password')
if runas_group:
parser.add_option_group(runas_group)
if async_opts:
parser.add_option('-P', '--poll', default=C.DEFAULT_POLL_INTERVAL, type='int', dest='poll_interval',
- help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
+ help="set the poll interval if using -B (default=%s)" % C.DEFAULT_POLL_INTERVAL)
parser.add_option('-B', '--background', dest='seconds', type='int', default=0,
- help='run asynchronously, failing after X seconds (default=N/A)')
+ help='run asynchronously, failing after X seconds (default=N/A)')
if check_opts:
parser.add_option("-C", "--check", default=False, dest='check', action='store_true',
- help="don't make any changes; instead, try to predict some of the changes that may occur")
+ help="don't make any changes; instead, try to predict some of the changes that may occur")
parser.add_option('--syntax-check', dest='syntax', action='store_true',
- help="perform a syntax check on the playbook, but do not execute it")
+ help="perform a syntax check on the playbook, but do not execute it")
parser.add_option("-D", "--diff", default=False, dest='diff', action='store_true',
- help="when changing (small) files and templates, show the differences in those files; works great with --check")
+ help="when changing (small) files and templates, show the differences in those files; works great with --check")
if meta_opts:
parser.add_option('--force-handlers', default=C.DEFAULT_FORCE_HANDLERS, dest='force_handlers', action='store_true',
- help="run handlers even if a task fails")
+ help="run handlers even if a task fails")
parser.add_option('--flush-cache', dest='flush_cache', action='store_true',
- help="clear the fact cache")
+ help="clear the fact cache")
return parser
@@ -491,7 +495,7 @@ class CLI(with_metaclass(ABCMeta, object)):
else:
# set default if it exists
if os.path.exists(C.DEFAULT_HOST_LIST):
- self.options.inventory = [ C.DEFAULT_HOST_LIST ]
+ self.options.inventory = [C.DEFAULT_HOST_LIST]
@staticmethod
def version(prog):
@@ -531,11 +535,11 @@ class CLI(with_metaclass(ABCMeta, object)):
if len(ansible_versions) < 3:
for counter in range(len(ansible_versions), 3):
ansible_versions.append(0)
- return {'string': ansible_version_string.strip(),
- 'full': ansible_version,
- 'major': ansible_versions[0],
- 'minor': ansible_versions[1],
- 'revision': ansible_versions[2]}
+ return {'string': ansible_version_string.strip(),
+ 'full': ansible_version,
+ 'major': ansible_versions[0],
+ 'minor': ansible_versions[1],
+ 'revision': ansible_versions[2]}
@staticmethod
def _git_repo_info(repo_path):
@@ -576,8 +580,7 @@ class CLI(with_metaclass(ABCMeta, object)):
offset = time.timezone
else:
offset = time.altzone
- result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit,
- time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
+ result = "({0} {1}) last updated {2} (GMT {3:+04d})".format(branch, commit, time.strftime("%Y/%m/%d %H:%M:%S", date), int(offset / -36))
else:
result = ''
return result
@@ -669,7 +672,7 @@ class CLI(with_metaclass(ABCMeta, object)):
else:
try:
f = open(this_path, "rb")
- vault_pass=f.read().strip()
+ vault_pass = f.read().strip()
f.close()
except (OSError, IOError) as e:
raise AnsibleError("Could not read vault password file %s: %s" % (this_path, e))
@@ -705,4 +708,3 @@ class CLI(with_metaclass(ABCMeta, object)):
variable_manager.options_vars = load_options_vars(options, CLI.version_info(gitinfo=False))
return loader, inventory, variable_manager
-
diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
index f1e94b6c3e..9795b221e7 100644
--- a/lib/ansible/cli/adhoc.py
+++ b/lib/ansible/cli/adhoc.py
@@ -66,10 +66,10 @@ class AdHocCLI(CLI):
# options unique to ansible ad-hoc
self.parser.add_option('-a', '--args', dest='module_args',
- help="module arguments", default=C.DEFAULT_MODULE_ARGS)
+ help="module arguments", default=C.DEFAULT_MODULE_ARGS)
self.parser.add_option('-m', '--module-name', dest='module_name',
- help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
- default=C.DEFAULT_MODULE_NAME)
+ help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
+ default=C.DEFAULT_MODULE_NAME)
super(AdHocCLI, self).parse()
@@ -84,10 +84,10 @@ class AdHocCLI(CLI):
def _play_ds(self, pattern, async, poll):
check_raw = self.options.module_name in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw')
return dict(
- name = "Ansible Ad-Hoc",
- hosts = pattern,
- gather_facts = 'no',
- tasks = [ dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args, check_raw=check_raw)), async=async, poll=poll) ]
+ name="Ansible Ad-Hoc",
+ hosts=pattern,
+ gather_facts='no',
+ tasks=[dict(action=dict(module=self.options.module_name, args=parse_kv(self.options.module_args, check_raw=check_raw)), async=async, poll=poll)]
)
def run(self):
@@ -98,12 +98,12 @@ class AdHocCLI(CLI):
# only thing left should be host pattern
pattern = to_text(self.args[0], errors='surrogate_or_strict')
- sshpass = None
+ sshpass = None
becomepass = None
self.normalize_become_options()
(sshpass, becomepass) = self.ask_passwords()
- passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
+ passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
loader, inventory, variable_manager = self._play_prereqs(self.options)
@@ -155,11 +155,11 @@ class AdHocCLI(CLI):
else:
cb = 'minimal'
- run_tree=False
+ run_tree = False
if self.options.tree:
C.DEFAULT_CALLBACK_WHITELIST.append('tree')
C.TREE_DIR = self.options.tree
- run_tree=True
+ run_tree = True
# now create a task queue manager to execute the play
self._tqm = None
diff --git a/lib/ansible/cli/console.py b/lib/ansible/cli/console.py
index 2dbcb4be0c..28a89a5a7b 100644
--- a/lib/ansible/cli/console.py
+++ b/lib/ansible/cli/console.py
@@ -58,8 +58,8 @@ class ConsoleCLI(CLI, cmd.Cmd):
''' a REPL that allows for running ad-hoc tasks against a chosen inventory (based on dominis' ansible-shell).'''
modules = []
- ARGUMENTS = { 'host-pattern': 'A name of a group in the inventory, a shell-like glob '
- 'selecting hosts in inventory or any combination of the two separated by commas.', }
+ ARGUMENTS = {'host-pattern': 'A name of a group in the inventory, a shell-like glob '
+ 'selecting hosts in inventory or any combination of the two separated by commas.'}
def __init__(self, args):
@@ -93,7 +93,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
# options unique to shell
self.parser.add_option('--step', dest='step', action='store_true',
- help="one-step-at-a-time: confirm each task before running")
+ help="one-step-at-a-time: confirm each task before running")
self.parser.set_defaults(cwd='*')
@@ -150,7 +150,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
elif module in C.IGNORE_FILES:
continue
elif module.startswith('_'):
- fullpath = '/'.join([path,module])
+ fullpath = '/'.join([path, module])
if os.path.islink(fullpath): # avoids aliases
continue
module = module.replace('_', '', 1)
@@ -184,10 +184,10 @@ class ConsoleCLI(CLI, cmd.Cmd):
try:
check_raw = self.options.module_name in ('command', 'shell', 'script', 'raw')
play_ds = dict(
- name = "Ansible Shell",
- hosts = self.options.cwd,
- gather_facts = 'no',
- tasks = [ dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))]
+ name="Ansible Shell",
+ hosts=self.options.cwd,
+ gather_facts='no',
+ tasks=[dict(action=dict(module=module, args=parse_kv(module_args, check_raw=check_raw)))]
)
play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
except Exception as e:
@@ -368,7 +368,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
mline = line.partition(' ')[2]
offs = len(mline) - len(text)
- if self.options.cwd in ('all','*','\\'):
+ if self.options.cwd in ('all', '*', '\\'):
completions = self.hosts + self.groups
else:
completions = [x.name for x in self.inventory.list_hosts(self.options.cwd)]
@@ -392,7 +392,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
super(ConsoleCLI, self).run()
- sshpass = None
+ sshpass = None
becomepass = None
# hosts
@@ -410,7 +410,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
self.normalize_become_options()
(sshpass, becomepass) = self.ask_passwords()
- self.passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
+ self.passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
self.loader, self.inventory, self.variable_manager = self._play_prereqs(self.options)
diff --git a/lib/ansible/cli/doc.py b/lib/ansible/cli/doc.py
index 1bf1747bfe..950c20af76 100644
--- a/lib/ansible/cli/doc.py
+++ b/lib/ansible/cli/doc.py
@@ -21,8 +21,8 @@ __metaclass__ = type
import datetime
import os
-import traceback
import textwrap
+import traceback
import yaml
from ansible import constants as C
@@ -60,13 +60,13 @@ class DocCLI(CLI):
)
self.parser.add_option("-l", "--list", action="store_true", default=False, dest='list_dir',
- help='List available plugins')
+ help='List available plugins')
self.parser.add_option("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
- help='Show playbook snippet for specified plugin(s)')
+ help='Show playbook snippet for specified plugin(s)')
self.parser.add_option("-a", "--all", action="store_true", default=False, dest='all_plugins',
- help='Show documentation for all plugins')
+ help='Show documentation for all plugins')
self.parser.add_option("-t", "--type", action="store", default='module', dest='type', type='choice',
- help='Choose which plugin type', choices=['module','cache', 'connection', 'callback', 'lookup', 'strategy', 'inventory'])
+ help='Choose which plugin type', choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy'])
super(DocCLI, self).parse()
@@ -90,7 +90,7 @@ class DocCLI(CLI):
elif plugin_type == 'strategy':
loader = strategy_loader
elif plugin_type == 'inventory':
- loader = PluginLoader( 'InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
+ loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
else:
loader = module_loader
@@ -141,9 +141,9 @@ class DocCLI(CLI):
if doc is not None:
# assign from other sections
- doc['plainexamples'] = plainexamples
- doc['returndocs'] = returndocs
- doc['metadata'] = metadata
+ doc['plainexamples'] = plainexamples
+ doc['returndocs'] = returndocs
+ doc['metadata'] = metadata
# generate extra data
if plugin_type == 'module':
@@ -152,9 +152,9 @@ class DocCLI(CLI):
doc['action'] = True
else:
doc['action'] = False
- doc['filename'] = filename
- doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
- doc['docuri'] = doc[plugin_type].replace('_', '-')
+ doc['filename'] = filename
+ doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d')
+ doc['docuri'] = doc[plugin_type].replace('_', '-')
if self.options.show_snippet and plugin_type == 'module':
text += self.get_snippet_text(doc)
@@ -238,7 +238,7 @@ class DocCLI(CLI):
if len(desc) > linelimit:
desc = desc[:linelimit] + '...'
- if plugin.startswith('_'): # Handle deprecated
+ if plugin.startswith('_'): # Handle deprecated
deprecated.append("%-*s %-*.*s" % (displace, plugin[1:], linelimit, len(desc), desc))
else:
text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(desc), desc))
@@ -309,7 +309,7 @@ class DocCLI(CLI):
text.append(textwrap.fill(CLI.tty_ify(opt['description']), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
del opt['description']
- aliases= ''
+ aliases = ''
if 'aliases' in opt:
choices = "(Aliases: " + ", ".join(str(i) for i in opt['aliases']) + ")"
del opt['aliases']
@@ -319,7 +319,7 @@ class DocCLI(CLI):
del opt['choices']
default = ''
if 'default' in opt or not required:
- default = "[Default: " + str(opt.pop('default', '(null)')) + "]"
+ default = "[Default: " + str(opt.pop('default', '(null)')) + "]"
text.append(textwrap.fill(CLI.tty_ify(aliases + choices + default), limit, initial_indent=opt_indent, subsequent_indent=opt_indent))
if 'options' in opt:
@@ -361,7 +361,7 @@ class DocCLI(CLI):
display.vv("Skipping %s key cuase we don't know how to handle eet" % k)
def get_man_text(self, doc):
- opt_indent=" "
+ opt_indent = " "
text = []
text.append("> %s (%s)\n" % (doc[self.options.type].upper(), doc['filename']))
pad = display.columns * 0.20
@@ -388,11 +388,11 @@ class DocCLI(CLI):
if 'notes' in doc and doc['notes'] and len(doc['notes']) > 0:
text.append("Notes:")
for note in doc['notes']:
- text.append(textwrap.fill(CLI.tty_ify(note), limit-6, initial_indent=" * ", subsequent_indent=opt_indent))
+ text.append(textwrap.fill(CLI.tty_ify(note), limit - 6, initial_indent=" * ", subsequent_indent=opt_indent))
if 'requirements' in doc and doc['requirements'] is not None and len(doc['requirements']) > 0:
req = ", ".join(doc['requirements'])
- text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit-16, initial_indent=" ", subsequent_indent=opt_indent))
+ text.append("Requirements:%s\n" % textwrap.fill(CLI.tty_ify(req), limit - 16, initial_indent=" ", subsequent_indent=opt_indent))
if 'examples' in doc and len(doc['examples']) > 0:
text.append("Example%s:\n" % ('' if len(doc['examples']) < 2 else 's'))
diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py
index a89435a011..a2e6b252a6 100644
--- a/lib/ansible/cli/galaxy.py
+++ b/lib/ansible/cli/galaxy.py
@@ -23,11 +23,11 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os.path
-import sys
-import yaml
-import time
import re
import shutil
+import sys
+import time
+import yaml
from jinja2 import Environment, FileSystemLoader
@@ -36,11 +36,11 @@ from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.galaxy import Galaxy
from ansible.galaxy.api import GalaxyAPI
-from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.login import GalaxyLogin
+from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.token import GalaxyToken
-from ansible.playbook.role.requirement import RoleRequirement
from ansible.module_utils._text import to_text
+from ansible.playbook.role.requirement import RoleRequirement
try:
from __main__ import display
@@ -52,7 +52,7 @@ except ImportError:
class GalaxyCLI(CLI):
'''command to manage Ansible roles in shared repostories, the default of which is Ansible Galaxy *https://galaxy.ansible.com*.'''
- SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" )
+ SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url")
VALID_ACTIONS = ("delete", "import", "info", "init", "install", "list", "login", "remove", "search", "setup")
def __init__(self, args):
@@ -64,7 +64,6 @@ class GalaxyCLI(CLI):
super(GalaxyCLI, self).set_action()
-
# specific to actions
if self.action == "delete":
self.parser.set_usage("usage: %prog delete [options] github_user github_repo")
@@ -113,9 +112,9 @@ class GalaxyCLI(CLI):
# options that apply to more than one action
if self.action in ['init', 'info']:
- self.parser.add_option( '--offline', dest='offline', default=False, action='store_true', help="Don't query the galaxy API when creating roles")
+ self.parser.add_option('--offline', dest='offline', default=False, action='store_true', help="Don't query the galaxy API when creating roles")
- if self.action not in ("delete","import","init","login","setup"):
+ if self.action not in ("delete", "import", "init", "login", "setup"):
# NOTE: while the option type=str, the default is a list, and the
# callback will set the value to a list.
self.parser.add_option('-p', '--roles-path', dest='roles_path', action="callback", callback=CLI.expand_paths, type=str,
@@ -123,15 +122,15 @@ class GalaxyCLI(CLI):
help='The path to the directory containing your roles. The default is the roles_path configured in your ansible.cfg '
'file (/etc/ansible/roles if not configured)')
- if self.action in ("init","install"):
+ if self.action in ("init", "install"):
self.parser.add_option('-f', '--force', dest='force', action='store_true', default=False, help='Force overwriting an existing role')
def parse(self):
''' create an options parser for bin/ansible '''
self.parser = CLI.base_parser(
- usage = "usage: %%prog [%s] [--help] [options] ..." % "|".join(self.VALID_ACTIONS),
- epilog = "\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
+ usage="usage: %%prog [%s] [--help] [options] ..." % "|".join(self.VALID_ACTIONS),
+ epilog="\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
)
# common
@@ -190,8 +189,8 @@ class GalaxyCLI(CLI):
creates the skeleton framework of a role that complies with the galaxy metadata format.
"""
- init_path = self.get_opt('init_path', './')
- force = self.get_opt('force', False)
+ init_path = self.get_opt('init_path', './')
+ force = self.get_opt('force', False)
role_skeleton = self.get_opt('role_skeleton', C.GALAXY_ROLE_SKELETON)
role_name = self.args.pop(0).strip() if self.args else None
@@ -203,9 +202,9 @@ class GalaxyCLI(CLI):
raise AnsibleError("- the path %s already exists, but is a file - aborting" % role_path)
elif not force:
raise AnsibleError("- the directory %s already exists."
- "you can use --force to re-initialize this directory,\n"
- "however it will reset any main.yml files that may have\n"
- "been modified there already." % role_path)
+ "you can use --force to re-initialize this directory,\n"
+ "however it will reset any main.yml files that may have\n"
+ "been modified there already." % role_path)
inject_data = dict(
role_name=role_name,
@@ -292,12 +291,12 @@ class GalaxyCLI(CLI):
role_info.update(gr.metadata)
req = RoleRequirement()
- role_spec= req.role_yaml_parse({'role': role})
+ role_spec = req.role_yaml_parse({'role': role})
if role_spec:
role_info.update(role_spec)
data = self._display_role_info(role_info)
- ### FIXME: This is broken in both 1.9 and 2.0 as
+ # FIXME: This is broken in both 1.9 and 2.0 as
# _display_role_info() always returns something
if not data:
data = u"\n- the role %s was not found" % role
@@ -310,7 +309,7 @@ class GalaxyCLI(CLI):
can be a name (which will be downloaded via the galaxy API and github), or it can be a local .tar.gz file.
"""
- role_file = self.get_opt("role_file", None)
+ role_file = self.get_opt("role_file", None)
if len(self.args) == 0 and role_file is None:
# the user needs to specify one of either --role-file
@@ -321,8 +320,8 @@ class GalaxyCLI(CLI):
# the role name on the command line
raise AnsibleOptionsError("- please specify a user/role name, or a roles file, but not both")
- no_deps = self.get_opt("no_deps", False)
- force = self.get_opt('force', False)
+ no_deps = self.get_opt("no_deps", False)
+ force = self.get_opt('force', False)
roles_left = []
if role_file:
@@ -511,7 +510,7 @@ class GalaxyCLI(CLI):
raise AnsibleError("Invalid query. At least one search term, platform, galaxy tag or author must be provided.")
response = self.api.search_roles(search, platforms=self.options.platforms,
- tags=self.options.galaxy_tags, author=self.options.author, page_size=page_size)
+ tags=self.options.galaxy_tags, author=self.options.author, page_size=page_size)
if response['count'] == 0:
display.display("No roles match your search.", color=C.COLOR_ERROR)
@@ -568,9 +567,9 @@ class GalaxyCLI(CLI):
""" used to import a role into Ansible Galaxy """
colors = {
- 'INFO': 'normal',
+ 'INFO': 'normal',
'WARNING': C.COLOR_WARN,
- 'ERROR': C.COLOR_ERROR,
+ 'ERROR': C.COLOR_ERROR,
'SUCCESS': C.COLOR_OK,
'FAILED': C.COLOR_ERROR,
}
@@ -589,19 +588,19 @@ class GalaxyCLI(CLI):
if len(task) > 1:
# found multiple roles associated with github_user/github_repo
- display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user,github_repo),
- color='yellow')
+ display.display("WARNING: More than one Galaxy role associated with Github repo %s/%s." % (github_user, github_repo),
+ color='yellow')
display.display("The following Galaxy roles are being updated:" + u'\n', color=C.COLOR_CHANGED)
for t in task:
- display.display('%s.%s' % (t['summary_fields']['role']['namespace'],t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
+ display.display('%s.%s' % (t['summary_fields']['role']['namespace'], t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
display.display(u'\nTo properly namespace this role, remove each of the above and re-import %s/%s from scratch' % (github_user, github_repo),
- color=C.COLOR_CHANGED)
+ color=C.COLOR_CHANGED)
return 0
# found a single role as expected
display.display("Successfully submitted import request %d" % task[0]['id'])
if not self.options.wait:
display.display("Role name: %s" % task[0]['summary_fields']['role']['name'])
- display.display("Repo: %s/%s" % (task[0]['github_user'],task[0]['github_repo']))
+ display.display("Repo: %s/%s" % (task[0]['github_user'], task[0]['github_repo']))
if self.options.check_status or self.options.wait:
# Get the status of the import
@@ -634,7 +633,7 @@ class GalaxyCLI(CLI):
display.display("---------- ---------- ----------", color=C.COLOR_OK)
for secret in secrets:
display.display("%-10s %-10s %s/%s" % (secret['id'], secret['source'], secret['github_user'],
- secret['github_repo']),color=C.COLOR_OK)
+ secret['github_repo']), color=C.COLOR_OK)
return 0
if self.options.remove_id:
@@ -672,7 +671,7 @@ class GalaxyCLI(CLI):
display.display("ID User Name")
display.display("------ --------------- ----------")
for role in resp['deleted_roles']:
- display.display("%-8s %-15s %s" % (role.id,role.namespace,role.name))
+ display.display("%-8s %-15s %s" % (role.id, role.namespace, role.name))
display.display(resp['status'])
@@ -692,4 +691,3 @@ class GalaxyCLI(CLI):
if os.pathsep in data:
data = data.split(os.pathsep)[0]
return data
-
diff --git a/lib/ansible/cli/playbook.py b/lib/ansible/cli/playbook.py
index d7ea029586..d6482c910b 100644
--- a/lib/ansible/cli/playbook.py
+++ b/lib/ansible/cli/playbook.py
@@ -36,18 +36,15 @@ except ImportError:
display = Display()
-#---------------------------------------------------------------------------------------------------
-
class PlaybookCLI(CLI):
''' the tool to run *Ansible playbooks*, which are a configuration and multinode deployment system.
See the project home page (https://docs.ansible.com) for more information. '''
-
def parse(self):
# create parser for CLI options
parser = CLI.base_parser(
- usage = "%prog [options] playbook.yml [playbook2 ...]",
+ usage="%prog [options] playbook.yml [playbook2 ...]",
connect_opts=True,
meta_opts=True,
runas_opts=True,
@@ -63,13 +60,13 @@ class PlaybookCLI(CLI):
# ansible playbook specific opts
parser.add_option('--list-tasks', dest='listtasks', action='store_true',
- help="list all tasks that would be executed")
+ help="list all tasks that would be executed")
parser.add_option('--list-tags', dest='listtags', action='store_true',
- help="list all available tags")
+ help="list all available tags")
parser.add_option('--step', dest='step', action='store_true',
- help="one-step-at-a-time: confirm each task before running")
+ help="one-step-at-a-time: confirm each task before running")
parser.add_option('--start-at-task', dest='start_at_task',
- help="start the playbook at the task matching this name")
+ help="start the playbook at the task matching this name")
self.parser = parser
super(PlaybookCLI, self).parse()
@@ -86,8 +83,8 @@ class PlaybookCLI(CLI):
# Note: slightly wrong, this is written so that implicit localhost
# Manage passwords
- sshpass = None
- becomepass = None
+ sshpass = None
+ becomepass = None
passwords = {}
# initial error check, to make sure all specified playbooks are accessible
@@ -102,7 +99,7 @@ class PlaybookCLI(CLI):
if not self.options.listhosts and not self.options.listtasks and not self.options.listtags and not self.options.syntax:
self.normalize_become_options()
(sshpass, becomepass) = self.ask_passwords()
- passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
+ passwords = {'conn_pass': sshpass, 'become_pass': becomepass}
loader, inventory, variable_manager = self._play_prereqs(self.options)
@@ -122,7 +119,6 @@ class PlaybookCLI(CLI):
# Invalid limit
raise AnsibleError("Specified --limit does not match any hosts")
-
# flush fact cache if requested
if self.options.flush_cache:
self._flush_cache(inventory, variable_manager)
diff --git a/lib/ansible/cli/pull.py b/lib/ansible/cli/pull.py
index e6347413a4..98f3a9ebfc 100644
--- a/lib/ansible/cli/pull.py
+++ b/lib/ansible/cli/pull.py
@@ -28,8 +28,8 @@ import socket
import sys
import time
-from ansible.errors import AnsibleOptionsError
from ansible.cli import CLI
+from ansible.errors import AnsibleOptionsError
from ansible.module_utils._text import to_native
from ansible.plugins import module_loader
from ansible.utils.cmd_functions import run_cmd
@@ -59,10 +59,10 @@ class PullCLI(CLI):
DEFAULT_PLAYBOOK = 'local.yml'
PLAYBOOK_ERRORS = {
1: 'File does not exist',
- 2: 'File is not readable'
+ 2: 'File is not readable',
}
SUPPORTED_REPO_MODULES = ['git']
- ARGUMENTS = { 'playbook.yml': 'The name of one the YAML format files to run as an Ansible playbook.'
+ ARGUMENTS = {'playbook.yml': 'The name of one the YAML format files to run as an Ansible playbook.'
'This can be a relative path within the checkout. By default, Ansible will'
"look for a playbook based on the host's fully-qualified domain name,"
'on the host hostname and finally a playbook named *local.yml*.', }
@@ -85,27 +85,28 @@ class PullCLI(CLI):
# options unique to pull
self.parser.add_option('--purge', default=False, action='store_true', help='purge checkout after playbook run')
self.parser.add_option('-o', '--only-if-changed', dest='ifchanged', default=False, action='store_true',
- help='only run the playbook if the repository has been updated')
+ help='only run the playbook if the repository has been updated')
self.parser.add_option('-s', '--sleep', dest='sleep', default=None,
- help='sleep for random interval (between 0 and n number of seconds) before starting. This is a useful way to disperse git requests')
+ help='sleep for random interval (between 0 and n number of seconds) before starting. '
+ 'This is a useful way to disperse git requests')
self.parser.add_option('-f', '--force', dest='force', default=False, action='store_true',
- help='run the playbook even if the repository could not be updated')
+ help='run the playbook even if the repository could not be updated')
self.parser.add_option('-d', '--directory', dest='dest', default=None, help='directory to checkout repository to')
self.parser.add_option('-U', '--url', dest='url', default=None, help='URL of the playbook repository')
self.parser.add_option('--full', dest='fullclone', action='store_true', help='Do a full clone, instead of a shallow one.')
self.parser.add_option('-C', '--checkout', dest='checkout',
- help='branch/tag/commit to checkout. Defaults to behavior of repository module.')
+ help='branch/tag/commit to checkout. Defaults to behavior of repository module.')
self.parser.add_option('--accept-host-key', default=False, dest='accept_host_key', action='store_true',
- help='adds the hostkey for the repo url if not already added')
+ help='adds the hostkey for the repo url if not already added')
self.parser.add_option('-m', '--module-name', dest='module_name', default=self.DEFAULT_REPO_TYPE,
- help='Repository module name, which ansible will use to check out the repo. Default is %s.' % self.DEFAULT_REPO_TYPE)
+ help='Repository module name, which ansible will use to check out the repo. Default is %s.' % self.DEFAULT_REPO_TYPE)
self.parser.add_option('--verify-commit', dest='verify', default=False, action='store_true',
- help='verify GPG signature of checked out commit, if it fails abort running the playbook.'
- ' This needs the corresponding VCS module to support such an operation')
+ help='verify GPG signature of checked out commit, if it fails abort running the playbook. '
+ 'This needs the corresponding VCS module to support such an operation')
self.parser.add_option('--clean', dest='clean', default=False, action='store_true',
- help='modified files in the working repository will be discarded')
+ help='modified files in the working repository will be discarded')
self.parser.add_option('--track-subs', dest='tracksubs', default=False, action='store_true',
- help='submodules will track the latest changes. This is equivalent to specifying the --remote flag to git submodule update')
+ help='submodules will track the latest changes. This is equivalent to specifying the --remote flag to git submodule update')
# for pull we don't want a default
self.parser.set_defaults(inventory=None)
@@ -120,7 +121,7 @@ class PullCLI(CLI):
if self.options.sleep:
try:
- secs = random.randint(0,int(self.options.sleep))
+ secs = random.randint(0, int(self.options.sleep))
self.options.sleep = secs
except ValueError:
raise AnsibleOptionsError("%s is not a number." % self.options.sleep)
@@ -151,7 +152,7 @@ class PullCLI(CLI):
limit_opts = 'localhost,%s,127.0.0.1' % ','.join(set([host, node, host.split('.')[0], node.split('.')[0]]))
base_opts = '-c local '
if self.options.verbosity > 0:
- base_opts += ' -%s' % ''.join([ "v" for x in range(0, self.options.verbosity) ])
+ base_opts += ' -%s' % ''.join(["v" for x in range(0, self.options.verbosity)])
# Attempt to use the inventory passed in as an argument
# It might not yet have been downloaded so use localhost as default
@@ -165,7 +166,7 @@ class PullCLI(CLI):
else:
inv_opts = "-i 'localhost,'"
- #FIXME: enable more repo modules hg/svn?
+ # FIXME: enable more repo modules hg/svn?
if self.options.module_name == 'git':
repo_opts = "name=%s dest=%s" % (self.options.url, self.options.dest)
if self.options.checkout:
diff --git a/lib/ansible/cli/vault.py b/lib/ansible/cli/vault.py
index a6ff8315fd..bda1fcc9e7 100644
--- a/lib/ansible/cli/vault.py
+++ b/lib/ansible/cli/vault.py
@@ -22,11 +22,11 @@ __metaclass__ = type
import os
import sys
+from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
+from ansible.module_utils._text import to_text, to_bytes
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import VaultEditor
-from ansible.cli import CLI
-from ansible.module_utils._text import to_text, to_bytes
try:
from __main__ import display
@@ -95,9 +95,9 @@ class VaultCLI(CLI):
self.parser = CLI.base_parser(
vault_opts=True,
- usage = "usage: %%prog [%s] [options] [vaultfile.yml]" % "|".join(self.VALID_ACTIONS),
- desc = "encryption/decryption utility for Ansbile data files",
- epilog = "\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
+ usage="usage: %%prog [%s] [options] [vaultfile.yml]" % "|".join(self.VALID_ACTIONS),
+ desc="encryption/decryption utility for Ansbile data files",
+ epilog="\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
)
self.set_action()
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index 31d4343afd..666d042cf4 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -30,7 +30,7 @@ from ansible.module_utils._text import to_text
from ansible.parsing.quoting import unquote
from ansible.utils.path import makedirs_safe
-BOOL_TRUE = frozenset( [ "true", "t", "y", "1", "yes", "on" ] )
+BOOL_TRUE = frozenset(["true", "t", "y", "1", "yes", "on"])
def mk_boolean(value):
@@ -173,48 +173,48 @@ p, CONFIG_FILE = load_config_file()
# non configurable but used as defaults
BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
# the default whitelist for cow stencils
-DEFAULT_COW_WHITELIST = [ 'bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant',
- 'eyes', 'hellokitty', 'kitty', 'luke-koala', 'meow', 'milk', 'moofasa', 'moose', 'ren', 'sheep',
- 'small', 'stegosaurus', 'stimpy', 'supermilker', 'three-eyes', 'turkey', 'turtle', 'tux', 'udder',
- 'vader-koala', 'vader', 'www', ]
+DEFAULT_COW_WHITELIST = ['bud-frogs', 'bunny', 'cheese', 'daemon', 'default', 'dragon', 'elephant-in-snake', 'elephant',
+ 'eyes', 'hellokitty', 'kitty', 'luke-koala', 'meow', 'milk', 'moofasa', 'moose', 'ren', 'sheep',
+ 'small', 'stegosaurus', 'stimpy', 'supermilker', 'three-eyes', 'turkey', 'turtle', 'tux', 'udder',
+ 'vader-koala', 'vader', 'www']
# sections in config file
-DEFAULTS='defaults'
+DEFAULTS = 'defaults'
-#### DEPRECATED VARS ### # FIXME: add deprecation warning when these get set
-#none left now
+# DEPRECATED VARS # FIXME: add deprecation warning when these get set
+# none left now
-#### DEPRECATED FEATURE TOGGLES: these will eventually be removed as it becomes the standard ####
+# DEPRECATED FEATURE TOGGLES: these will eventually be removed as it becomes the standard
# If --tags or --skip-tags is given multiple times on the CLI and this is True, merge the lists of tags together.
# If False, let the last argument overwrite any previous ones.
# Behaviour is overwrite through 2.2. 2.3 overwrites but prints deprecation. 2.4 the default is to merge.
-MERGE_MULTIPLE_CLI_TAGS = get_config(p, DEFAULTS, 'merge_multiple_cli_tags', 'ANSIBLE_MERGE_MULTIPLE_CLI_TAGS', True, value_type='boolean')
+MERGE_MULTIPLE_CLI_TAGS = get_config(p, DEFAULTS, 'merge_multiple_cli_tags', 'ANSIBLE_MERGE_MULTIPLE_CLI_TAGS', True, value_type='boolean')
# Controls which 'precedence path' to take, remove when decide on which!
-SOURCE_OVER_GROUPS = get_config(p, 'vars', 'source_over_groups', 'ANSIBLE_SOURCE_OVER_GROUPS', True, value_type='boolean')
-
-#### GENERALLY CONFIGURABLE THINGS ####
-DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, value_type='boolean')
-DEFAULT_VERBOSITY = get_config(p, DEFAULTS, 'verbosity', 'ANSIBLE_VERBOSITY', 0, value_type='integer')
-DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH',
- '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles', value_type='pathlist', expand_relative_paths=True)
-DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '~/.ansible/tmp')
-DEFAULT_LOCAL_TMP = get_config(p, DEFAULTS, 'local_tmp', 'ANSIBLE_LOCAL_TEMP', '~/.ansible/tmp', value_type='tmppath')
-DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
-DEFAULT_FACT_PATH = get_config(p, DEFAULTS, 'fact_path', 'ANSIBLE_FACT_PATH', None, value_type='path')
-DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, value_type='integer')
-DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
-DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', os.getenv('LANG', 'en_US.UTF-8'))
-DEFAULT_MODULE_SET_LOCALE = get_config(p, DEFAULTS, 'module_set_locale','ANSIBLE_MODULE_SET_LOCALE',False, value_type='boolean')
-DEFAULT_MODULE_COMPRESSION= get_config(p, DEFAULTS, 'module_compression', None, 'ZIP_DEFLATED')
-DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10, value_type='integer')
-DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15, value_type='integer')
-DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', None)
-DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, value_type='boolean')
-DEFAULT_PRIVATE_KEY_FILE = get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None, value_type='path')
-DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, value_type='integer')
-DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, value_type='boolean')
+SOURCE_OVER_GROUPS = get_config(p, 'vars', 'source_over_groups', 'ANSIBLE_SOURCE_OVER_GROUPS', True, value_type='boolean')
+
+# GENERALLY CONFIGURABLE THINGS
+DEFAULT_DEBUG = get_config(p, DEFAULTS, 'debug', 'ANSIBLE_DEBUG', False, value_type='boolean')
+DEFAULT_VERBOSITY = get_config(p, DEFAULTS, 'verbosity', 'ANSIBLE_VERBOSITY', 0, value_type='integer')
+DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH',
+ '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles', value_type='pathlist', expand_relative_paths=True)
+DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '~/.ansible/tmp')
+DEFAULT_LOCAL_TMP = get_config(p, DEFAULTS, 'local_tmp', 'ANSIBLE_LOCAL_TEMP', '~/.ansible/tmp', value_type='tmppath')
+DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
+DEFAULT_FACT_PATH = get_config(p, DEFAULTS, 'fact_path', 'ANSIBLE_FACT_PATH', None, value_type='path')
+DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, value_type='integer')
+DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
+DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', os.getenv('LANG', 'en_US.UTF-8'))
+DEFAULT_MODULE_SET_LOCALE = get_config(p, DEFAULTS, 'module_set_locale', 'ANSIBLE_MODULE_SET_LOCALE', False, value_type='boolean')
+DEFAULT_MODULE_COMPRESSION = get_config(p, DEFAULTS, 'module_compression', None, 'ZIP_DEFLATED')
+DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10, value_type='integer')
+DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15, value_type='integer')
+DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', None)
+DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, value_type='boolean')
+DEFAULT_PRIVATE_KEY_FILE = get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None, value_type='path')
+DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, value_type='integer')
+DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, value_type='boolean')
DEFAULT_VAULT_PASSWORD_FILE = get_config(p, DEFAULTS, 'vault_password_file', 'ANSIBLE_VAULT_PASSWORD_FILE', None, value_type='path')
DEFAULT_TRANSPORT = get_config(p, DEFAULTS, 'transport', 'ANSIBLE_TRANSPORT', 'smart')
DEFAULT_SCP_IF_SSH = get_config(p, 'ssh_connection', 'scp_if_ssh', 'ANSIBLE_SCP_IF_SSH', 'smart')
@@ -226,12 +226,12 @@ DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBL
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
DEFAULT_PRIVATE_ROLE_VARS = get_config(p, DEFAULTS, 'private_role_vars', 'ANSIBLE_PRIVATE_ROLE_VARS', False, value_type='boolean')
DEFAULT_JINJA2_EXTENSIONS = get_config(p, DEFAULTS, 'jinja2_extensions', 'ANSIBLE_JINJA2_EXTENSIONS', None)
-DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECUTABLE', '/bin/sh')
-DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
-DEFAULT_GATHER_SUBSET = get_config(p, DEFAULTS, 'gather_subset', 'ANSIBLE_GATHER_SUBSET', 'all').lower()
-DEFAULT_GATHER_TIMEOUT = get_config(p, DEFAULTS, 'gather_timeout', 'ANSIBLE_GATHER_TIMEOUT', 10, value_type='integer')
-DEFAULT_LOG_PATH = get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', '', value_type='path')
-DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, value_type='boolean')
+DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECUTABLE', '/bin/sh')
+DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
+DEFAULT_GATHER_SUBSET = get_config(p, DEFAULTS, 'gather_subset', 'ANSIBLE_GATHER_SUBSET', 'all').lower()
+DEFAULT_GATHER_TIMEOUT = get_config(p, DEFAULTS, 'gather_timeout', 'ANSIBLE_GATHER_TIMEOUT', 10, value_type='integer')
+DEFAULT_LOG_PATH = get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', '', value_type='path')
+DEFAULT_FORCE_HANDLERS = get_config(p, DEFAULTS, 'force_handlers', 'ANSIBLE_FORCE_HANDLERS', False, value_type='boolean')
DEFAULT_VAR_COMPRESSION_LEVEL = get_config(p, DEFAULTS, 'var_compression_level', 'ANSIBLE_VAR_COMPRESSION_LEVEL', 0, value_type='integer')
DEFAULT_INTERNAL_POLL_INTERVAL = get_config(p, DEFAULTS, 'internal_poll_interval', None, 0.001, value_type='float')
DEFAULT_ALLOW_UNSAFE_LOOKUPS = get_config(p, DEFAULTS, 'allow_unsafe_lookups', None, False, value_type='boolean')
@@ -240,16 +240,16 @@ SHOW_CUSTOM_STATS = get_config(p, DEFAULTS, 'show_custom_stats', 'ANSIBLE_SHOW_C
NAMESPACE_FACTS = get_config(p, DEFAULTS, 'restrict_facts_namespace', 'ANSIBLE_RESTRICT_FACTS', False, value_type='boolean')
# Inventory
-DEFAULT_HOST_LIST = get_config(p, DEFAULTS,'inventory', 'ANSIBLE_INVENTORY', '/etc/ansible/hosts', value_type='path', expand_relative_paths=True)
-INVENTORY_ENABLED = get_config(p, DEFAULTS,'inventory_enabled', 'ANSIBLE_INVENTORY_ENABLED',
- [ 'host_list', 'script', 'ini', 'yaml' ], value_type='list')
-INVENTORY_IGNORE_EXTS = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE',
- BLACKLIST_EXTS + (".orig", ".ini", ".cfg", ".retry"), value_type='list')
+DEFAULT_HOST_LIST = get_config(p, DEFAULTS, 'inventory', 'ANSIBLE_INVENTORY', '/etc/ansible/hosts', value_type='path', expand_relative_paths=True)
+INVENTORY_ENABLED = get_config(p, DEFAULTS, 'inventory_enabled', 'ANSIBLE_INVENTORY_ENABLED',
+ ['host_list', 'script', 'ini', 'yaml'], value_type='list')
+INVENTORY_IGNORE_EXTS = get_config(p, DEFAULTS, 'inventory_ignore_extensions', 'ANSIBLE_INVENTORY_IGNORE',
+ BLACKLIST_EXTS + (".orig", ".ini", ".cfg", ".retry"), value_type='list')
INVENTORY_IGNORE_PATTERNS = get_config(p, DEFAULTS, 'inventory_ignore_patterns', 'ANSIBLE_INVENTORY_IGNORE_REGEX', [], value_type='list')
-VARIABLE_PRECEDENCE = get_config(p, DEFAULTS, 'precedence', 'ANSIBLE_PRECEDENCE',
- ['all_inventory', 'groups_inventory', 'all_plugins_inventory', 'all_plugins_play',
- 'groups_plugins_inventory', 'groups_plugins_play'],
- value_type='list')
+VARIABLE_PRECEDENCE = get_config(p, DEFAULTS, 'precedence', 'ANSIBLE_PRECEDENCE',
+ ['all_inventory', 'groups_inventory', 'all_plugins_inventory', 'all_plugins_play',
+ 'groups_plugins_inventory', 'groups_plugins_play'],
+ value_type='list')
# Static includes
DEFAULT_TASK_INCLUDES_STATIC = get_config(p, DEFAULTS, 'task_includes_static', 'ANSIBLE_TASK_INCLUDES_STATIC', False, value_type='boolean')
DEFAULT_HANDLER_INCLUDES_STATIC = get_config(p, DEFAULTS, 'handler_includes_static', 'ANSIBLE_HANDLER_INCLUDES_STATIC', False, value_type='boolean')
@@ -455,4 +455,4 @@ IGNORE_FILES = ["COPYING", "CONTRIBUTING", "LICENSE", "README", "VERSION", "GUID
INTERNAL_RESULT_KEYS = ['add_host', 'add_group']
RESTRICTED_RESULT_KEYS = ['ansible_rsync_path', 'ansible_playbook_python']
# check all of these extensions when looking for 'variable' files which should be YAML or JSON.
-YAML_FILENAME_EXTENSIONS = [ ".yml", ".yaml", ".json" ]
+YAML_FILENAME_EXTENSIONS = [".yml", ".yaml", ".json"]
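For orientation amid the whitespace-only churn above: BOOL_TRUE feeds the mk_boolean() helper, whose body lies outside this hunk. A minimal sketch of how such a truthy-string check typically behaves (the upstream implementation may differ in details):

BOOL_TRUE = frozenset(["true", "t", "y", "1", "yes", "on"])

def mk_boolean(value):
    # Hypothetical sketch: normalise to a lowercase string and test membership.
    if value is None:
        return False
    return str(value).strip().lower() in BOOL_TRUE

assert mk_boolean("Yes") is True
assert mk_boolean(0) is False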
diff --git a/lib/ansible/inventory/data.py b/lib/ansible/inventory/data.py
index a0d3108617..dba7523bc4 100644
--- a/lib/ansible/inventory/data.py
+++ b/lib/ansible/inventory/data.py
@@ -20,8 +20,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
-import sys
import re
+import sys
from ansible import constants as C
from ansible.errors import AnsibleError
@@ -39,6 +39,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class InventoryData(object):
"""
Holds inventory data (host and group objects).
@@ -92,8 +93,8 @@ class InventoryData(object):
if not py_interp:
# sys.executable is not set in some cornercases. #13585
py_interp = '/usr/bin/python'
- display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default.'
- ' You can correct this by setting ansible_python_interpreter for localhost')
+ display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default. '
+ 'You can correct this by setting ansible_python_interpreter for localhost')
new_host.set_variable("ansible_python_interpreter", py_interp)
if "ansible_connection" not in new_host.vars:
@@ -103,7 +104,6 @@ class InventoryData(object):
return new_host
-
def _scan_groups_for_host(self, hostname, localhost=False):
''' in case something did not update inventory correctly, fallback to group scan '''
@@ -121,7 +121,6 @@ class InventoryData(object):
return found
-
def reconcile_inventory(self):
''' Ensure inventory basic rules, run after updates '''
@@ -190,7 +189,6 @@ class InventoryData(object):
return matching_host
-
def add_group(self, group):
''' adds a group to inventory if not there already '''
@@ -215,7 +213,7 @@ class InventoryData(object):
if host not in self.hosts:
h = Host(host, port)
self.hosts[host] = h
- if self.current_source: # set to 'first source' in which host was encountered
+ if self.current_source: # set to 'first source' in which host was encountered
self.set_variable(host, 'inventory_file', os.path.basename(self.current_source))
self.set_variable(host, 'inventory_dir', basedir(self.current_source))
else:
@@ -236,8 +234,7 @@ class InventoryData(object):
if g and host not in g.get_hosts():
g.add_host(h)
self._groups_dict_cache = {}
- display.debug("Added host %s to group %s" % (host,group))
-
+ display.debug("Added host %s to group %s" % (host, group))
def set_variable(self, entity, varname, value):
''' sets a varible for an inventory object '''
@@ -252,7 +249,6 @@ class InventoryData(object):
inv_object.set_variable(varname, value)
display.debug('set %s for %s' % (varname, entity))
-
def add_child(self, group, child):
''' Add host or group to group '''
@@ -278,4 +274,3 @@ class InventoryData(object):
self._groups_dict_cache[group_name] = [h.name for h in group.get_hosts()]
return self._groups_dict_cache
-
diff --git a/lib/ansible/inventory/group.py b/lib/ansible/inventory/group.py
index b1059122e6..3469b360f7 100644
--- a/lib/ansible/inventory/group.py
+++ b/lib/ansible/inventory/group.py
@@ -20,10 +20,11 @@ __metaclass__ = type
from ansible.errors import AnsibleError
from ansible.utils.vars import combine_vars
+
class Group:
''' a group of ansible hosts '''
- #__slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
+ # __slots__ = [ 'name', 'hosts', 'vars', 'child_groups', 'parent_groups', 'depth', '_hosts_cache' ]
def __init__(self, name=None):
@@ -87,7 +88,7 @@ class Group:
self.child_groups.append(group)
# update the depth of the child
- group.depth = max([self.depth+1, group.depth])
+ group.depth = max([self.depth + 1, group.depth])
# update the depth of the grandchildren
group._check_children_depth()
@@ -105,7 +106,7 @@ class Group:
try:
for group in self.child_groups:
- group.depth = max([self.depth+1, group.depth])
+ group.depth = max([self.depth + 1, group.depth])
group._check_children_depth()
except RuntimeError:
raise AnsibleError("The group named '%s' has a recursive dependency loop." % self.name)
@@ -180,6 +181,5 @@ class Group:
try:
self.priority = int(priority)
except TypeError:
- #FIXME: warn about invalid priority
+ # FIXME: warn about invalid priority
pass
-
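The depth arithmetic reformatted above (self.depth + 1) encodes a simple invariant: a child group sits at least one level deeper than its deepest parent, and _check_children_depth() pushes that rule down to grandchildren. In isolation:

# Sketch of the rule applied by add_child_group(); the numbers are illustrative.
parent_depth, child_depth = 2, 1
child_depth = max([parent_depth + 1, child_depth])
assert child_depth == 3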
diff --git a/lib/ansible/inventory/host.py b/lib/ansible/inventory/host.py
index 03927cc2eb..a92835290a 100644
--- a/lib/ansible/inventory/host.py
+++ b/lib/ansible/inventory/host.py
@@ -24,10 +24,11 @@ from ansible.utils.vars import combine_vars, get_unique_id
__all__ = ['Host']
+
class Host:
''' a single ansible host '''
- #__slots__ = [ 'name', 'vars', 'groups' ]
+ # __slots__ = [ 'name', 'vars', 'groups' ]
def __getstate__(self):
return self.serialize()
@@ -69,11 +70,11 @@ class Host:
def deserialize(self, data):
self.__init__(gen_uuid=False)
- self.name = data.get('name')
- self.vars = data.get('vars', dict())
+ self.name = data.get('name')
+ self.vars = data.get('vars', dict())
self.address = data.get('address', '')
- self._uuid = data.get('uuid', None)
- self.implicit= data.get('implicit', False)
+ self._uuid = data.get('uuid', None)
+ self.implicit = data.get('implicit', False)
groups = data.get('groups', [])
for group_data in groups:
@@ -100,7 +101,6 @@ class Host:
def get_name(self):
return self.name
-
def populate_ancestors(self):
# populate ancestors
for group in self.groups:
@@ -131,9 +131,8 @@ class Host:
else:
self.remove_group(oldg)
-
def set_variable(self, key, value):
- self.vars[key]=value
+ self.vars[key] = value
def get_groups(self):
return self.groups
@@ -142,10 +141,9 @@ class Host:
results = {}
results['inventory_hostname'] = self.name
results['inventory_hostname_short'] = self.name.split('.')[0]
- results['group_names'] = sorted([ g.name for g in self.get_groups() if g.name != 'all'])
+ results['group_names'] = sorted([g.name for g in self.get_groups() if g.name != 'all'])
return combine_vars(self.vars, results)
def get_vars(self):
return combine_vars(self.vars, self.get_magic_vars())
-
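get_magic_vars(), touched above, assembles the per-host "magic" variables that get merged with the host's own vars. As an illustration (the host name and groups are made up), the dict built by the visible lines would look like:

# Illustrative only -- real values depend on the inventory.
magic_vars = {
    'inventory_hostname': 'web1.example.com',
    'inventory_hostname_short': 'web1',        # self.name.split('.')[0]
    'group_names': ['staging', 'webservers'],  # sorted, with 'all' excluded
}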
diff --git a/lib/ansible/inventory/manager.py b/lib/ansible/inventory/manager.py
index 0dcc6b13c4..4192d0aaa0 100644
--- a/lib/ansible/inventory/manager.py
+++ b/lib/ansible/inventory/manager.py
@@ -42,11 +42,12 @@ except ImportError:
HOSTS_PATTERNS_CACHE = {}
IGNORED_ALWAYS = [b"^\.", b"^host_vars$", b"^group_vars$", b"^vars_plugins$"]
-IGNORED_PATTERNS = [ to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS ]
+IGNORED_PATTERNS = [to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS]
IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS]
IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))
+
def order_patterns(patterns):
''' takes a list of patterns and reorders them by modifier to apply them consistently '''
@@ -114,6 +115,7 @@ def split_host_pattern(pattern):
return [p.strip() for p in patterns]
+
class InventoryManager(object):
''' Creates and manages inventory '''
@@ -135,7 +137,7 @@ class InventoryManager(object):
if sources is None:
self._sources = []
elif isinstance(sources, string_types):
- self._sources = [ sources ]
+ self._sources = [sources]
else:
self._sources = sources
@@ -175,7 +177,7 @@ class InventoryManager(object):
def _setup_inventory_plugins(self):
''' sets up loaded inventory plugins for usage '''
- inventory_loader = PluginLoader( 'InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
+ inventory_loader = PluginLoader('InventoryModule', 'ansible.plugins.inventory', 'inventory_plugins', 'inventory_plugins')
display.vvvv('setting up inventory plugins')
for name in C.INVENTORY_ENABLED:
@@ -197,7 +199,7 @@ class InventoryManager(object):
if source:
if ',' not in source:
- source = unfrackpath(source, follow=False)
+ source = unfrackpath(source, follow=False)
parse = self.parse_source(source, cache=cache)
if parse and not parsed:
parsed = True
@@ -257,7 +259,7 @@ class InventoryManager(object):
display.vvv(u'Parsed %s inventory source with %s plugin' % (to_text(source), plugin_name))
break
except AnsibleParserError as e:
- failures.append(u'\n* Failed to parse %s with %s inventory plugin: %s\n' %(to_text(source), plugin_name, to_text(e)))
+ failures.append(u'\n* Failed to parse %s with %s inventory plugin: %s\n' % (to_text(source), plugin_name, to_text(e)))
else:
display.debug(u'%s did not meet %s requirements' % (to_text(source), plugin_name))
else:
@@ -279,7 +281,7 @@ class InventoryManager(object):
global HOSTS_PATTERNS_CACHE
HOSTS_PATTERNS_CACHE = {}
self._pattern_cache = {}
- #FIXME: flush inventory cache
+ # FIXME: flush inventory cache
def refresh_inventory(self):
''' recalculate inventory '''
@@ -340,11 +342,11 @@ class InventoryManager(object):
if not ignore_limits and self._subset:
# exclude hosts not in a subset, if defined
subset = self._evaluate_patterns(self._subset)
- hosts = [ h for h in hosts if h in subset ]
+ hosts = [h for h in hosts if h in subset]
if not ignore_restrictions and self._restriction:
# exclude hosts mentioned in any restriction (ex: failed hosts)
- hosts = [ h for h in hosts if h.name in self._restriction ]
+ hosts = [h for h in hosts if h.name in self._restriction]
seen = set()
HOSTS_PATTERNS_CACHE[pattern_hash] = [x for x in hosts if x not in seen and not seen.add(x)]
@@ -365,7 +367,6 @@ class InventoryManager(object):
return hosts
-
def _evaluate_patterns(self, patterns):
"""
Takes a list of patterns and returns a list of matching host names,
@@ -382,11 +383,11 @@ class InventoryManager(object):
else:
that = self._match_one_pattern(p)
if p.startswith("!"):
- hosts = [ h for h in hosts if h not in that ]
+ hosts = [h for h in hosts if h not in that]
elif p.startswith("&"):
- hosts = [ h for h in hosts if h in that ]
+ hosts = [h for h in hosts if h in that]
else:
- to_append = [ h for h in that if h.name not in [ y.name for y in hosts ] ]
+ to_append = [h for h in that if h.name not in [y.name for y in hosts]]
hosts.extend(to_append)
return hosts
@@ -500,10 +501,10 @@ class InventoryManager(object):
if end:
if end == -1:
- end = len(hosts)-1
- return hosts[start:end+1]
+ end = len(hosts) - 1
+ return hosts[start:end + 1]
else:
- return [ hosts[start] ]
+ return [hosts[start]]
def _enumerate_matches(self, pattern):
"""
@@ -539,13 +540,13 @@ class InventoryManager(object):
matched = True
if not matched:
- display.warning("Could not match supplied host pattern, ignoring: %s" % pattern)
+ display.warning("Could not match supplied host pattern, ignoring: %s" % pattern)
return results
def list_hosts(self, pattern="all"):
""" return a list of hostnames for a pattern """
- #FIXME: cache?
- result = [ h for h in self.get_hosts(pattern) ]
+ # FIXME: cache?
+ result = [h for h in self.get_hosts(pattern)]
# allow implicit localhost if pattern matches and no other results
if len(result) == 0 and pattern in C.LOCALHOST:
@@ -554,7 +555,7 @@ class InventoryManager(object):
return result
def list_groups(self):
- #FIXME: cache?
+ # FIXME: cache?
return sorted(self._inventory.groups.keys(), key=lambda x: x)
def restrict_to_hosts(self, restriction):
@@ -566,8 +567,8 @@ class InventoryManager(object):
if restriction is None:
return
elif not isinstance(restriction, list):
- restriction = [ restriction ]
- self._restriction = [ h.name for h in restriction ]
+ restriction = [restriction]
+ self._restriction = [h.name for h in restriction]
def subset(self, subset_pattern):
"""
diff --git a/lib/ansible/module_utils/_text.py b/lib/ansible/module_utils/_text.py
index 8d6a9d4f19..81943bbea8 100644
--- a/lib/ansible/module_utils/_text.py
+++ b/lib/ansible/module_utils/_text.py
@@ -45,8 +45,8 @@ except LookupError:
_COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_escape',
- 'surrogate_or_strict',
- 'surrogate_then_replace'))
+ 'surrogate_or_strict',
+ 'surrogate_then_replace'))
def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
diff --git a/lib/ansible/module_utils/a10.py b/lib/ansible/module_utils/a10.py
index 389ef4aba5..1a1b9f9d81 100644
--- a/lib/ansible/module_utils/a10.py
+++ b/lib/ansible/module_utils/a10.py
@@ -44,6 +44,7 @@ AXAPI_VPORT_PROTOCOLS = {
'https': 12,
}
+
def a10_argument_spec():
return dict(
host=dict(type='str', required=True),
@@ -52,11 +53,13 @@ def a10_argument_spec():
write_config=dict(type='bool', default=False)
)
+
def axapi_failure(result):
if 'response' in result and result['response'].get('status') == 'fail':
return True
return False
+
def axapi_call(module, url, post=None):
'''
Returns a datastructure based on the result of the API call
@@ -81,6 +84,7 @@ def axapi_call(module, url, post=None):
rsp.close()
return data
+
def axapi_authenticate(module, base_url, username, password):
url = '%s&method=authenticate&username=%s&password=%s' % (base_url, username, password)
result = axapi_call(module, url)
@@ -89,6 +93,7 @@ def axapi_authenticate(module, base_url, username, password):
sessid = result['session_id']
return base_url + '&session_id=' + sessid
+
def axapi_authenticate_v3(module, base_url, username, password):
url = base_url
auth_payload = {"credentials": {"username": username, "password": password}}
@@ -98,6 +103,7 @@ def axapi_authenticate_v3(module, base_url, username, password):
signature = result['authresponse']['signature']
return signature
+
def axapi_call_v3(module, url, method=None, body=None, signature=None):
'''
Returns a datastructure based on the result of the API call
@@ -126,6 +132,7 @@ def axapi_call_v3(module, url, method=None, body=None, signature=None):
rsp.close()
return data
+
def axapi_enabled_disabled(flag):
'''
The axapi uses 0/1 integer values for flags, rather than strings
@@ -137,8 +144,10 @@ def axapi_enabled_disabled(flag):
else:
return 0
+
def axapi_get_port_protocol(protocol):
return AXAPI_PORT_PROTOCOLS.get(protocol.lower(), None)
+
def axapi_get_vport_protocol(protocol):
return AXAPI_VPORT_PROTOCOLS.get(protocol.lower(), None)
diff --git a/lib/ansible/module_utils/ansible_tower.py b/lib/ansible/module_utils/ansible_tower.py
index eefbf76753..90de99b64f 100644
--- a/lib/ansible/module_utils/ansible_tower.py
+++ b/lib/ansible/module_utils/ansible_tower.py
@@ -79,9 +79,9 @@ def tower_check_mode(module):
def tower_argument_spec():
return dict(
- tower_host = dict(),
- tower_username = dict(),
- tower_password = dict(no_log=True),
- tower_verify_ssl = dict(type='bool', default=True),
- tower_config_file = dict(type='path'),
+ tower_host=dict(),
+ tower_username=dict(),
+ tower_password=dict(no_log=True),
+ tower_verify_ssl=dict(type='bool', default=True),
+ tower_config_file=dict(type='path'),
)
diff --git a/lib/ansible/module_utils/aos.py b/lib/ansible/module_utils/aos.py
index e0c00ea06b..1e00c7bbeb 100644
--- a/lib/ansible/module_utils/aos.py
+++ b/lib/ansible/module_utils/aos.py
@@ -37,8 +37,8 @@ from ansible.module_utils.aos import *
"""
import json
-from distutils.version import LooseVersion
from ansible.module_utils.pycompat24 import get_exception
+from distutils.version import LooseVersion
try:
import yaml
@@ -53,6 +53,7 @@ try:
except ImportError:
HAS_AOS_PYEZ = False
+
def check_aos_version(module, min=False):
"""
Check if the library aos-pyez is present.
@@ -71,6 +72,7 @@ def check_aos_version(module, min=False):
return True
+
def get_aos_session(module, auth):
"""
Resume an existing session and return an AOS object.
@@ -94,6 +96,7 @@ def get_aos_session(module, auth):
return aos
+
def find_collection_item(collection, item_name=False, item_id=False):
"""
Find collection_item based on name or id from a collection object
@@ -114,6 +117,7 @@ def find_collection_item(collection, item_name=False, item_id=False):
else:
return my_dict
+
def content_to_dict(module, content):
"""
Convert 'content' into a Python Dict based on 'content_format'
@@ -144,12 +148,12 @@ def content_to_dict(module, content):
except:
module.fail_json(msg="Unable to convert 'content' to a dict, please check if valid")
-
# replace the string with the dict
module.params['content'] = content_dict
return content_dict
+
def do_load_resource(module, collection, name):
"""
Create a new object (collection.item) by loading a datastructure directly
@@ -161,10 +165,7 @@ def do_load_resource(module, collection, name):
module.fail_json(msg="An error occurred while running 'find_collection_item'")
if item.exists:
- module.exit_json( changed=False,
- name=item.name,
- id=item.id,
- value=item.value )
+ module.exit_json(changed=False, name=item.name, id=item.id, value=item.value)
# If not in check mode, apply the changes
if not module.check_mode:
@@ -175,7 +176,4 @@ def do_load_resource(module, collection, name):
e = get_exception()
module.fail_json(msg="Unable to write item content : %r" % e)
- module.exit_json( changed=True,
- name=item.name,
- id=item.id,
- value=item.value )
+ module.exit_json(changed=True, name=item.name, id=item.id, value=item.value)
diff --git a/lib/ansible/module_utils/api.py b/lib/ansible/module_utils/api.py
index 4b0f6030d5..d05213914e 100644
--- a/lib/ansible/module_utils/api.py
+++ b/lib/ansible/module_utils/api.py
@@ -40,6 +40,7 @@ The 'api' module provides the following common argument specs:
"""
import time
+
def rate_limit_argument_spec(spec=None):
"""Creates an argument spec for working with rate limiting"""
arg_spec = (dict(
@@ -50,6 +51,7 @@ def rate_limit_argument_spec(spec=None):
arg_spec.update(spec)
return arg_spec
+
def retry_argument_spec(spec=None):
"""Creates an argument spec for working with retrying"""
arg_spec = (dict(
@@ -60,41 +62,48 @@ def retry_argument_spec(spec=None):
arg_spec.update(spec)
return arg_spec
+
def basic_auth_argument_spec(spec=None):
arg_spec = (dict(
- api_username=dict(type='str', required=False),
- api_password=dict(type='str', required=False, no_log=True),
- api_url=dict(type='str', required=False),
+ api_username=dict(type='str'),
+ api_password=dict(type='str', no_log=True),
+ api_url=dict(type='str'),
validate_certs=dict(type='bool', default=True)
))
if spec:
arg_spec.update(spec)
return arg_spec
+
def rate_limit(rate=None, rate_limit=None):
"""rate limiting decorator"""
minrate = None
if rate is not None and rate_limit is not None:
minrate = float(rate_limit) / float(rate)
+
def wrapper(f):
last = [0.0]
- def ratelimited(*args,**kwargs):
+
+ def ratelimited(*args, **kwargs):
if minrate is not None:
elapsed = time.clock() - last[0]
left = minrate - elapsed
if left > 0:
time.sleep(left)
last[0] = time.clock()
- ret = f(*args,**kwargs)
+ ret = f(*args, **kwargs)
return ret
+
return ratelimited
return wrapper
+
def retry(retries=None, retry_pause=1):
"""Retry decorator"""
def wrapper(f):
retry_count = 0
- def retried(*args,**kwargs):
+
+ def retried(*args, **kwargs):
if retries is not None:
ret = None
while True:
@@ -102,13 +111,13 @@ def retry(retries=None, retry_pause=1):
if retry_count >= retries:
raise Exception("Retry limit exceeded: %d" % retries)
try:
- ret = f(*args,**kwargs)
+ ret = f(*args, **kwargs)
except:
pass
if ret:
break
time.sleep(retry_pause)
return ret
+
return retried
return wrapper
-
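The decorators reindented above are meant to wrap functions that talk to rate-limited APIs. A hedged usage sketch (fetch_status and client.get are invented for illustration):

from ansible.module_utils.api import rate_limit, retry

@rate_limit(rate=10, rate_limit=60)   # at most ~10 calls per 60 seconds
@retry(retries=3, retry_pause=2)      # retry up to 3 times, pausing 2s between attempts
def fetch_status(client):
    # retry() keeps calling until the wrapped function returns something truthy,
    # so any truthy-on-success return value works here.
    return client.get('/status')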
diff --git a/lib/ansible/module_utils/avi.py b/lib/ansible/module_utils/avi.py
index 4e3bd877ef..2a62107a36 100644
--- a/lib/ansible/module_utils/avi.py
+++ b/lib/ansible/module_utils/avi.py
@@ -25,7 +25,6 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
# This module initially matched the namespace of network module avi. However,
# that causes namespace import error when other modules from avi namespaces
@@ -40,8 +39,7 @@ HAS_AVI = True
try:
import avi.sdk
sdk_version = getattr(avi.sdk, '__version__', None)
- if ((sdk_version is None) or (sdk_version and
- (parse_version(sdk_version) < parse_version('17.1')))):
+ if ((sdk_version is None) or (sdk_version and (parse_version(sdk_version) < parse_version('17.1')))):
# It allows the __version__ to be '' as that value is used in development builds
raise ImportError
from avi.sdk.utils.ansible_utils import avi_ansible_api
diff --git a/lib/ansible/module_utils/azure_rm_common.py b/lib/ansible/module_utils/azure_rm_common.py
index 6333c7c91c..1626d8b088 100644
--- a/lib/ansible/module_utils/azure_rm_common.py
+++ b/lib/ansible/module_utils/azure_rm_common.py
@@ -1,4 +1,3 @@
-#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
@@ -16,7 +15,6 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-#
import json
import os
@@ -102,6 +100,7 @@ except ImportError as exc:
HAS_AZURE_EXC = exc
HAS_AZURE = False
+
def azure_id_to_dict(id):
pieces = re.sub(r'^\/', '', id).split('/')
result = {}
@@ -121,6 +120,7 @@ AZURE_EXPECTED_VERSIONS = dict(
AZURE_MIN_RELEASE = '2.0.0rc5'
+
class AzureRMModuleBase(object):
def __init__(self, derived_arg_spec, bypass_checks=False, no_log=False,
@@ -202,7 +202,7 @@ class AzureRMModuleBase(object):
if Version(client_version) < Version(expected_version):
self.fail("Installed {0} client version is {1}. The supported version is {2}. Try "
"`pip install azure>={3} --upgrade`".format(client_name, client_version, expected_version,
- AZURE_MIN_RELEASE))
+ AZURE_MIN_RELEASE))
def exec_module(self, **kwargs):
self.fail("Error: {0} failed to implement exec_module method.".format(self.__class__.__name__))
@@ -220,11 +220,11 @@ class AzureRMModuleBase(object):
def log(self, msg, pretty_print=False):
pass
# Use only during module development
- #if self.debug:
- # log_file = open('azure_rm.log', 'a')
- # if pretty_print:
+ # if self.debug:
+ # log_file = open('azure_rm.log', 'a')
+ # if pretty_print:
# log_file.write(json.dumps(msg, indent=4, sort_keys=True))
- # else:
+ # else:
# log_file.write(msg + u'\n')
def validate_tags(self, tags):
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index c8b9c6d5f1..165d785688 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -32,7 +32,17 @@ BOOLEANS_TRUE = ['y', 'yes', 'on', '1', 'true', 1, True]
BOOLEANS_FALSE = ['n', 'no', 'off', '0', 'false', 0, False]
BOOLEANS = BOOLEANS_TRUE + BOOLEANS_FALSE
-SIZE_RANGES = { 'Y': 1<<80, 'Z': 1<<70, 'E': 1<<60, 'P': 1<<50, 'T': 1<<40, 'G': 1<<30, 'M': 1<<20, 'K': 1<<10, 'B': 1 }
+SIZE_RANGES = {
+ 'Y': 1 << 80,
+ 'Z': 1 << 70,
+ 'E': 1 << 60,
+ 'P': 1 << 50,
+ 'T': 1 << 40,
+ 'G': 1 << 30,
+ 'M': 1 << 20,
+ 'K': 1 << 10,
+ 'B': 1,
+}
FILE_ATTRIBUTES = {
'A': 'noatime',
@@ -83,9 +93,9 @@ from itertools import repeat, chain
try:
import syslog
- HAS_SYSLOG=True
+ HAS_SYSLOG = True
except ImportError:
- HAS_SYSLOG=False
+ HAS_SYSLOG = False
try:
from systemd import journal
@@ -93,10 +103,10 @@ try:
except ImportError:
has_journal = False
-HAVE_SELINUX=False
+HAVE_SELINUX = False
try:
import selinux
- HAVE_SELINUX=True
+ HAVE_SELINUX = True
except ImportError:
pass
@@ -161,8 +171,16 @@ except ImportError:
pass
from ansible.module_utils.pycompat24 import get_exception, literal_eval
-from ansible.module_utils.six import (PY2, PY3, b, binary_type, integer_types,
- iteritems, text_type, string_types)
+from ansible.module_utils.six import (
+ PY2,
+ PY3,
+ b,
+ binary_type,
+ integer_types,
+ iteritems,
+ string_types,
+ text_type,
+)
from ansible.module_utils.six.moves import map, reduce, shlex_quote
from ansible.module_utils._text import to_native, to_bytes, to_text
@@ -208,33 +226,33 @@ _literal_eval = literal_eval
# is an internal implementation detail
_ANSIBLE_ARGS = None
-FILE_COMMON_ARGUMENTS=dict(
- src = dict(),
- mode = dict(type='raw'),
- owner = dict(),
- group = dict(),
- seuser = dict(),
- serole = dict(),
- selevel = dict(),
- setype = dict(),
- follow = dict(type='bool', default=False),
+FILE_COMMON_ARGUMENTS = dict(
+ src=dict(),
+ mode=dict(type='raw'),
+ owner=dict(),
+ group=dict(),
+ seuser=dict(),
+ serole=dict(),
+ selevel=dict(),
+ setype=dict(),
+ follow=dict(type='bool', default=False),
# not taken by the file module, but other modules call file so it must ignore them.
- content = dict(no_log=True),
- backup = dict(),
- force = dict(),
- remote_src = dict(), # used by assemble
- regexp = dict(), # used by assemble
- delimiter = dict(), # used by assemble
- directory_mode = dict(), # used by copy
- unsafe_writes = dict(type='bool'), # should be available to any module using atomic_move
- attributes = dict(aliases=['attr']),
+ content=dict(no_log=True),
+ backup=dict(),
+ force=dict(),
+ remote_src=dict(), # used by assemble
+ regexp=dict(), # used by assemble
+ delimiter=dict(), # used by assemble
+ directory_mode=dict(), # used by copy
+ unsafe_writes=dict(type='bool'), # should be available to any module using atomic_move
+ attributes=dict(aliases=['attr']),
)
PASSWD_ARG_RE = re.compile(r'^[-]{0,2}pass[-]?(word|wd)?')
# Can't use 07777 on Python 3, can't use 0o7777 on Python 2.4
PERM_BITS = int('07777', 8) # file mode permission bits
-EXEC_PERM_BITS = int('00111', 8) # execute permission bits
+EXEC_PERM_BITS = int('00111', 8) # execute permission bits
DEFAULT_PERM = int('0666', 8) # default file permission bits
@@ -242,11 +260,12 @@ def get_platform():
''' what's the platform? example: Linux is a platform. '''
return platform.system()
+
def get_distribution():
''' return the distribution name '''
if platform.system() == 'Linux':
try:
- supported_dists = platform._supported_dists + ('arch','alpine')
+ supported_dists = platform._supported_dists + ('arch', 'alpine')
distribution = platform.linux_distribution(supported_dists=supported_dists)[0].capitalize()
if not distribution and os.path.isfile('/etc/system-release'):
distribution = platform.linux_distribution(supported_dists=['system'])[0].capitalize()
@@ -261,6 +280,7 @@ def get_distribution():
distribution = None
return distribution
+
def get_distribution_version():
''' return the distribution version '''
if platform.system() == 'Linux':
@@ -275,6 +295,7 @@ def get_distribution_version():
distribution_version = None
return distribution_version
+
def get_all_subclasses(cls):
'''
used by modules like Hardware or Network fact classes to retrieve all subclasses of a given class.
@@ -338,6 +359,7 @@ def json_dict_unicode_to_bytes(d, encoding='utf-8', errors='surrogate_or_strict'
else:
return d
+
def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'):
''' Recursively convert dict keys and values to byte str
@@ -357,6 +379,7 @@ def json_dict_bytes_to_unicode(d, encoding='utf-8', errors='surrogate_or_strict'
else:
return d
+
def return_values(obj):
""" Return native stringified values from datastructures.
@@ -381,6 +404,7 @@ def return_values(obj):
else:
raise TypeError('Unknown parameter type: %s, %s' % (type(obj), obj))
+
def remove_values(value, no_log_strings):
""" Remove strings in no_log_strings from value. If value is a container
type, then remove a lot more"""
@@ -489,6 +513,7 @@ def heuristic_log_sanitize(data, no_log_values=None):
output = remove_values(output, no_log_values)
return output
+
def bytes_to_human(size, isbits=False, unit=None):
base = 'Bytes'
@@ -505,7 +530,8 @@ def bytes_to_human(size, isbits=False, unit=None):
else:
suffix = base
- return '%.2f %s' % (float(size)/ limit, suffix)
+ return '%.2f %s' % (float(size) / limit, suffix)
+
def human_to_bytes(number, default_unit=None, isbits=False):
@@ -555,6 +581,7 @@ def human_to_bytes(number, default_unit=None, isbits=False):
return int(round(num * limit))
+
def is_executable(path):
'''is the given path executable?
@@ -568,6 +595,7 @@ def is_executable(path):
# execute bits are set.
return ((stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) & os.stat(path)[stat.ST_MODE])
+
def _load_params():
''' read the modules parameters and store them globally.
@@ -623,6 +651,7 @@ def _load_params():
'"failed": true}')
sys.exit(1)
+
def env_fallback(*args, **kwargs):
''' Load value from environment '''
for arg in args:
@@ -631,6 +660,7 @@ def env_fallback(*args, **kwargs):
else:
raise AnsibleFallbackNotFound
+
def _lenient_lowercase(lst):
"""Lowercase elements of a list.
@@ -644,6 +674,7 @@ def _lenient_lowercase(lst):
lowered.append(value)
return lowered
+
def format_attributes(attributes):
attribute_list = []
for attr in attributes:
@@ -651,13 +682,15 @@ def format_attributes(attributes):
attribute_list.append(FILE_ATTRIBUTES[attr])
return attribute_list
+
def get_flags_from_attributes(attributes):
flags = []
- for key,attr in FILE_ATTRIBUTES.items():
+ for key, attr in FILE_ATTRIBUTES.items():
if attr in attributes:
flags.append(key)
return ''.join(flags)
+
class AnsibleFallbackNotFound(Exception):
pass
@@ -674,7 +707,7 @@ class AnsibleModule(object):
see library/* for examples
'''
- self._name = os.path.basename(__file__) #initialize name until we can parse from options
+ self._name = os.path.basename(__file__) # initialize name until we can parse from options
self.argument_spec = argument_spec
self.supports_check_mode = supports_check_mode
self.check_mode = False
@@ -808,15 +841,15 @@ class AnsibleModule(object):
b_path = os.path.realpath(b_path)
path = to_native(b_path)
- mode = params.get('mode', None)
- owner = params.get('owner', None)
- group = params.get('group', None)
+ mode = params.get('mode', None)
+ owner = params.get('owner', None)
+ group = params.get('group', None)
# selinux related options
- seuser = params.get('seuser', None)
- serole = params.get('serole', None)
- setype = params.get('setype', None)
- selevel = params.get('selevel', None)
+ seuser = params.get('seuser', None)
+ serole = params.get('serole', None)
+ setype = params.get('setype', None)
+ selevel = params.get('selevel', None)
secontext = [seuser, serole, setype]
if self.selinux_mls_enabled():
@@ -834,7 +867,6 @@ class AnsibleModule(object):
selevel=selevel, secontext=secontext, attributes=attributes,
)
-
# Detect whether using selinux that is MLS-aware.
# While this means you can set the level/range with
# selinux.lsetfilecon(), it may or may not mean that you
@@ -853,7 +885,7 @@ class AnsibleModule(object):
if not HAVE_SELINUX:
seenabled = self.get_bin_path('selinuxenabled')
if seenabled is not None:
- (rc,out,err) = self.run_command(seenabled)
+ (rc, out, err) = self.run_command(seenabled)
if rc == 0:
self.fail_json(msg="Aborting, target uses selinux but python bindings (libselinux-python) aren't installed!")
return False
@@ -1127,7 +1159,7 @@ class AnsibleModule(object):
e = get_exception()
if os.path.islink(b_path) and e.errno == errno.EPERM: # Can't set mode on symbolic links
pass
- elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
+ elif e.errno in (errno.ENOENT, errno.ELOOP): # Can't set mode on broken symbolic links
pass
else:
raise e
@@ -1154,7 +1186,7 @@ class AnsibleModule(object):
existing = self.get_file_attributes(b_path)
- if existing.get('attr_flags','') != attributes:
+ if existing.get('attr_flags', '') != attributes:
attrcmd = self.get_bin_path('chattr')
if attrcmd:
attrcmd = [attrcmd, '=%s' % attributes, b_path]
@@ -1187,14 +1219,13 @@ class AnsibleModule(object):
rc, out, err = self.run_command(attrcmd)
if rc == 0:
res = out.split(' ')[0:2]
- output['attr_flags'] = res[1].replace('-','').strip()
+ output['attr_flags'] = res[1].replace('-', '').strip()
output['version'] = res[0].strip()
output['attributes'] = format_attributes(output['attr_flags'])
except:
pass
return output
-
def _symbolic_mode_to_octal(self, path_stat, symbolic_mode):
new_mode = stat.S_IMODE(path_stat.st_mode)
@@ -1217,7 +1248,7 @@ class AnsibleModule(object):
return new_mode
def _apply_operation_to_mode(self, user, operator, mode_to_apply, current_mode):
- if operator == '=':
+ if operator == '=':
if user == 'u':
mask = stat.S_IRWXU | stat.S_ISUID
elif user == 'g':
@@ -1247,13 +1278,13 @@ class AnsibleModule(object):
X_perms = {
'u': {'X': stat.S_IXUSR},
'g': {'X': stat.S_IXGRP},
- 'o': {'X': stat.S_IXOTH}
+ 'o': {'X': stat.S_IXOTH},
}
else:
X_perms = {
'u': {'X': 0},
'g': {'X': 0},
- 'o': {'X': 0}
+ 'o': {'X': 0},
}
user_perms_to_modes = {
@@ -1265,7 +1296,8 @@ class AnsibleModule(object):
't': 0,
'u': prev_mode & stat.S_IRWXU,
'g': (prev_mode & stat.S_IRWXG) << 3,
- 'o': (prev_mode & stat.S_IRWXO) << 6 },
+ 'o': (prev_mode & stat.S_IRWXO) << 6,
+ },
'g': {
'r': stat.S_IRGRP,
'w': stat.S_IWGRP,
@@ -1274,7 +1306,8 @@ class AnsibleModule(object):
't': 0,
'u': (prev_mode & stat.S_IRWXU) >> 3,
'g': prev_mode & stat.S_IRWXG,
- 'o': (prev_mode & stat.S_IRWXO) << 3 },
+ 'o': (prev_mode & stat.S_IRWXO) << 3,
+ },
'o': {
'r': stat.S_IROTH,
'w': stat.S_IWOTH,
@@ -1283,14 +1316,17 @@ class AnsibleModule(object):
't': stat.S_ISVTX,
'u': (prev_mode & stat.S_IRWXU) >> 6,
'g': (prev_mode & stat.S_IRWXG) >> 3,
- 'o': prev_mode & stat.S_IRWXO }
+ 'o': prev_mode & stat.S_IRWXO,
+ }
}
# Insert X_perms into user_perms_to_modes
for key, value in X_perms.items():
user_perms_to_modes[key].update(value)
- or_reduce = lambda mode, perm: mode | user_perms_to_modes[user][perm]
+ def or_reduce(mode, perm):
+ return mode | user_perms_to_modes[user][perm]
+
return reduce(or_reduce, perms, 0)
def set_fs_attributes_if_different(self, file_args, changed, diff=None, expand=True):
@@ -1383,10 +1419,10 @@ class AnsibleModule(object):
def _handle_aliases(self, spec=None):
# this uses exceptions as it happens before we can safely call fail_json
- aliases_results = {} #alias:canon
+ aliases_results = {} # alias:canon
if spec is None:
spec = self.argument_spec
- for (k,v) in spec.items():
+ for (k, v) in spec.items():
self._legal_inputs.append(k)
aliases = v.get('aliases', None)
default = v.get('default', None)
@@ -1409,7 +1445,7 @@ class AnsibleModule(object):
def _check_arguments(self, check_invalid_arguments):
self._syslog_facility = 'LOG_USER'
unsupported_parameters = set()
- for (k,v) in list(self.params.items()):
+ for (k, v) in list(self.params.items()):
if k == '_ansible_check_mode' and v:
self.check_mode = True
@@ -1444,7 +1480,7 @@ class AnsibleModule(object):
elif check_invalid_arguments and k not in self._legal_inputs:
unsupported_parameters.add(k)
- #clean up internal params:
+ # clean up internal params:
if k.startswith('_ansible_'):
del self.params[k]
@@ -1482,20 +1518,20 @@ class AnsibleModule(object):
if spec is None:
return
for check in spec:
- counts = [ self._count_terms([field]) for field in check ]
- non_zero = [ c for c in counts if c > 0 ]
+ counts = [self._count_terms([field]) for field in check]
+ non_zero = [c for c in counts if c > 0]
if len(non_zero) > 0:
if 0 in counts:
self.fail_json(msg="parameters are required together: %s" % (check,))
- def _check_required_arguments(self, spec=None, param=None ):
+ def _check_required_arguments(self, spec=None, param=None):
''' ensure all required arguments are present '''
missing = []
if spec is None:
spec = self.argument_spec
if param is None:
param = self.params
- for (k,v) in spec.items():
+ for (k, v) in spec.items():
required = v.get('required', False)
if required and k not in param:
missing.append(k)
@@ -1534,8 +1570,8 @@ class AnsibleModule(object):
spec = self.argument_spec
if param is None:
param = self.params
- for (k,v) in spec.items():
- choices = v.get('choices',None)
+ for (k, v) in spec.items():
+ choices = v.get('choices', None)
if choices is None:
continue
if isinstance(choices, SEQUENCETYPE) and not isinstance(choices, (binary_type, text_type)):
@@ -1561,8 +1597,8 @@ class AnsibleModule(object):
(param[k],) = overlap
if param[k] not in choices:
- choices_str=",".join([to_native(c) for c in choices])
- msg="value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
+ choices_str = ",".join([to_native(c) for c in choices])
+ msg = "value of %s must be one of: %s, got: %s" % (k, choices_str, param[k])
self.fail_json(msg=msg)
else:
self.fail_json(msg="internal error: choices for argument %s are not iterable: %s" % (k, choices))
@@ -1610,7 +1646,7 @@ class AnsibleModule(object):
if isinstance(value, string_types):
return value.split(",")
elif isinstance(value, int) or isinstance(value, float):
- return [ str(value) ]
+ return [str(value)]
raise TypeError('%s cannot be converted to a list' % type(value))
@@ -1703,14 +1739,12 @@ class AnsibleModule(object):
def _check_type_raw(self, value):
return value
-
def _check_type_bytes(self, value):
try:
self.human_to_bytes(value)
except ValueError:
raise TypeError('%s cannot be converted to a Byte value' % type(value))
-
def _check_type_bits(self, value):
try:
self.human_to_bytes(value, isbits=True)
@@ -1761,7 +1795,7 @@ class AnsibleModule(object):
self._check_argument_values(spec, param[k])
def _set_defaults(self, pre=True):
- for (k,v) in self.argument_spec.items():
+ for (k, v) in self.argument_spec.items():
default = v.get('default', None)
if pre is True:
# this prevents setting defaults on required items
@@ -1773,7 +1807,7 @@ class AnsibleModule(object):
self.params[k] = default
def _set_fallbacks(self):
- for k,v in self.argument_spec.items():
+ for (k, v) in self.argument_spec.items():
fallback = v.get('fallback', (None,))
fallback_strategy = fallback[0]
fallback_args = []
@@ -1856,16 +1890,14 @@ class AnsibleModule(object):
log_args = dict()
for param in self.params:
- canon = self.aliases.get(param, param)
+ canon = self.aliases.get(param, param)
arg_opts = self.argument_spec.get(canon, {})
no_log = arg_opts.get('no_log', False)
if self.boolean(no_log):
log_args[param] = 'NOT_LOGGING_PARAMETER'
# try to capture all passwords/passphrase named fields missed by no_log
- elif PASSWORD_MATCH.search(param) and \
- arg_opts.get('type', 'str') != 'bool' and \
- not arg_opts.get('choices', False):
+ elif PASSWORD_MATCH.search(param) and arg_opts.get('type', 'str') != 'bool' and not arg_opts.get('choices', False):
# skip boolean and enums as they are about 'password' state
log_args[param] = 'NOT_LOGGING_PASSWORD'
self.warn('Module did not set no_log for %s' % param)
@@ -1885,11 +1917,10 @@ class AnsibleModule(object):
self.log(msg, log_args=log_args)
-
def _set_cwd(self):
try:
cwd = os.getcwd()
- if not os.access(cwd, os.F_OK|os.R_OK):
+ if not os.access(cwd, os.F_OK | os.R_OK):
raise Exception()
return cwd
except:
@@ -1897,7 +1928,7 @@ class AnsibleModule(object):
# Try and move to a neutral location to prevent errors
for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
try:
- if os.access(cwd, os.F_OK|os.R_OK):
+ if os.access(cwd, os.F_OK | os.R_OK):
os.chdir(cwd)
return cwd
except:
@@ -2011,7 +2042,7 @@ class AnsibleModule(object):
def exit_json(self, **kwargs):
''' return from the module, without error '''
- if not 'changed' in kwargs:
+ if 'changed' not in kwargs:
kwargs['changed'] = False
self.do_cleanup_files()
@@ -2024,7 +2055,7 @@ class AnsibleModule(object):
assert 'msg' in kwargs, "implementation error -- msg to explain the error is required"
kwargs['failed'] = True
- if not 'changed' in kwargs:
+ if 'changed' not in kwargs:
kwargs['changed'] = False
self.do_cleanup_files()
@@ -2175,7 +2206,7 @@ class AnsibleModule(object):
native_suffix = os.path.basename(b_dest)
native_prefix = b('.ansible_tmp')
try:
- tmp_dest_fd, tmp_dest_name = tempfile.mkstemp( prefix=native_prefix, dir=native_dest_dir, suffix=native_suffix)
+ tmp_dest_fd, tmp_dest_name = tempfile.mkstemp(prefix=native_prefix, dir=native_dest_dir, suffix=native_suffix)
except (OSError, IOError):
e = get_exception()
self.fail_json(msg='The destination directory (%s) is not writable by the current user. Error was: %s' % (os.path.dirname(dest), e))
@@ -2261,7 +2292,6 @@ class AnsibleModule(object):
e = get_exception()
self.fail_json(msg='Could not write data to file (%s) from (%s): %s' % (dest, src, e), exception=traceback.format_exc())
-
def _read_from_pipes(self, rpipes, rfds, file_descriptor):
data = b('')
if file_descriptor in rfds:
@@ -2359,7 +2389,7 @@ class AnsibleModule(object):
# expand things like $HOME and ~
if not shell:
- args = [ os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None ]
+ args = [os.path.expanduser(os.path.expandvars(x)) for x in args if x is not None]
rc = 0
msg = None
@@ -2387,9 +2417,9 @@ class AnsibleModule(object):
# Clean out python paths set by ansiballz
if 'PYTHONPATH' in os.environ:
pypaths = os.environ['PYTHONPATH'].split(':')
- pypaths = [x for x in pypaths \
- if not x.endswith('/ansible_modlib.zip') \
- and not x.endswith('/debug_dir')]
+ pypaths = [x for x in pypaths
+ if not x.endswith('/ansible_modlib.zip') and
+ not x.endswith('/debug_dir')]
os.environ['PYTHONPATH'] = ':'.join(pypaths)
if not os.environ['PYTHONPATH']:
del os.environ['PYTHONPATH']
@@ -2510,7 +2540,7 @@ class AnsibleModule(object):
self.fail_json(rc=e.errno, msg=to_native(e), cmd=clean_args)
except Exception:
e = get_exception()
- self.log("Error Executing CMD:%s Exception:%s" % (clean_args,to_native(traceback.format_exc())))
+ self.log("Error Executing CMD:%s Exception:%s" % (clean_args, to_native(traceback.format_exc())))
self.fail_json(rc=257, msg=to_native(e), exception=traceback.format_exc(), cmd=clean_args)
# Restore env settings
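The SIZE_RANGES table rewritten above just spells the size multipliers out as bit shifts; each shift is a power of 1024, which bytes_to_human() and human_to_bytes() divide or multiply by. A quick check of the equivalence:

SIZE_RANGES = {'Y': 1 << 80, 'Z': 1 << 70, 'E': 1 << 60, 'P': 1 << 50,
               'T': 1 << 40, 'G': 1 << 30, 'M': 1 << 20, 'K': 1 << 10, 'B': 1}

# 1 << 10 == 1024, 1 << 20 == 1024 ** 2, and so on up to 1 << 80 == 1024 ** 8.
assert SIZE_RANGES['K'] == 1024
assert SIZE_RANGES['G'] == 1024 ** 3
assert SIZE_RANGES['Y'] == 1024 ** 8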
diff --git a/lib/ansible/module_utils/bigswitch_utils.py b/lib/ansible/module_utils/bigswitch_utils.py
index 24a126220a..299fcd3310 100644
--- a/lib/ansible/module_utils/bigswitch_utils.py
+++ b/lib/ansible/module_utils/bigswitch_utils.py
@@ -25,9 +25,12 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
import json
+
from ansible.module_utils.urls import fetch_url
+
class Response(object):
def __init__(self, resp, info):
diff --git a/lib/ansible/module_utils/connection.py b/lib/ansible/module_utils/connection.py
index 8848e67db3..785af210ba 100644
--- a/lib/ansible/module_utils/connection.py
+++ b/lib/ansible/module_utils/connection.py
@@ -25,27 +25,29 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
+
+import signal
import socket
import struct
-import signal
from ansible.module_utils.basic import get_exception
from ansible.module_utils._text import to_bytes, to_native
+
def send_data(s, data):
- packed_len = struct.pack('!Q',len(data))
+ packed_len = struct.pack('!Q', len(data))
return s.sendall(packed_len + data)
+
def recv_data(s):
- header_len = 8 # size of a packed unsigned long long
+ header_len = 8 # size of a packed unsigned long long
data = to_bytes("")
while len(data) < header_len:
d = s.recv(header_len - len(data))
if not d:
return None
data += d
- data_len = struct.unpack('!Q',data[:header_len])[0]
+ data_len = struct.unpack('!Q', data[:header_len])[0]
data = data[header_len:]
while len(data) < data_len:
d = s.recv(data_len - len(data))
@@ -54,6 +56,7 @@ def recv_data(s):
data += d
return data
+
def exec_command(module, command):
try:
sf = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
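send_data() and recv_data() above implement a simple length-prefixed frame: an 8-byte big-endian unsigned length followed by the payload. A standalone illustration of that wire format using only struct (no socket needed; the payload contents are arbitrary):

import struct

payload = b'{"command": "show version"}'
frame = struct.pack('!Q', len(payload)) + payload  # what send_data() writes

header_len = 8  # size of a packed unsigned long long
data_len = struct.unpack('!Q', frame[:header_len])[0]
assert frame[header_len:header_len + data_len] == payload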
diff --git a/lib/ansible/module_utils/database.py b/lib/ansible/module_utils/database.py
index 6170614e90..701c8bdc0b 100644
--- a/lib/ansible/module_utils/database.py
+++ b/lib/ansible/module_utils/database.py
@@ -26,9 +26,11 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
class SQLParseError(Exception):
pass
+
class UnclosedQuoteError(SQLParseError):
pass
@@ -38,6 +40,7 @@ class UnclosedQuoteError(SQLParseError):
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, schema=2, table=3, column=4, role=1)
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)
+
def _find_end_quote(identifier, quote_char):
accumulate = 0
while True:
@@ -47,12 +50,12 @@ def _find_end_quote(identifier, quote_char):
raise UnclosedQuoteError
accumulate = accumulate + quote
try:
- next_char = identifier[quote+1]
+ next_char = identifier[quote + 1]
except IndexError:
return accumulate
if next_char == quote_char:
try:
- identifier = identifier[quote+2:]
+ identifier = identifier[quote + 2:]
accumulate = accumulate + 2
except IndexError:
raise UnclosedQuoteError
@@ -73,10 +76,10 @@ def _identifier_parse(identifier, quote_char):
already_quoted = False
else:
if end_quote < len(identifier) - 1:
- if identifier[end_quote+1] == '.':
+ if identifier[end_quote + 1] == '.':
dot = end_quote + 1
first_identifier = identifier[:dot]
- next_identifier = identifier[dot+1:]
+ next_identifier = identifier[dot + 1:]
further_identifiers = _identifier_parse(next_identifier, quote_char)
further_identifiers.insert(0, first_identifier)
else:
@@ -88,19 +91,19 @@ def _identifier_parse(identifier, quote_char):
try:
dot = identifier.index('.')
except ValueError:
- identifier = identifier.replace(quote_char, quote_char*2)
+ identifier = identifier.replace(quote_char, quote_char * 2)
identifier = ''.join((quote_char, identifier, quote_char))
further_identifiers = [identifier]
else:
if dot == 0 or dot >= len(identifier) - 1:
- identifier = identifier.replace(quote_char, quote_char*2)
+ identifier = identifier.replace(quote_char, quote_char * 2)
identifier = ''.join((quote_char, identifier, quote_char))
further_identifiers = [identifier]
else:
first_identifier = identifier[:dot]
- next_identifier = identifier[dot+1:]
+ next_identifier = identifier[dot + 1:]
further_identifiers = _identifier_parse(next_identifier, quote_char)
- first_identifier = first_identifier.replace(quote_char, quote_char*2)
+ first_identifier = first_identifier.replace(quote_char, quote_char * 2)
first_identifier = ''.join((quote_char, first_identifier, quote_char))
further_identifiers.insert(0, first_identifier)
@@ -113,6 +116,7 @@ def pg_quote_identifier(identifier, id_type):
raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]))
return '.'.join(identifier_fragments)
+
def mysql_quote_identifier(identifier, id_type):
identifier_fragments = _identifier_parse(identifier, quote_char='`')
if len(identifier_fragments) > _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]:
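The quoting helpers touched above escape an identifier by doubling any embedded quote character and wrapping the result, splitting on dots up to the level allowed for the identifier type. A hedged example of the expected output shape (only part of the parsing logic is visible in this hunk):

from ansible.module_utils.database import pg_quote_identifier

print(pg_quote_identifier('public.users', 'table'))  # expected: "public"."users"
print(pg_quote_identifier('my"table', 'table'))      # expected: "my""table" (embedded quote doubled)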
diff --git a/lib/ansible/module_utils/docker_common.py b/lib/ansible/module_utils/docker_common.py
index 0321920a92..fad8e5172c 100644
--- a/lib/ansible/module_utils/docker_common.py
+++ b/lib/ansible/module_utils/docker_common.py
@@ -217,7 +217,7 @@ class AnsibleDockerClient(Client):
docker_host=self._get_value('docker_host', params['docker_host'], 'DOCKER_HOST',
DEFAULT_DOCKER_HOST),
tls_hostname=self._get_value('tls_hostname', params['tls_hostname'],
- 'DOCKER_TLS_HOSTNAME', 'localhost'),
+ 'DOCKER_TLS_HOSTNAME', 'localhost'),
api_version=self._get_value('api_version', params['api_version'], 'DOCKER_API_VERSION',
'auto'),
cacert_path=self._get_value('cacert_path', params['cacert_path'], 'DOCKER_CERT_PATH', None),
@@ -244,7 +244,7 @@ class AnsibleDockerClient(Client):
def _get_tls_config(self, **kwargs):
self.log("get_tls_config:")
for key in kwargs:
- self.log(" %s: %s" % (key, kwargs[key]))
+ self.log(" %s: %s" % (key, kwargs[key]))
try:
tls_config = TLSConfig(**kwargs)
return tls_config
@@ -327,11 +327,10 @@ class AnsibleDockerClient(Client):
def _handle_ssl_error(self, error):
match = re.match(r"hostname.*doesn\'t match (\'.*\')", str(error))
if match:
- msg = "You asked for verification that Docker host name matches %s. The actual hostname is %s. " \
- "Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. " \
- "You may also use TLS without verification by setting the tls parameter to true." \
- % (self.auth_params['tls_hostname'], match.group(1), match.group(1))
- self.fail(msg)
+ self.fail("You asked for verification that Docker host name matches %s. The actual hostname is %s. "
+ "Most likely you need to set DOCKER_TLS_HOSTNAME or pass tls_hostname with a value of %s. "
+ "You may also use TLS without verification by setting the tls parameter to true."
+ % (self.auth_params['tls_hostname'], match.group(1), match.group(1)))
self.fail("SSL Exception: %s" % (error))
def get_container(self, name=None):
@@ -448,5 +447,3 @@ class AnsibleDockerClient(Client):
new_tag = self.find_image(name, tag)
return new_tag, old_tag == new_tag
-
-
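A hedged sketch of the value-resolution pattern that _get_value() above is used for, assuming the usual precedence of explicit module parameter, then environment variable, then built-in default:

    import os

    def resolve_setting(param_value, env_variable, default_value):
        # Explicit module parameter wins, then the environment, then the default.
        if param_value is not None:
            return param_value
        env_value = os.environ.get(env_variable)
        if env_value is not None:
            return env_value
        return default_value

    # e.g. resolve_setting(None, 'DOCKER_TLS_HOSTNAME', 'localhost') -> 'localhost'
    # unless DOCKER_TLS_HOSTNAME is set in the environment.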
diff --git a/lib/ansible/module_utils/ec2.py b/lib/ansible/module_utils/ec2.py
index 38c8814b5e..31cabdcfee 100644
--- a/lib/ansible/module_utils/ec2.py
+++ b/lib/ansible/module_utils/ec2.py
@@ -34,7 +34,7 @@ from ansible.module_utils.cloud import CloudRetry
try:
import boto
- import boto.ec2 #boto does weird import stuff
+ import boto.ec2 # boto does weird import stuff
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
@@ -54,6 +54,7 @@ except:
from ansible.module_utils.six import string_types, binary_type, text_type
+
class AnsibleAWSError(Exception):
pass
@@ -98,6 +99,7 @@ def boto3_conn(module, conn_type=None, resource=None, region=None, endpoint=None
module.fail_json(msg='There is an issue in the code of the module. You must specify either both, resource or client to the conn_type '
'parameter in the boto3_conn function call')
+
def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **params):
profile = params.pop('profile_name', None)
@@ -120,6 +122,7 @@ def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **par
boto3_inventory_conn = _boto3_conn
+
def aws_common_argument_spec():
return dict(
ec2_url=dict(),
@@ -291,6 +294,7 @@ def ec2_connect(module):
return ec2
+
def paging(pause=0, marker_property='marker'):
""" Adds paging to boto retrieval functions that support a 'marker'
this is configurable as not all boto functions seem to use the
@@ -330,7 +334,6 @@ def camel_dict_to_snake_dict(camel_dict):
return all_cap_re.sub(r'\1_\2', s1).lower()
-
def value_is_list(camel_list):
checked_list = []
@@ -344,7 +347,6 @@ def camel_dict_to_snake_dict(camel_dict):
return checked_list
-
snake_dict = {}
for k, v in camel_dict.items():
if isinstance(v, dict):
@@ -403,7 +405,7 @@ def ansible_dict_to_boto3_filter_list(filters_dict):
"""
filters_list = []
- for k,v in filters_dict.items():
+ for k, v in filters_dict.items():
filter_dict = {'Name': k}
if isinstance(v, string_types):
filter_dict['Values'] = [v]
@@ -470,7 +472,7 @@ def ansible_dict_to_boto3_tag_list(tags_dict, tag_name_key_name='Key', tag_value
"""
tags_list = []
- for k,v in tags_dict.items():
+ for k, v in tags_dict.items():
tags_list.append({tag_name_key_name: k, tag_value_key_name: v})
return tags_list
@@ -491,7 +493,6 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
else:
return sg.name
-
def get_sg_id(sg, boto3):
if boto3:
@@ -520,7 +521,7 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
all_sec_groups = ec2_connection.describe_security_groups()['SecurityGroups']
else:
if vpc_id:
- filters = { 'vpc-id': vpc_id }
+ filters = {'vpc-id': vpc_id}
all_sec_groups = ec2_connection.get_all_security_groups(filters=filters)
else:
all_sec_groups = ec2_connection.get_all_security_groups()
@@ -536,7 +537,7 @@ def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id
if len(still_unmatched) > 0:
raise ValueError("The following group names are not valid: %s" % ', '.join(still_unmatched))
- sec_group_id_list += [ str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list ]
+ sec_group_id_list += [str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list]
return sec_group_id_list
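As a usage illustration of the dict-to-filter-list conversion performed by ansible_dict_to_boto3_filter_list() above (sketched standalone here; the real helper uses six's string_types rather than str):

    filters_dict = {'vpc-id': 'vpc-12345678', 'instance-state-name': ['running', 'stopped']}

    filters_list = []
    for name, values in filters_dict.items():
        if isinstance(values, str):
            values = [values]
        filters_list.append({'Name': name, 'Values': values})

    # filters_list:
    # [{'Name': 'vpc-id', 'Values': ['vpc-12345678']},
    #  {'Name': 'instance-state-name', 'Values': ['running', 'stopped']}]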
diff --git a/lib/ansible/module_utils/f5_utils.py b/lib/ansible/module_utils/f5_utils.py
index 2c70b66270..6557974709 100644
--- a/lib/ansible/module_utils/f5_utils.py
+++ b/lib/ansible/module_utils/f5_utils.py
@@ -56,7 +56,6 @@ def f5_argument_spec():
server_port=dict(
type='int',
default=443,
- required=False,
fallback=(env_fallback, ['F5_SERVER_PORT'])
),
state=dict(
@@ -80,7 +79,7 @@ def f5_parse_arguments(module):
import ssl
if not hasattr(ssl, 'SSLContext'):
module.fail_json(
- msg="bigsuds does not support verifying certificates with python < 2.7.9." \
+                msg="bigsuds does not support verifying certificates with python < 2.7.9. "
"Either update python or set validate_certs=False on the task'")
return (
@@ -122,26 +121,22 @@ def bigip_api(bigip, user, password, validate_certs, port=443):
# Fully Qualified name (with the partition)
-def fq_name(partition,name):
+def fq_name(partition, name):
if name is not None and not name.startswith('/'):
- return '/%s/%s' % (partition,name)
+ return '/%s/%s' % (partition, name)
return name
# Fully Qualified name (with partition) for a list
-def fq_list_names(partition,list_names):
+def fq_list_names(partition, list_names):
if list_names is None:
return None
- return map(lambda x: fq_name(partition,x),list_names)
-
-
-
+ return map(lambda x: fq_name(partition, x), list_names)
# New style
from abc import ABCMeta, abstractproperty
-from ansible.module_utils.six import with_metaclass
from collections import defaultdict
try:
@@ -158,7 +153,7 @@ except ImportError:
from ansible.module_utils.basic import *
-from ansible.module_utils.six import iteritems
+from ansible.module_utils.six import iteritems, with_metaclass
F5_COMMON_ARGS = dict(
@@ -187,7 +182,6 @@ F5_COMMON_ARGS = dict(
server_port=dict(
type='int',
default=443,
- required=False,
fallback=(env_fallback, ['F5_SERVER_PORT'])
),
state=dict(
@@ -286,7 +280,7 @@ class AnsibleF5Parameters(object):
def __init__(self, params=None):
self._values = defaultdict(lambda: None)
if params:
- for k,v in iteritems(params):
+ for k, v in iteritems(params):
if self.api_map is not None and k in self.api_map:
dict_to_use = self.api_map
map_key = self.api_map[k]
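The fq_name()/fq_list_names() helpers reformatted above simply prefix a BIG-IP partition unless the name is already fully qualified, for example:

    def fq_name(partition, name):
        if name is not None and not name.startswith('/'):
            return '/%s/%s' % (partition, name)
        return name

    fq_name('Common', 'http-pool')     # -> '/Common/http-pool'
    fq_name('Common', '/Other/pool')   # -> '/Other/pool' (already qualified)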
diff --git a/lib/ansible/module_utils/fortios.py b/lib/ansible/module_utils/fortios.py
index 8ce32eadf0..661ec499ce 100644
--- a/lib/ansible/module_utils/fortios.py
+++ b/lib/ansible/module_utils/fortios.py
@@ -33,31 +33,31 @@ from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
-#check for pyFG lib
+# check for pyFG lib
try:
from pyFG import FortiOS, FortiConfig
from pyFG.exceptions import CommandExecutionException, FailedCommit
- HAS_PYFG=True
+ HAS_PYFG = True
except ImportError:
- HAS_PYFG=False
+ HAS_PYFG = False
fortios_argument_spec = dict(
- file_mode = dict(type='bool', default=False),
- config_file = dict(type='path'),
- host = dict( ),
- username = dict( ),
- password = dict(type='str', no_log=True ),
- timeout = dict(type='int', default=60),
- vdom = dict(type='str', default=None ),
- backup = dict(type='bool', default=False),
- backup_path = dict(type='path'),
- backup_filename = dict(type='str'),
+ file_mode=dict(type='bool', default=False),
+ config_file=dict(type='path'),
+ host=dict(),
+ username=dict(),
+ password=dict(type='str', no_log=True),
+ timeout=dict(type='int', default=60),
+ vdom=dict(type='str'),
+ backup=dict(type='bool', default=False),
+ backup_path=dict(type='path'),
+ backup_filename=dict(type='str'),
)
fortios_required_if = [
['file_mode', False, ['host', 'username', 'password']],
['file_mode', True, ['config_file']],
- ['backup', True , ['backup_path'] ],
+ ['backup', True, ['backup_path']],
]
fortios_mutually_exclusive = [
@@ -67,12 +67,12 @@ fortios_mutually_exclusive = [
]
fortios_error_codes = {
- '-3':"Object not found",
- '-61':"Command error"
+ '-3': "Object not found",
+ '-61': "Command error"
}
-def backup(module,running_config):
+def backup(module, running_config):
backup_path = module.params['backup_path']
backup_filename = module.params['backup_filename']
if not os.path.exists(backup_path):
@@ -91,8 +91,6 @@ def backup(module,running_config):
module.fail_json(msg="Can't create backup file {0} Permission denied ?".format(filename))
-
-
class AnsibleFortios(object):
def __init__(self, module):
if not HAS_PYFG:
@@ -103,7 +101,6 @@ class AnsibleFortios(object):
}
self.module = module
-
def _connect(self):
if self.module.params['file_mode']:
self.forti_device = FortiOS('')
@@ -122,11 +119,10 @@ class AnsibleFortios(object):
e = get_exception()
self.module.fail_json(msg='Error connecting device. %s' % e)
-
def load_config(self, path):
self.path = path
self._connect()
- #load in file_mode
+ # load in file_mode
if self.module.params['file_mode']:
try:
f = open(self.module.params['config_file'], 'r')
@@ -135,10 +131,10 @@ class AnsibleFortios(object):
except IOError:
e = get_exception()
self.module.fail_json(msg='Error reading configuration file. %s' % e)
- self.forti_device.load_config(config_text=running, path = path)
+ self.forti_device.load_config(config_text=running, path=path)
else:
- #get config
+ # get config
try:
self.forti_device.load_config(path=path)
except Exception:
@@ -146,22 +142,21 @@ class AnsibleFortios(object):
e = get_exception()
self.module.fail_json(msg='Error reading running config. %s' % e)
- #set configs in object
+ # set configs in object
self.result['running_config'] = self.forti_device.running_config.to_text()
self.candidate_config = self.forti_device.candidate_config
- #backup if needed
+ # backup if needed
if self.module.params['backup']:
backup(self.module, self.forti_device.running_config.to_text())
-
def apply_changes(self):
change_string = self.forti_device.compare_config()
if change_string:
self.result['change_string'] = change_string
self.result['changed'] = True
- #Commit if not check mode
+ # Commit if not check mode
if change_string and not self.module.check_mode:
if self.module.params['file_mode']:
try:
@@ -175,35 +170,31 @@ class AnsibleFortios(object):
try:
self.forti_device.commit()
except FailedCommit:
- #Something's wrong (rollback is automatic)
+ # Something's wrong (rollback is automatic)
self.forti_device.close()
e = get_exception()
error_list = self.get_error_infos(e)
- self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % e.message )
+ self.module.fail_json(msg_error_list=error_list, msg="Unable to commit change, check your args, the error was %s" % e.message)
self.forti_device.close()
self.module.exit_json(**self.result)
-
def del_block(self, block_id):
self.forti_device.candidate_config[self.path].del_block(block_id)
-
def add_block(self, block_id, block):
self.forti_device.candidate_config[self.path][block_id] = block
-
def get_error_infos(self, cli_errors):
error_list = []
for errors in cli_errors.args:
for error in errors:
error_code = error[0]
error_string = error[1]
- error_type = fortios_error_codes.get(error_code,"unknown")
- error_list.append(dict(error_code=error_code, error_type=error_type, error_string= error_string))
+ error_type = fortios_error_codes.get(error_code, "unknown")
+ error_list.append(dict(error_code=error_code, error_type=error_type, error_string=error_string))
return error_list
def get_empty_configuration_block(self, block_name, block_type):
return FortiConfig(block_name, block_type)
-
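A small illustration of the error-code lookup that get_error_infos() relies on; codes missing from the table fall back to "unknown":

    fortios_error_codes = {
        '-3': "Object not found",
        '-61': "Command error"
    }

    def describe_error(error_code):
        return fortios_error_codes.get(error_code, "unknown")

    describe_error('-3')    # -> 'Object not found'
    describe_error('-99')   # -> 'unknown'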
diff --git a/lib/ansible/module_utils/gcdns.py b/lib/ansible/module_utils/gcdns.py
index c4b9cbd9db..fb5c74d1b4 100644
--- a/lib/ansible/module_utils/gcdns.py
+++ b/lib/ansible/module_utils/gcdns.py
@@ -27,9 +27,6 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
-from ansible.module_utils.gcp import gcp_connect
-from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
-
try:
from libcloud.dns.types import Provider
from libcloud.dns.providers import get_driver
@@ -37,9 +34,13 @@ try:
except ImportError:
HAS_LIBCLOUD_BASE = False
+from ansible.module_utils.gcp import gcp_connect
+from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
+
USER_AGENT_PRODUCT = "Ansible-gcdns"
USER_AGENT_VERSION = "v1"
+
def gcdns_connect(module, provider=None):
"""Return a GCP connection for Google Cloud DNS."""
if not HAS_LIBCLOUD_BASE:
@@ -48,6 +49,7 @@ def gcdns_connect(module, provider=None):
provider = provider or Provider.GOOGLE
return gcp_connect(module, provider, get_driver, USER_AGENT_PRODUCT, USER_AGENT_VERSION)
+
def unexpected_error_msg(error):
"""Create an error string based on passed in error."""
return gcp_error(error)
diff --git a/lib/ansible/module_utils/gce.py b/lib/ansible/module_utils/gce.py
index 1084b9dccd..c9d87542c3 100644
--- a/lib/ansible/module_utils/gce.py
+++ b/lib/ansible/module_utils/gce.py
@@ -25,10 +25,6 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-
-from ansible.module_utils.gcp import gcp_connect
-from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
try:
from libcloud.compute.types import Provider
@@ -37,9 +33,13 @@ try:
except ImportError:
HAS_LIBCLOUD_BASE = False
+from ansible.module_utils.gcp import gcp_connect
+from ansible.module_utils.gcp import unexpected_error_msg as gcp_error
+
USER_AGENT_PRODUCT = "Ansible-gce"
USER_AGENT_VERSION = "v1"
+
def gce_connect(module, provider=None):
"""Return a GCP connection for Google Compute Engine."""
if not HAS_LIBCLOUD_BASE:
@@ -48,6 +48,7 @@ def gce_connect(module, provider=None):
return gcp_connect(module, provider, get_driver, USER_AGENT_PRODUCT, USER_AGENT_VERSION)
+
def unexpected_error_msg(error):
"""Create an error string based on passed in error."""
return gcp_error(error)
diff --git a/lib/ansible/module_utils/gcp.py b/lib/ansible/module_utils/gcp.py
index 2ec38476bc..f18cad1440 100644
--- a/lib/ansible/module_utils/gcp.py
+++ b/lib/ansible/module_utils/gcp.py
@@ -502,8 +502,7 @@ class GCPUtils(object):
@staticmethod
def underscore_to_camel(txt):
- return txt.split('_')[0] + ''.join(x.capitalize()
- or '_' for x in txt.split('_')[1:])
+ return txt.split('_')[0] + ''.join(x.capitalize() or '_' for x in txt.split('_')[1:])
@staticmethod
def remove_non_gcp_params(params):
@@ -626,7 +625,7 @@ class GCPUtils(object):
# TODO(supertom): Isolate 'build-new-request' stuff.
resource_name_singular = GCPUtils.get_entity_name_from_resource_name(
resource_name)
- if op_resp['operationType'] == 'insert' or not 'entity_name' in parsed_url:
+ if op_resp['operationType'] == 'insert' or 'entity_name' not in parsed_url:
parsed_url['entity_name'] = GCPUtils.parse_gcp_url(op_resp['targetLink'])[
'entity_name']
args = {'project': project_id,
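A worked example of GCPUtils.underscore_to_camel() as written above: the first fragment is kept lower-case and each later fragment is capitalized.

    def underscore_to_camel(txt):
        return txt.split('_')[0] + ''.join(x.capitalize() or '_' for x in txt.split('_')[1:])

    underscore_to_camel('machine_type')        # -> 'machineType'
    underscore_to_camel('source_disk_image')   # -> 'sourceDiskImage'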
diff --git a/lib/ansible/module_utils/infinibox.py b/lib/ansible/module_utils/infinibox.py
index c16037e9a0..0b271211c1 100644
--- a/lib/ansible/module_utils/infinibox.py
+++ b/lib/ansible/module_utils/infinibox.py
@@ -82,9 +82,9 @@ def infinibox_argument_spec():
"""Return standard base dictionary used for the argument_spec argument in AnsibleModule"""
return dict(
- system = dict(required=True),
- user = dict(),
- password = dict(no_log=True),
+ system=dict(required=True),
+ user=dict(),
+ password=dict(no_log=True),
)
diff --git a/lib/ansible/module_utils/json_utils.py b/lib/ansible/module_utils/json_utils.py
index 480b258882..0b190ed26a 100644
--- a/lib/ansible/module_utils/json_utils.py
+++ b/lib/ansible/module_utils/json_utils.py
@@ -29,6 +29,7 @@ try:
except ImportError:
import simplejson as json
+
# NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any
# changes are propagated there.
def _filter_non_json_lines(data):
diff --git a/lib/ansible/module_utils/junos.py b/lib/ansible/module_utils/junos.py
index 31bfe8a5d1..b45acfb640 100644
--- a/lib/ansible/module_utils/junos.py
+++ b/lib/ansible/module_utils/junos.py
@@ -47,12 +47,13 @@ ARGS_DEFAULT_VALUE = {
'timeout': 10
}
+
def check_args(module, warnings):
provider = module.params['provider'] or {}
for key in junos_argument_spec:
if key not in ('provider',) and module.params[key]:
warnings.append('argument %s has been deprecated and will be '
- 'removed in a future version' % key)
+ 'removed in a future version' % key)
# set argument's default value if not provided in input
# This is done to avoid unwanted argument deprecation warning
@@ -66,6 +67,7 @@ def check_args(module, warnings):
if provider.get(param):
module.no_log_values.update(return_values(provider[param]))
+
def _validate_rollback_id(module, value):
try:
if not 0 <= int(value) <= 49:
@@ -73,6 +75,7 @@ def _validate_rollback_id(module, value):
except ValueError:
module.fail_json(msg='rollback must be between 0 and 49')
+
def load_configuration(module, candidate=None, action='merge', rollback=None, format='xml'):
if all((candidate is None, rollback is None)):
@@ -117,6 +120,7 @@ def load_configuration(module, candidate=None, action='merge', rollback=None, fo
cfg.append(candidate)
return send_request(module, obj)
+
def get_configuration(module, compare=False, format='xml', rollback='0'):
if format not in CONFIG_FORMATS:
module.fail_json(msg='invalid config format specified')
@@ -127,6 +131,7 @@ def get_configuration(module, compare=False, format='xml', rollback='0'):
xattrs['rollback'] = str(rollback)
return send_request(module, Element('get-configuration', xattrs))
+
def commit_configuration(module, confirm=False, check=False, comment=None, confirm_timeout=None):
obj = Element('commit-configuration')
if confirm:
@@ -141,6 +146,7 @@ def commit_configuration(module, confirm=False, check=False, comment=None, confi
subele.text = str(confirm_timeout)
return send_request(module, obj)
+
def command(module, command, format='text', rpc_only=False):
xattrs = {'format': format}
if rpc_only:
@@ -148,8 +154,14 @@ def command(module, command, format='text', rpc_only=False):
xattrs['format'] = 'text'
return send_request(module, Element('command', xattrs, text=command))
-lock_configuration = lambda x: send_request(x, Element('lock-configuration'))
-unlock_configuration = lambda x: send_request(x, Element('unlock-configuration'))
+
+def lock_configuration(x):
+ return send_request(x, Element('lock-configuration'))
+
+
+def unlock_configuration(x):
+ return send_request(x, Element('unlock-configuration'))
+
@contextmanager
def locked_config(module):
@@ -159,6 +171,7 @@ def locked_config(module):
finally:
unlock_configuration(module)
+
def get_diff(module):
reply = get_configuration(module, compare=True, format='text')
@@ -166,6 +179,7 @@ def get_diff(module):
if output is not None:
return to_text(output.text, encoding='latin1').strip()
+
def load_config(module, candidate, warnings, action='merge', commit=False, format='xml',
comment=None, confirm=False, confirm_timeout=None):
@@ -192,5 +206,6 @@ def load_config(module, candidate, warnings, action='merge', commit=False, forma
return diff
+
def get_param(module, key):
return module.params[key] or module.params['provider'].get(key)
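A hedged usage sketch of the lock/load/commit helpers above, assuming `module` and a `candidate` configuration supplied by the calling Junos module; locked_config() guarantees the unlock even if loading or committing raises:

    # One plausible flow, not the module's prescribed call sequence.
    with locked_config(module):
        load_configuration(module, candidate=candidate, action='merge')
        commit_configuration(module, comment='configured by ansible')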
diff --git a/lib/ansible/module_utils/known_hosts.py b/lib/ansible/module_utils/known_hosts.py
index 1e5d27856b..bda5ac33e2 100644
--- a/lib/ansible/module_utils/known_hosts.py
+++ b/lib/ansible/module_utils/known_hosts.py
@@ -153,7 +153,7 @@ def not_in_host_file(self, host):
if tokens[0].find(HASHED_KEY_MAGIC) == 0:
# this is a hashed known host entry
try:
- (kn_salt,kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|",2)
+ (kn_salt, kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|", 2)
hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
hash.update(host)
if hash.digest() == kn_host.decode('base64'):
@@ -204,7 +204,7 @@ def add_host_key(module, fqdn, port=22, key_type="rsa", create_dir=False):
if rc != 0 or not out:
msg = 'failed to retrieve hostkey'
if not out:
- msg += '. "%s" returned no matches.' % this_cmd
+ msg += '. "%s" returned no matches.' % this_cmd
else:
msg += ' using command "%s". [stdout]: %s' % (this_cmd, out)
diff --git a/lib/ansible/module_utils/lxd.py b/lib/ansible/module_utils/lxd.py
index e3eb078729..5b7c17dd42 100644
--- a/lib/ansible/module_utils/lxd.py
+++ b/lib/ansible/module_utils/lxd.py
@@ -38,12 +38,14 @@ except ImportError:
# httplib/http.client connection using unix domain socket
import socket
import ssl
+
try:
from httplib import HTTPConnection, HTTPSConnection
except ImportError:
# Python 3
from http.client import HTTPConnection, HTTPSConnection
+
class UnixHTTPConnection(HTTPConnection):
def __init__(self, path):
HTTPConnection.__init__(self, 'localhost')
@@ -54,11 +56,13 @@ class UnixHTTPConnection(HTTPConnection):
sock.connect(self.path)
self.sock = sock
+
class LXDClientException(Exception):
def __init__(self, msg, **kwargs):
self.msg = msg
self.kwargs = kwargs
+
class LXDClient(object):
def __init__(self, url, key_file=None, cert_file=None, debug=False):
"""LXD Client.
diff --git a/lib/ansible/module_utils/mysql.py b/lib/ansible/module_utils/mysql.py
index e6163c52fc..4255392af9 100644
--- a/lib/ansible/module_utils/mysql.py
+++ b/lib/ansible/module_utils/mysql.py
@@ -35,6 +35,7 @@ try:
except ImportError:
mysqldb_found = False
+
def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None,
connect_timeout=30):
config = {}
diff --git a/lib/ansible/module_utils/netcfg.py b/lib/ansible/module_utils/netcfg.py
index 5d2a4a91d2..d4f4a5ff4a 100644
--- a/lib/ansible/module_utils/netcfg.py
+++ b/lib/ansible/module_utils/netcfg.py
@@ -92,13 +92,20 @@ class ConfigLine(object):
assert isinstance(obj, ConfigLine), 'child must be of type `ConfigLine`'
self._children.append(obj)
+
def ignore_line(text, tokens=None):
for item in (tokens or DEFAULT_COMMENT_TOKENS):
if text.startswith(item):
return True
-_obj_to_text = lambda x: [o.text for o in x]
-_obj_to_raw = lambda x: [o.raw for o in x]
+
+def _obj_to_text(x):
+ return [o.text for o in x]
+
+
+def _obj_to_raw(x):
+ return [o.raw for o in x]
+
def _obj_to_block(objects, visited=None):
items = list()
@@ -110,6 +117,7 @@ def _obj_to_block(objects, visited=None):
items.append(child)
return _obj_to_raw(items)
+
def dumps(objects, output='block', comments=False):
if output == 'block':
items = _obj_to_block(objects)
@@ -130,6 +138,7 @@ def dumps(objects, output='block', comments=False):
return '\n'.join(items)
+
class NetworkConfig(object):
def __init__(self, indent=1, contents=None):
@@ -328,7 +337,7 @@ class NetworkConfig(object):
offset = 0
obj = None
- ## global config command
+ # global config command
if not parents:
for line in lines:
item = ConfigLine(line)
diff --git a/lib/ansible/module_utils/netcli.py b/lib/ansible/module_utils/netcli.py
index 9e7ed8790b..daa5518fe6 100644
--- a/lib/ansible/module_utils/netcli.py
+++ b/lib/ansible/module_utils/netcli.py
@@ -24,17 +24,16 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
import re
-import time
import shlex
+import time
-from ansible.module_utils.basic import BOOLEANS_TRUE, BOOLEANS_FALSE
-from ansible.module_utils.basic import get_exception
+from ansible.module_utils.basic import BOOLEANS_TRUE, BOOLEANS_FALSE, get_exception
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils.six.moves import zip
+
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
@@ -49,20 +48,23 @@ class FailedConditionsError(Exception):
super(FailedConditionsError, self).__init__(msg)
self.failed_conditions = failed_conditions
+
class FailedConditionalError(Exception):
def __init__(self, msg, failed_conditional):
super(FailedConditionalError, self).__init__(msg)
self.failed_conditional = failed_conditional
+
class AddCommandError(Exception):
def __init__(self, msg, command):
super(AddCommandError, self).__init__(msg)
self.command = command
+
class AddConditionError(Exception):
def __init__(self, msg, condition):
super(AddConditionError, self).__init__(msg)
- self.condition=condition
+ self.condition = condition
class Cli(object):
@@ -105,6 +107,7 @@ class Cli(object):
return responses
+
class Command(object):
def __init__(self, command, output=None, prompt=None, response=None,
@@ -122,6 +125,7 @@ class Command(object):
def __str__(self):
return self.command_string
+
class CommandRunner(object):
def __init__(self, module):
diff --git a/lib/ansible/module_utils/netconf.py b/lib/ansible/module_utils/netconf.py
index 580de33c2b..26ae60fed5 100644
--- a/lib/ansible/module_utils/netconf.py
+++ b/lib/ansible/module_utils/netconf.py
@@ -26,14 +26,13 @@
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from contextlib import contextmanager
-from xml.etree.ElementTree import Element, SubElement
-from xml.etree.ElementTree import tostring, fromstring
+from xml.etree.ElementTree import Element, SubElement, fromstring, tostring
from ansible.module_utils.connection import exec_command
-
NS_MAP = {'nc': "urn:ietf:params:xml:ns:netconf:base:1.0"}
+
def send_request(module, obj, check_rc=True):
request = tostring(obj)
rc, out, err = exec_command(module, request)
@@ -58,6 +57,7 @@ def send_request(module, obj, check_rc=True):
return warnings
return fromstring(out)
+
def children(root, iterable):
for item in iterable:
try:
@@ -65,33 +65,40 @@ def children(root, iterable):
except NameError:
ele = SubElement(root, item)
+
def lock(module, target='candidate'):
obj = Element('lock')
children(obj, ('target', target))
return send_request(module, obj)
+
def unlock(module, target='candidate'):
obj = Element('unlock')
children(obj, ('target', target))
return send_request(module, obj)
+
def commit(module):
return send_request(module, Element('commit'))
+
def discard_changes(module):
return send_request(module, Element('discard-changes'))
+
def validate(module):
obj = Element('validate')
children(obj, ('source', 'candidate'))
return send_request(module, obj)
+
def get_config(module, source='running', filter=None):
obj = Element('get-config')
children(obj, ('source', source))
children(obj, ('filter', filter))
return send_request(module, obj)
+
@contextmanager
def locked_config(module):
try:
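For context, a minimal standalone sketch of the RPC payloads these helpers assemble with ElementTree before handing them to exec_command(); get_config(module) above nests its elements the same way:

    from xml.etree.ElementTree import Element, SubElement, tostring

    rpc = Element('get-config')
    source = SubElement(rpc, 'source')
    SubElement(source, 'running')

    tostring(rpc)   # b'<get-config><source><running /></source></get-config>'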
diff --git a/lib/ansible/module_utils/network.py b/lib/ansible/module_utils/network.py
index 2103540947..ab22beebcf 100644
--- a/lib/ansible/module_utils/network.py
+++ b/lib/ansible/module_utils/network.py
@@ -52,6 +52,7 @@ NET_CONNECTION_ARGS = dict()
NET_CONNECTIONS = dict()
+
def _transitional_argument_spec():
argument_spec = {}
for key, value in iteritems(NET_TRANSPORT_ARGS):
@@ -59,6 +60,7 @@ def _transitional_argument_spec():
argument_spec[key] = value
return argument_spec
+
def to_list(val):
if isinstance(val, (list, tuple)):
return list(val)
@@ -75,12 +77,14 @@ class ModuleStub(object):
self.params[key] = value.get('default')
self.fail_json = fail_json
+
class NetworkError(Exception):
def __init__(self, msg, **kwargs):
super(NetworkError, self).__init__(msg)
self.kwargs = kwargs
+
class Config(object):
def __init__(self, connection):
@@ -185,6 +189,7 @@ class NetworkModule(AnsibleModule):
exc = get_exception()
self.fail_json(msg=to_native(exc))
+
def register_transport(transport, default=False):
def register(cls):
NET_CONNECTIONS[transport] = cls
@@ -193,6 +198,6 @@ def register_transport(transport, default=False):
return cls
return register
+
def add_argument(key, value):
NET_CONNECTION_ARGS[key] = value
-
diff --git a/lib/ansible/module_utils/network_common.py b/lib/ansible/module_utils/network_common.py
index add4ce18ea..c3d1a3dd76 100644
--- a/lib/ansible/module_utils/network_common.py
+++ b/lib/ansible/module_utils/network_common.py
@@ -24,9 +24,10 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-from ansible.module_utils.six import iteritems
+
from ansible.module_utils.basic import AnsibleFallbackNotFound
+from ansible.module_utils.six import iteritems
+
def to_list(val):
if isinstance(val, (list, tuple, set)):
@@ -36,6 +37,7 @@ def to_list(val):
else:
return list()
+
class ComplexDict(object):
"""Transforms a dict to with an argument spec
@@ -77,7 +79,7 @@ class ComplexDict(object):
if attr.get('read_from'):
spec = self._module.argument_spec.get(attr['read_from'])
if not spec:
- raise ValueError('argument_spec %s does not exist' % attr['read_from'])
+ raise ValueError('argument_spec %s does not exist' % attr['read_from'])
for key, value in iteritems(spec):
if key not in attr:
attr[key] = value
@@ -88,7 +90,6 @@ class ComplexDict(object):
self_has_key = True
attr['required'] = True
-
def _dict(self, value):
obj = {}
for name, attr in iteritems(self._attributes):
@@ -131,8 +132,7 @@ class ComplexDict(object):
if 'choices' in attr:
if value[name] not in attr['choices']:
- raise ValueError('%s must be one of %s, got %s' % \
- (name, ', '.join(attr['choices']), value[name]))
+ raise ValueError('%s must be one of %s, got %s' % (name, ', '.join(attr['choices']), value[name]))
if value[name] is not None:
value_type = attr.get('type', 'str')
@@ -141,6 +141,7 @@ class ComplexDict(object):
return value
+
class ComplexList(ComplexDict):
"""Extends ```ComplexDict``` to handle a list of dicts """
@@ -148,4 +149,3 @@ class ComplexList(ComplexDict):
if not isinstance(values, (list, tuple)):
raise TypeError('value must be an ordered iterable')
return [(super(ComplexList, self).__call__(v)) for v in values]
-
diff --git a/lib/ansible/module_utils/openstack.py b/lib/ansible/module_utils/openstack.py
index 4d86f04aee..afbd8857eb 100644
--- a/lib/ansible/module_utils/openstack.py
+++ b/lib/ansible/module_utils/openstack.py
@@ -30,6 +30,7 @@ import os
from ansible.module_utils.six import iteritems
+
def openstack_argument_spec():
# DEPRECATED: This argument spec is only used for the deprecated old
# OpenStack modules. It turns out that modern OpenStack auth is WAY
@@ -37,17 +38,17 @@ def openstack_argument_spec():
# Consume standard OpenStack environment variables.
# This is mainly only useful for ad-hoc command line operation as
# in playbooks one would assume variables would be used appropriately
- OS_AUTH_URL=os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
- OS_PASSWORD=os.environ.get('OS_PASSWORD', None)
- OS_REGION_NAME=os.environ.get('OS_REGION_NAME', None)
- OS_USERNAME=os.environ.get('OS_USERNAME', 'admin')
- OS_TENANT_NAME=os.environ.get('OS_TENANT_NAME', OS_USERNAME)
+ OS_AUTH_URL = os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
+ OS_PASSWORD = os.environ.get('OS_PASSWORD', None)
+ OS_REGION_NAME = os.environ.get('OS_REGION_NAME', None)
+ OS_USERNAME = os.environ.get('OS_USERNAME', 'admin')
+ OS_TENANT_NAME = os.environ.get('OS_TENANT_NAME', OS_USERNAME)
spec = dict(
- login_username = dict(default=OS_USERNAME),
- auth_url = dict(default=OS_AUTH_URL),
- region_name = dict(default=OS_REGION_NAME),
- availability_zone = dict(default=None),
+ login_username=dict(default=OS_USERNAME),
+ auth_url=dict(default=OS_AUTH_URL),
+ region_name=dict(default=OS_REGION_NAME),
+ availability_zone=dict(),
)
if OS_PASSWORD:
spec['login_password'] = dict(default=OS_PASSWORD)
@@ -59,6 +60,7 @@ def openstack_argument_spec():
spec['login_tenant_name'] = dict(required=True)
return spec
+
def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
ret = []
@@ -71,6 +73,7 @@ def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
ret.append(interface_spec['addr'])
return ret
+
def openstack_full_argument_spec(**kwargs):
spec = dict(
cloud=dict(default=None),
diff --git a/lib/ansible/module_utils/openswitch.py b/lib/ansible/module_utils/openswitch.py
index 60a6a33e02..5f1925fbab 100644
--- a/lib/ansible/module_utils/openswitch.py
+++ b/lib/ansible/module_utils/openswitch.py
@@ -37,6 +37,7 @@ from ansible.module_utils.urls import fetch_url, url_argument_spec
add_argument('use_ssl', dict(default=True, type='bool'))
add_argument('validate_certs', dict(default=True, type='bool'))
+
def get_opsidl():
extschema = restparser.parseSchema(settings.get('ext_schema'))
ovsschema = settings.get('ovs_schema')
@@ -129,7 +130,7 @@ class Rest(object):
def authorize(self, params, **kwargs):
raise NotImplementedError
- ### REST methods ###
+ # REST methods
def _url_builder(self, path):
if path[0] == '/':
@@ -160,12 +161,12 @@ class Rest(object):
def delete(self, path, data=None, headers=None):
return self.request('DELETE', path, data, headers)
- ### Command methods ###
+ # Command methods
def run_commands(self, commands):
raise NotImplementedError
- ### Config methods ###
+ # Config methods
def configure(self, commands):
path = '/system/full-configuration'
@@ -212,7 +213,7 @@ class Cli(CliBase):
NET_PASSWD_RE = re.compile(r"[\r\n]?password: $", re.I)
- ### Config methods ###
+ # Config methods
def configure(self, commands, **kwargs):
cmds = ['configure terminal']
diff --git a/lib/ansible/module_utils/ordnance.py b/lib/ansible/module_utils/ordnance.py
index d87176c51c..997da3dc68 100644
--- a/lib/ansible/module_utils/ordnance.py
+++ b/lib/ansible/module_utils/ordnance.py
@@ -1,5 +1,6 @@
_DEVICE_CONFIGS = {}
+
def get_config(module, flags=[]):
cmd = 'show running-config '
cmd += ' '.join(flags)
diff --git a/lib/ansible/module_utils/ovirt.py b/lib/ansible/module_utils/ovirt.py
index 2dfd65926e..571adb595c 100644
--- a/lib/ansible/module_utils/ovirt.py
+++ b/lib/ansible/module_utils/ovirt.py
@@ -504,7 +504,6 @@ class BaseModule(object):
after[k] = update[k]
return after
-
def create(
self,
entity=None,
@@ -579,9 +578,14 @@ class BaseModule(object):
# Wait for the entity to be created and to be in the defined state:
entity_service = self._service.service(entity.id)
- state_condition = lambda entity: entity
+ def state_condition(entity):
+ return entity
+
if result_state:
- state_condition = lambda entity: entity and entity.status == result_state
+
+ def state_condition(entity):
+ return entity and entity.status == result_state
+
wait(
service=entity_service,
condition=state_condition,
diff --git a/lib/ansible/module_utils/postgres.py b/lib/ansible/module_utils/postgres.py
index 951b4fec5e..457d160bd2 100644
--- a/lib/ansible/module_utils/postgres.py
+++ b/lib/ansible/module_utils/postgres.py
@@ -27,24 +27,21 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# standard ansible imports
-from ansible.module_utils.basic import get_exception
-
-# standard PG imports
-HAS_PSYCOPG2 = False
try:
import psycopg2
import psycopg2.extras
-except ImportError:
- pass
-else:
HAS_PSYCOPG2 = True
+except ImportError:
+ HAS_PSYCOPG2 = False
+from ansible.module_utils.basic import get_exception
from ansible.module_utils.six import iteritems
+
class LibraryError(Exception):
pass
+
def ensure_libs(sslrootcert=None):
if not HAS_PSYCOPG2:
raise LibraryError('psycopg2 is not installed. we need psycopg2.')
@@ -54,14 +51,14 @@ def ensure_libs(sslrootcert=None):
# no problems
return None
+
def postgres_common_argument_spec():
return dict(
- login_user = dict(default='postgres'),
- login_password = dict(default='', no_log=True),
- login_host = dict(default=''),
- login_unix_socket = dict(default=''),
- port = dict(type='int', default=5432),
- ssl_mode = dict(default='prefer', choices=['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full']),
- ssl_rootcert = dict(default=None),
+ login_user=dict(default='postgres'),
+ login_password=dict(default='', no_log=True),
+ login_host=dict(default=''),
+ login_unix_socket=dict(default=''),
+ port=dict(type='int', default=5432),
+ ssl_mode=dict(default='prefer', choices=['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full']),
+ ssl_rootcert=dict(),
)
-
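The try/except block above is the standard module_utils import-guard idiom: probe the optional dependency once, record the result in a flag, and raise a friendly error at call time rather than a traceback at import time. In isolation:

    class LibraryError(Exception):
        pass

    try:
        import psycopg2   # optional dependency
        HAS_PSYCOPG2 = True
    except ImportError:
        HAS_PSYCOPG2 = False

    def ensure_libs():
        if not HAS_PSYCOPG2:
            raise LibraryError('psycopg2 is not installed. we need psycopg2.')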
diff --git a/lib/ansible/module_utils/pycompat24.py b/lib/ansible/module_utils/pycompat24.py
index 765a3ab3b9..6a8ad52ff3 100644
--- a/lib/ansible/module_utils/pycompat24.py
+++ b/lib/ansible/module_utils/pycompat24.py
@@ -25,10 +25,10 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
import sys
+
def get_exception():
"""Get the current exception.
diff --git a/lib/ansible/module_utils/redhat.py b/lib/ansible/module_utils/redhat.py
index 9a1521ee47..cc79f0f858 100644
--- a/lib/ansible/module_utils/redhat.py
+++ b/lib/ansible/module_utils/redhat.py
@@ -125,9 +125,9 @@ class Rhsm(RegistrationBase):
# Pass supplied **kwargs as parameters to subscription-manager. Ignore
# non-configuration parameters and replace '_' with '.'. For example,
# 'server_hostname' becomes '--system.hostname'.
- for k,v in kwargs.items():
+ for k, v in kwargs.items():
if re.search(r'^(system|rhsm)_', k):
- args.append('--%s=%s' % (k.replace('_','.'), v))
+ args.append('--%s=%s' % (k.replace('_', '.'), v))
self.module.run_command(args, check_rc=True)
@@ -213,7 +213,7 @@ class RhsmPool(object):
def __init__(self, module, **kwargs):
self.module = module
- for k,v in kwargs.items():
+ for k, v in kwargs.items():
setattr(self, k, v)
def __str__(self):
@@ -255,7 +255,7 @@ class RhsmPools(object):
continue
# If a colon ':' is found, parse
elif ':' in line:
- (key, value) = line.split(':',1)
+ (key, value) = line.split(':', 1)
key = key.strip().replace(" ", "") # To unify
value = value.strip()
if key in ['ProductName', 'SubscriptionName']:
@@ -265,7 +265,7 @@ class RhsmPools(object):
# Associate value with most recently recorded product
products[-1].__setattr__(key, value)
# FIXME - log some warning?
- #else:
+ # else:
# warnings.warn("Unhandled subscription key/value: %s/%s" % (key,value))
return products
@@ -277,4 +277,3 @@ class RhsmPools(object):
for product in self.products:
if r.search(product._name):
yield product
-
diff --git a/lib/ansible/module_utils/service.py b/lib/ansible/module_utils/service.py
index dcf58e485b..54f07d29d6 100644
--- a/lib/ansible/module_utils/service.py
+++ b/lib/ansible/module_utils/service.py
@@ -25,20 +25,20 @@
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#
-import os
-import shlex
-import subprocess
import glob
-import select
+import os
import pickle
import platform
+import select
+import shlex
+import subprocess
import traceback
from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text
+
def sysv_is_enabled(name):
'''
This function will check if the service name supplied
@@ -48,6 +48,7 @@ def sysv_is_enabled(name):
'''
return bool(glob.glob('/etc/rc?.d/S??%s' % name))
+
def get_sysv_script(name):
'''
This function will return the expected path for an init script
@@ -62,6 +63,7 @@ def get_sysv_script(name):
return result
+
def sysv_exists(name):
'''
This function will return True or False depending on
@@ -71,6 +73,7 @@ def sysv_exists(name):
'''
return os.path.exists(get_sysv_script(name))
+
def fail_if_missing(module, found, service, msg=''):
'''
This function will return an error or exit gracefully depending on check mode status
@@ -87,6 +90,7 @@ def fail_if_missing(module, found, service, msg=''):
else:
module.fail_json(msg='Could not find the requested service %s: %s' % (service, msg))
+
def daemonize(module, cmd):
'''
Execute a command while detaching as a deamon, returns rc, stdout, and stderr.
@@ -100,10 +104,10 @@ def daemonize(module, cmd):
'''
# init some vars
- chunk = 4096 #FIXME: pass in as arg?
+ chunk = 4096 # FIXME: pass in as arg?
errors = 'surrogate_or_strict'
- #start it!
+ # start it!
try:
pipe = os.pipe()
pid = os.fork()
@@ -162,7 +166,7 @@ def daemonize(module, cmd):
fds = [p.stdout, p.stderr]
# loop reading output till its done
- output = { p.stdout: b(""), p.sterr: b("") }
+            output = {p.stdout: b(""), p.stderr: b("")}
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
if (rfd + wfd + efd) or p.poll():
@@ -207,6 +211,7 @@ def daemonize(module, cmd):
# py2 and py3)
return pickle.loads(to_bytes(return_data, errors=errors))
+
def check_ps(module, pattern):
# Set ps flags
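sysv_is_enabled() above reduces to a glob over the SysV start links; standalone:

    import glob

    def sysv_is_enabled(name):
        # True if any /etc/rc?.d/S??<name> start link exists for the service.
        return bool(glob.glob('/etc/rc?.d/S??%s' % name))

    sysv_is_enabled('sshd')   # e.g. matches /etc/rc3.d/S55sshd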
diff --git a/lib/ansible/module_utils/shell.py b/lib/ansible/module_utils/shell.py
index 094fe9e118..bb8a14bc11 100644
--- a/lib/ansible/module_utils/shell.py
+++ b/lib/ansible/module_utils/shell.py
@@ -281,5 +281,8 @@ class CliBase(object):
exc = get_exception()
raise NetworkError(to_native(exc))
- run_commands = lambda self, x: self.execute(to_list(x))
- exec_command = lambda self, x: self.shell.send(self.to_command(x))
+ def run_commands(self, x):
+ return self.execute(to_list(x))
+
+ def exec_command(self, x):
+ return self.shell.send(self.to_command(x))
diff --git a/lib/ansible/module_utils/splitter.py b/lib/ansible/module_utils/splitter.py
index 9c8ca69112..09605eda05 100644
--- a/lib/ansible/module_utils/splitter.py
+++ b/lib/ansible/module_utils/splitter.py
@@ -26,6 +26,7 @@
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
def _get_quote_state(token, quote_char):
'''
the goal of this block is to determine if the quoted string
@@ -36,7 +37,7 @@ def _get_quote_state(token, quote_char):
prev_char = None
for idx, cur_char in enumerate(token):
if idx > 0:
- prev_char = token[idx-1]
+ prev_char = token[idx - 1]
if cur_char in '"\'' and prev_char != '\\':
if quote_char:
if cur_char == quote_char:
@@ -45,13 +46,14 @@ def _get_quote_state(token, quote_char):
quote_char = cur_char
return quote_char
+
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
this function counts the number of opening/closing blocks for a
given opening/closing type and adjusts the current depth for that
block based on the difference
'''
- num_open = token.count(open_token)
+ num_open = token.count(open_token)
num_close = token.count(close_token)
if num_open != num_close:
cur_depth += (num_open - num_close)
@@ -59,6 +61,7 @@ def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
cur_depth = 0
return cur_depth
+
def split_args(args):
'''
Splits args on whitespace, but intelligently reassembles
@@ -99,13 +102,13 @@ def split_args(args):
quote_char = None
inside_quotes = False
- print_depth = 0 # used to count nested jinja2 {{ }} blocks
- block_depth = 0 # used to count nested jinja2 {% %} blocks
- comment_depth = 0 # used to count nested jinja2 {# #} blocks
+ print_depth = 0 # used to count nested jinja2 {{ }} blocks
+ block_depth = 0 # used to count nested jinja2 {% %} blocks
+ comment_depth = 0 # used to count nested jinja2 {# #} blocks
# now we loop over each split chunk, coalescing tokens if the white space
# split occurred within quotes or a jinja2 block of some kind
- for itemidx,item in enumerate(items):
+ for itemidx, item in enumerate(items):
# we split on spaces and newlines separately, so that we
# can tell which character we split on for reassembly
@@ -113,7 +116,7 @@ def split_args(args):
tokens = item.strip().split(' ')
line_continuation = False
- for idx,token in enumerate(tokens):
+ for idx, token in enumerate(tokens):
# if we hit a line continuation character, but
# we're not inside quotes, ignore it and continue
@@ -201,12 +204,13 @@ def split_args(args):
return params
+
def is_quoted(data):
return len(data) > 0 and (data[0] == '"' and data[-1] == '"' or data[0] == "'" and data[-1] == "'")
+
def unquote(data):
''' removes first and last quotes from a string, if the string starts and ends with the same quotes '''
if is_quoted(data):
return data[1:-1]
return data
-
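Roughly what split_args() above is expected to do (illustrative values): whitespace inside quotes or inside jinja2 blocks does not split the token.

    split_args('src=/tmp/src dest="/tmp/dest dir" mode=0644')
    # -> ['src=/tmp/src', 'dest="/tmp/dest dir"', 'mode=0644']

    split_args('msg={{ item.name }} state=present')
    # -> ['msg={{ item.name }}', 'state=present']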
diff --git a/lib/ansible/module_utils/univention_umc.py b/lib/ansible/module_utils/univention_umc.py
index e110c0746e..400e20bcc8 100644
--- a/lib/ansible/module_utils/univention_umc.py
+++ b/lib/ansible/module_utils/univention_umc.py
@@ -105,20 +105,20 @@ def uldap():
def construct():
try:
secret_file = open('/etc/ldap.secret', 'r')
- bind_dn = 'cn=admin,{}'.format(base_dn())
+ bind_dn = 'cn=admin,{}'.format(base_dn())
except IOError: # pragma: no cover
secret_file = open('/etc/machine.secret', 'r')
- bind_dn = config_registry()["ldap/hostdn"]
- pwd_line = secret_file.readline()
- pwd = re.sub('\n', '', pwd_line)
+ bind_dn = config_registry()["ldap/hostdn"]
+ pwd_line = secret_file.readline()
+ pwd = re.sub('\n', '', pwd_line)
import univention.admin.uldap
return univention.admin.uldap.access(
- host = config_registry()['ldap/master'],
- base = base_dn(),
- binddn = bind_dn,
- bindpw = pwd,
- start_tls = 1
+ host=config_registry()['ldap/master'],
+ base=base_dn(),
+ binddn=bind_dn,
+ bindpw=pwd,
+ start_tls=1,
)
return _singleton('uldap', construct)
diff --git a/lib/ansible/module_utils/urls.py b/lib/ansible/module_utils/urls.py
index b7ff6076f1..74cb1a1b8c 100644
--- a/lib/ansible/module_utils/urls.py
+++ b/lib/ansible/module_utils/urls.py
@@ -206,17 +206,14 @@ except ImportError:
HAS_MATCH_HOSTNAME = False
if not HAS_MATCH_HOSTNAME:
- ###
- ### The following block of code is under the terms and conditions of the
- ### Python Software Foundation License
- ###
+ # The following block of code is under the terms and conditions of the
+ # Python Software Foundation License
"""The match_hostname() function from Python 3.4, essential when using SSL."""
class CertificateError(ValueError):
pass
-
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
@@ -269,7 +266,6 @@ if not HAS_MATCH_HOSTNAME:
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
-
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
@@ -299,20 +295,13 @@ if not HAS_MATCH_HOSTNAME:
return
dnsnames.append(value)
if len(dnsnames) > 1:
- raise CertificateError("hostname %r "
- "doesn't match either of %s"
- % (hostname, ', '.join(map(repr, dnsnames))))
+ raise CertificateError("hostname %r " "doesn't match either of %s" % (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
- raise CertificateError("hostname %r "
- "doesn't match %r"
- % (hostname, dnsnames[0]))
+ raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
else:
- raise CertificateError("no appropriate commonName or "
- "subjectAltName fields were found")
+ raise CertificateError("no appropriate commonName or subjectAltName fields were found")
- ###
- ### End of Python Software Foundation Licensed code
- ###
+ # End of Python Software Foundation Licensed code
HAS_MATCH_HOSTNAME = True
@@ -399,7 +388,7 @@ if hasattr(httplib, 'HTTPSConnection') and hasattr(urllib_request, 'HTTPSHandler
self.sock = self.context.wrap_socket(sock, server_hostname=server_hostname)
elif HAS_URLLIB3_SSL_WRAP_SOCKET:
self.sock = ssl_wrap_socket(sock, keyfile=self.key_file, cert_reqs=ssl.CERT_NONE, certfile=self.cert_file, ssl_version=PROTOCOL,
- server_hostname=server_hostname)
+ server_hostname=server_hostname)
else:
self.sock = ssl.wrap_socket(sock, keyfile=self.key_file, certfile=self.cert_file, ssl_version=PROTOCOL)
@@ -448,24 +437,24 @@ def generic_urlparse(parts):
if hasattr(parts, 'netloc'):
# urlparse is newer, just read the fields straight
# from the parts object
- generic_parts['scheme'] = parts.scheme
- generic_parts['netloc'] = parts.netloc
- generic_parts['path'] = parts.path
- generic_parts['params'] = parts.params
- generic_parts['query'] = parts.query
+ generic_parts['scheme'] = parts.scheme
+ generic_parts['netloc'] = parts.netloc
+ generic_parts['path'] = parts.path
+ generic_parts['params'] = parts.params
+ generic_parts['query'] = parts.query
generic_parts['fragment'] = parts.fragment
generic_parts['username'] = parts.username
generic_parts['password'] = parts.password
generic_parts['hostname'] = parts.hostname
- generic_parts['port'] = parts.port
+ generic_parts['port'] = parts.port
else:
# we have to use indexes, and then parse out
# the other parts not supported by indexing
- generic_parts['scheme'] = parts[0]
- generic_parts['netloc'] = parts[1]
- generic_parts['path'] = parts[2]
- generic_parts['params'] = parts[3]
- generic_parts['query'] = parts[4]
+ generic_parts['scheme'] = parts[0]
+ generic_parts['netloc'] = parts[1]
+ generic_parts['path'] = parts[2]
+ generic_parts['params'] = parts[3]
+ generic_parts['query'] = parts[4]
generic_parts['fragment'] = parts[5]
# get the username, password, etc.
try:
@@ -488,12 +477,12 @@ def generic_urlparse(parts):
generic_parts['username'] = username
generic_parts['password'] = password
generic_parts['hostname'] = hostname
- generic_parts['port'] = port
+ generic_parts['port'] = port
except:
generic_parts['username'] = None
generic_parts['password'] = None
generic_parts['hostname'] = parts[1]
- generic_parts['port'] = None
+ generic_parts['port'] = None
return generic_parts
@@ -551,9 +540,8 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True):
if do_redirect:
# be conciliant with URIs containing a space
newurl = newurl.replace(' ', '%20')
- newheaders = dict((k,v) for k,v in req.headers.items()
- if k.lower() not in ("content-length", "content-type")
- )
+ newheaders = dict((k, v) for k, v in req.headers.items()
+ if k.lower() not in ("content-length", "content-type"))
try:
# Python 2-3.3
origin_req_host = req.get_origin_req_host()
@@ -561,9 +549,9 @@ def RedirectHandlerFactory(follow_redirects=None, validate_certs=True):
# Python 3.4+
origin_req_host = req.origin_req_host
return urllib_request.Request(newurl,
- headers=newheaders,
- origin_req_host=origin_req_host,
- unverifiable=True)
+ headers=newheaders,
+ origin_req_host=origin_req_host,
+ unverifiable=True)
else:
raise urllib_error.HTTPError(req.get_full_url(), code, msg, hdrs, fp)
@@ -660,7 +648,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
dir_contents = os.listdir(path)
for f in dir_contents:
full_path = os.path.join(path, f)
- if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt','.pem'):
+ if os.path.isfile(full_path) and os.path.splitext(f)[1] in ('.crt', '.pem'):
try:
cert_file = open(full_path, 'rb')
cert = cert_file.read()
@@ -738,7 +726,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
if proxy_parts.get('scheme') == 'http':
s.sendall(self.CONNECT_COMMAND % (self.hostname, self.port))
if proxy_parts.get('username'):
- credentials = "%s:%s" % (proxy_parts.get('username',''), proxy_parts.get('password',''))
+ credentials = "%s:%s" % (proxy_parts.get('username', ''), proxy_parts.get('password', ''))
s.sendall(b('Proxy-Authorization: Basic %s\r\n') % base64.b64encode(to_bytes(credentials, errors='surrogate_or_strict')).strip())
s.sendall(b('\r\n'))
connect_result = b("")
@@ -767,7 +755,7 @@ class SSLValidationHandler(urllib_request.BaseHandler):
ssl_s = ssl.wrap_socket(s, ca_certs=tmp_ca_cert_path, cert_reqs=ssl.CERT_REQUIRED, ssl_version=PROTOCOL)
match_hostname(ssl_s.getpeercert(), self.hostname)
# close the ssl connection
- #ssl_s.unwrap()
+ # ssl_s.unwrap()
s.close()
except (ssl.SSLError, CertificateError):
e = get_exception()
@@ -923,7 +911,7 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
data = to_bytes(data, nonstring='passthru')
if method:
- if method.upper() not in ('OPTIONS','GET','HEAD','POST','PUT','DELETE','TRACE','CONNECT','PATCH'):
+ if method.upper() not in ('OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'TRACE', 'CONNECT', 'PATCH'):
raise ConnectionError('invalid HTTP request method; %s' % method.upper())
request = RequestWithMethod(url, method.upper(), data)
else:
@@ -951,7 +939,7 @@ def open_url(url, data=None, headers=None, method=None, use_proxy=True,
request.add_header(header, headers[header])
urlopen_args = [request, None]
- if sys.version_info >= (2,6,0):
+ if sys.version_info >= (2, 6, 0):
# urlopen in python prior to 2.6.0 did not
# have a timeout parameter
urlopen_args.append(timeout)
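The redirect handler above rebuilds the header dict without content-length/content-type before re-issuing the request; the same filtering in isolation:

    req_headers = {'Content-Length': '42', 'Content-Type': 'application/json',
                   'Authorization': 'Basic dXNlcjpwYXNz'}

    newheaders = dict((k, v) for k, v in req_headers.items()
                      if k.lower() not in ("content-length", "content-type"))

    # newheaders -> {'Authorization': 'Basic dXNlcjpwYXNz'}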
diff --git a/lib/ansible/module_utils/vca.py b/lib/ansible/module_utils/vca.py
index 4adc3fd07b..7750a51090 100644
--- a/lib/ansible/module_utils/vca.py
+++ b/lib/ansible/module_utils/vca.py
@@ -15,6 +15,7 @@
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
+
try:
from pyvcloud.vcloudair import VCA
HAS_PYVCLOUD = True
@@ -29,16 +30,18 @@ LOGIN_HOST = {'vca': 'vca.vmware.com', 'vchs': 'vchs.vmware.com'}
DEFAULT_SERVICE_TYPE = 'vca'
DEFAULT_VERSION = '5.7'
+
class VcaError(Exception):
def __init__(self, msg, **kwargs):
self.kwargs = kwargs
super(VcaError, self).__init__(msg)
+
def vca_argument_spec():
return dict(
username=dict(type='str', aliases=['user'], required=True),
- password=dict(type='str', aliases=['pass','passwd'], required=True, no_log=True),
+ password=dict(type='str', aliases=['pass', 'passwd'], required=True, no_log=True),
org=dict(),
service_id=dict(),
instance_id=dict(),
@@ -50,6 +53,7 @@ def vca_argument_spec():
verify_certs=dict(type='bool', default=True)
)
+
class VcaAnsibleModule(AnsibleModule):
def __init__(self, *args, **kwargs):
@@ -193,7 +197,6 @@ class VcaAnsibleModule(AnsibleModule):
self.exit_json(**kwargs)
-
# -------------------------------------------------------------
# 9/18/2015 @privateip
# All of the functions below here were migrated from the original
@@ -206,6 +209,7 @@ VCA_REQ_ARGS = ['instance_id', 'vdc_name']
VCHS_REQ_ARGS = ['service_id']
VCD_REQ_ARGS = []
+
def _validate_module(module):
if not HAS_PYVCLOUD:
module.fail_json(msg="python module pyvcloud is needed for this module")
@@ -216,19 +220,19 @@ def _validate_module(module):
for arg in VCA_REQ_ARGS:
if module.params.get(arg) is None:
module.fail_json(msg="argument %s is mandatory when service type "
- "is vca" % arg)
+ "is vca" % arg)
if service_type == 'vchs':
for arg in VCHS_REQ_ARGS:
if module.params.get(arg) is None:
module.fail_json(msg="argument %s is mandatory when service type "
- "is vchs" % arg)
+ "is vchs" % arg)
if service_type == 'vcd':
for arg in VCD_REQ_ARGS:
if module.params.get(arg) is None:
module.fail_json(msg="argument %s is mandatory when service type "
- "is vcd" % arg)
+ "is vcd" % arg)
def serialize_instances(instance_list):
@@ -237,6 +241,7 @@ def serialize_instances(instance_list):
instances.append(dict(apiUrl=i['apiUrl'], instance_id=i['id']))
return instances
+
def _vca_login(vca, password, instance):
if not vca.login(password=password):
raise VcaError("Login Failed: Please check username or password",
@@ -245,10 +250,11 @@ def _vca_login(vca, password, instance):
if not vca.login_to_instance_sso(instance=instance):
s_json = serialize_instances(vca.instances)
raise VcaError("Login to Instance failed: Seems like instance_id provided "
- "is wrong .. Please check", valid_instances=s_json)
+ "is wrong .. Please check", valid_instances=s_json)
return vca
+
def _vchs_login(vca, password, service, org):
if not vca.login(password=password):
raise VcaError("Login Failed: Please check username or password",
@@ -256,7 +262,7 @@ def _vchs_login(vca, password, service, org):
if not vca.login_to_org(service, org):
raise VcaError("Failed to login to org, Please check the orgname",
- error=vca.response.content)
+ error=vca.response.content)
def _vcd_login(vca, password, org):
@@ -272,6 +278,7 @@ def _vcd_login(vca, password, org):
if not vca.login(token=vca.token, org=org, org_url=vca.vcloud_session.org_url):
raise VcaError("Failed to login to org", error=vca.response.content)
+
def vca_login(module):
service_type = module.params.get('service_type')
username = module.params.get('username')
@@ -323,8 +330,3 @@ def vca_login(module):
module.fail_json(msg=e.message, **e.kwargs)
return vca
-
-
-
-
-
diff --git a/lib/ansible/playbook/__init__.py b/lib/ansible/playbook/__init__.py
index bbf1cdf3bb..c47025d5eb 100644
--- a/lib/ansible/playbook/__init__.py
+++ b/lib/ansible/playbook/__init__.py
@@ -45,7 +45,7 @@ class Playbook:
# be either a play or an include statement
self._entries = []
self._basedir = to_text(os.getcwd(), errors='surrogate_or_strict')
- self._loader = loader
+ self._loader = loader
self._file_name = None
@staticmethod
diff --git a/lib/ansible/playbook/attribute.py b/lib/ansible/playbook/attribute.py
index a338dc834f..5eab1753ff 100644
--- a/lib/ansible/playbook/attribute.py
+++ b/lib/ansible/playbook/attribute.py
@@ -21,6 +21,7 @@ __metaclass__ = type
from copy import deepcopy
+
class Attribute:
def __init__(self, isa=None, private=False, default=None, required=False, listof=None, priority=0, class_type=None, always_post_validate=False,
diff --git a/lib/ansible/playbook/base.py b/lib/ansible/playbook/base.py
index eaddab0076..cdafd5f25d 100644
--- a/lib/ansible/playbook/base.py
+++ b/lib/ansible/playbook/base.py
@@ -149,21 +149,21 @@ class BaseMeta(type):
class Base(with_metaclass(BaseMeta, object)):
# connection/transport
- _connection = FieldAttribute(isa='string')
- _port = FieldAttribute(isa='int')
- _remote_user = FieldAttribute(isa='string')
+ _connection = FieldAttribute(isa='string')
+ _port = FieldAttribute(isa='int')
+ _remote_user = FieldAttribute(isa='string')
# variables
- _vars = FieldAttribute(isa='dict', priority=100, inherit=False)
+ _vars = FieldAttribute(isa='dict', priority=100, inherit=False)
# flags and misc. settings
- _environment = FieldAttribute(isa='list')
- _no_log = FieldAttribute(isa='bool')
- _always_run = FieldAttribute(isa='bool')
- _run_once = FieldAttribute(isa='bool')
- _ignore_errors = FieldAttribute(isa='bool')
- _check_mode = FieldAttribute(isa='bool')
- _any_errors_fatal = FieldAttribute(isa='bool', default=False, always_post_validate=True)
+ _environment = FieldAttribute(isa='list')
+ _no_log = FieldAttribute(isa='bool')
+ _always_run = FieldAttribute(isa='bool')
+ _run_once = FieldAttribute(isa='bool')
+ _ignore_errors = FieldAttribute(isa='bool')
+ _check_mode = FieldAttribute(isa='bool')
+ _any_errors_fatal = FieldAttribute(isa='bool', default=False, always_post_validate=True)
# param names which have been deprecated/removed
DEPRECATED_ATTRIBUTES = [
@@ -180,7 +180,7 @@ class Base(with_metaclass(BaseMeta, object)):
# other internal params
self._validated = False
- self._squashed = False
+ self._squashed = False
self._finalized = False
# every object gets a random uuid:
@@ -200,13 +200,13 @@ class Base(with_metaclass(BaseMeta, object)):
print("DUMPING OBJECT ------------------------------------------------------")
print("%s- %s (%s, id=%s)" % (" " * depth, self.__class__.__name__, self, id(self)))
if hasattr(self, '_parent') and self._parent:
- self._parent.dump_me(depth+2)
+ self._parent.dump_me(depth + 2)
dep_chain = self._parent.get_dep_chain()
if dep_chain:
for dep in dep_chain:
- dep.dump_me(depth+2)
+ dep.dump_me(depth + 2)
if hasattr(self, '_play') and self._play:
- self._play.dump_me(depth+2)
+ self._play.dump_me(depth + 2)
def preprocess_data(self, ds):
''' infrequently used method to do some pre-processing of legacy terms '''
@@ -405,12 +405,12 @@ class Base(with_metaclass(BaseMeta, object)):
)
value = value.split(',')
else:
- value = [ value ]
+ value = [value]
if attribute.listof is not None:
for item in value:
if not isinstance(item, attribute.listof):
- raise AnsibleParserError("the field '%s' should be a list of %s,"
- " but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
+ raise AnsibleParserError("the field '%s' should be a list of %s, "
+ "but the item '%s' is a %s" % (name, attribute.listof, item, type(item)), obj=self.get_ds())
elif attribute.required and attribute.listof == string_types:
if item is None or item.strip() == "":
raise AnsibleParserError("the field '%s' is required, and cannot have empty values" % (name,), obj=self.get_ds())
@@ -423,7 +423,7 @@ class Base(with_metaclass(BaseMeta, object)):
else:
# Making a list like this handles strings of
# text and bytes properly
- value = [ value ]
+ value = [value]
if not isinstance(value, set):
value = set(value)
elif attribute.isa == 'dict':
@@ -440,12 +440,12 @@ class Base(with_metaclass(BaseMeta, object)):
setattr(self, name, value)
except (TypeError, ValueError) as e:
- raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s."
- " Error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
+ raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s. "
+ "The error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
except (AnsibleUndefinedVariable, UndefinedError) as e:
if templar._fail_on_undefined_errors and name != 'name':
- raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined."
- " The error was: %s" % (name,e), obj=self.get_ds())
+ raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined. "
+ "The error was: %s" % (name, e), obj=self.get_ds())
self._finalized = True
@@ -490,16 +490,16 @@ class Base(with_metaclass(BaseMeta, object)):
'''
if not isinstance(value, list):
- value = [ value ]
+ value = [value]
if not isinstance(new_value, list):
- new_value = [ new_value ]
+ new_value = [new_value]
if prepend:
combined = new_value + value
else:
combined = value + new_value
- return [i for i,_ in itertools.groupby(combined) if i is not None]
+ return [i for i, _ in itertools.groupby(combined) if i is not None]
def dump_attrs(self):
'''
diff --git a/lib/ansible/playbook/become.py b/lib/ansible/playbook/become.py
index ce85047fdf..b3f82270af 100644
--- a/lib/ansible/playbook/become.py
+++ b/lib/ansible/playbook/become.py
@@ -29,13 +29,14 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class Become:
# Privilege escalation
- _become = FieldAttribute(isa='bool')
- _become_method = FieldAttribute(isa='string')
- _become_user = FieldAttribute(isa='string')
- _become_flags = FieldAttribute(isa='string')
+ _become = FieldAttribute(isa='bool')
+ _become_method = FieldAttribute(isa='string')
+ _become_user = FieldAttribute(isa='string')
+ _become_flags = FieldAttribute(isa='string')
def __init__(self):
super(Become, self).__init__()
@@ -44,8 +45,8 @@ class Become:
# Fail out if user specifies conflicting privilege escalations
has_become = 'become' in ds or 'become_user'in ds
- has_sudo = 'sudo' in ds or 'sudo_user' in ds
- has_su = 'su' in ds or 'su_user' in ds
+ has_sudo = 'sudo' in ds or 'sudo_user' in ds
+ has_su = 'su' in ds or 'su_user' in ds
if has_become:
msg = 'The become params ("become", "become_user") and'
@@ -101,4 +102,3 @@ class Become:
become_method = C.DEFAULT_BECOME_METHOD
if become_user is None:
become_user = C.DEFAULT_BECOME_USER
-
diff --git a/lib/ansible/playbook/block.py b/lib/ansible/playbook/block.py
index 888bf216a8..432db55339 100644
--- a/lib/ansible/playbook/block.py
+++ b/lib/ansible/playbook/block.py
@@ -28,29 +28,30 @@ from ansible.playbook.helpers import load_list_of_tasks
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
+
class Block(Base, Become, Conditional, Taggable):
# main block fields containing the task lists
- _block = FieldAttribute(isa='list', default=[], inherit=False)
- _rescue = FieldAttribute(isa='list', default=[], inherit=False)
- _always = FieldAttribute(isa='list', default=[], inherit=False)
+ _block = FieldAttribute(isa='list', default=[], inherit=False)
+ _rescue = FieldAttribute(isa='list', default=[], inherit=False)
+ _always = FieldAttribute(isa='list', default=[], inherit=False)
# other fields
- _delegate_to = FieldAttribute(isa='string')
- _delegate_facts = FieldAttribute(isa='bool', default=False)
- _name = FieldAttribute(isa='string', default='')
+ _delegate_to = FieldAttribute(isa='string')
+ _delegate_facts = FieldAttribute(isa='bool', default=False)
+ _name = FieldAttribute(isa='string', default='')
# for future consideration? this would be functionally
# similar to the 'else' clause for exceptions
- #_otherwise = FieldAttribute(isa='list')
+ # _otherwise = FieldAttribute(isa='list')
def __init__(self, play=None, parent_block=None, role=None, task_include=None, use_handlers=False, implicit=False):
- self._play = play
- self._role = role
- self._parent = None
- self._dep_chain = None
+ self._play = play
+ self._role = role
+ self._parent = None
+ self._dep_chain = None
self._use_handlers = use_handlers
- self._implicit = implicit
+ self._implicit = implicit
# end of role flag
self._eor = False
@@ -182,9 +183,9 @@ class Block(Base, Become, Conditional, Taggable):
return new_task_list
new_me = super(Block, self).copy()
- new_me._play = self._play
+ new_me._play = self._play
new_me._use_handlers = self._use_handlers
- new_me._eor = self._eor
+ new_me._eor = self._eor
if self._dep_chain is not None:
new_me._dep_chain = self._dep_chain[:]
@@ -194,7 +195,7 @@ class Block(Base, Become, Conditional, Taggable):
new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)
if not exclude_tasks:
- new_me.block = _dupe_task_list(self.block or [], new_me)
+ new_me.block = _dupe_task_list(self.block or [], new_me)
new_me.rescue = _dupe_task_list(self.rescue or [], new_me)
new_me.always = _dupe_task_list(self.always or [], new_me)
@@ -355,7 +356,7 @@ class Block(Base, Become, Conditional, Taggable):
def evaluate_block(block):
new_block = self.copy(exclude_tasks=True)
- new_block.block = evaluate_and_append_task(block.block)
+ new_block.block = evaluate_and_append_task(block.block)
new_block.rescue = evaluate_and_append_task(block.rescue)
new_block.always = evaluate_and_append_task(block.always)
return new_block
@@ -385,4 +386,3 @@ class Block(Base, Become, Conditional, Taggable):
return self._parent.all_parents_static()
return True
-
diff --git a/lib/ansible/playbook/conditional.py b/lib/ansible/playbook/conditional.py
index b683614462..310703ca3d 100644
--- a/lib/ansible/playbook/conditional.py
+++ b/lib/ansible/playbook/conditional.py
@@ -41,6 +41,7 @@ DEFINED_REGEX = re.compile(r'(hostvars\[.+\]|[\w_]+)\s+(not\s+is|is|is\s+not)\s+
LOOKUP_REGEX = re.compile(r'lookup\s*\(')
VALID_VAR_REGEX = re.compile("^[_A-Za-z][_a-zA-Z0-9]*$")
+
class Conditional:
'''
@@ -63,7 +64,7 @@ class Conditional:
def _validate_when(self, attr, name, value):
if not isinstance(value, list):
- setattr(self, name, [ value ])
+ setattr(self, name, [value])
def _get_attr_when(self):
'''
@@ -234,7 +235,4 @@ class Conditional:
# trigger the AnsibleUndefinedVariable exception again below
raise
except Exception as new_e:
- raise AnsibleUndefinedVariable(
- "error while evaluating conditional (%s): %s" % (original, e)
- )
-
+ raise AnsibleUndefinedVariable("error while evaluating conditional (%s): %s" % (original, e))
diff --git a/lib/ansible/playbook/handler.py b/lib/ansible/playbook/handler.py
index dd8ebae86a..b0452c577a 100644
--- a/lib/ansible/playbook/handler.py
+++ b/lib/ansible/playbook/handler.py
@@ -22,6 +22,7 @@ __metaclass__ = type
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.task import Task
+
class Handler(Task):
_listen = FieldAttribute(isa='list')
@@ -41,7 +42,7 @@ class Handler(Task):
return t.load_data(data, variable_manager=variable_manager, loader=loader)
def flag_for_host(self, host):
- #assert instanceof(host, Host)
+ # assert instanceof(host, Host)
if host not in self._flagged_hosts:
self._flagged_hosts.append(host)
diff --git a/lib/ansible/playbook/handler_task_include.py b/lib/ansible/playbook/handler_task_include.py
index 4cf573ecbd..6fc7de3f44 100644
--- a/lib/ansible/playbook/handler_task_include.py
+++ b/lib/ansible/playbook/handler_task_include.py
@@ -19,9 +19,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-#from ansible.inventory.host import Host
-from ansible.playbook.task_include import TaskInclude
+# from ansible.inventory.host import Host
from ansible.playbook.handler import Handler
+from ansible.playbook.task_include import TaskInclude
+
class HandlerTaskInclude(Handler, TaskInclude):
@@ -29,4 +30,3 @@ class HandlerTaskInclude(Handler, TaskInclude):
def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
t = HandlerTaskInclude(block=block, role=role, task_include=task_include)
return t.load_data(data, variable_manager=variable_manager, loader=loader)
-
diff --git a/lib/ansible/playbook/helpers.py b/lib/ansible/playbook/helpers.py
index 63dd2bd2fa..f2a5b6fb33 100644
--- a/lib/ansible/playbook/helpers.py
+++ b/lib/ansible/playbook/helpers.py
@@ -179,13 +179,13 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
include_target = templar.template(t.args['_raw_params'])
except AnsibleUndefinedVariable:
raise AnsibleParserError(
- "Error when evaluating variable in include name: %s.\n\n" \
- "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
- "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
+ "Error when evaluating variable in include name: %s.\n\n"
+ "When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n"
+ "or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n"
"sources like group or host vars." % t.args['_raw_params'],
obj=task_ds,
suppress_extended_error=True,
- )
+ )
if t._role:
include_file = loader.path_dwim_relative(t._role._role_path, subdir, include_target)
else:
@@ -209,11 +209,11 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
raise
display.deprecated(
- "Included file '%s' not found, however since this include is not " \
- "explicitly marked as 'static: yes', we will try and include it dynamically " \
- "later. In the future, this will be an error unless 'static: no' is used " \
- "on the include task. If you do not want missing includes to be considered " \
- "dynamic, use 'static: yes' on the include or set the global ansible.cfg " \
+ "Included file '%s' not found, however since this include is not "
+ "explicitly marked as 'static: yes', we will try and include it dynamically "
+ "later. In the future, this will be an error unless 'static: no' is used "
+ "on the include task. If you do not want missing includes to be considered "
+ "dynamic, use 'static: yes' on the include or set the global ansible.cfg "
"options to make all inclues static for tasks and/or handlers" % include_file, version="2.7"
)
task_list.append(t)
@@ -242,7 +242,7 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
if len(tags) > 0:
if len(ti_copy.tags) > 0:
raise AnsibleParserError(
- "Include tasks should not specify tags in more than one way (both via args and directly on the task). " \
+ "Include tasks should not specify tags in more than one way (both via args and directly on the task). "
"Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
obj=task_ds,
suppress_extended_error=True,
@@ -275,8 +275,8 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
role=role,
task_include=None,
variable_manager=variable_manager,
- loader=loader
- )
+ loader=loader,
+ )
# 1. the user has set the 'static' option to false or true
# 2. one of the appropriate config options was set
@@ -293,9 +293,11 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
if not templar.is_template(ir.args[param]):
needs_templating = True
break
- is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
- (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
- (not needs_templating and ir.all_parents_static() and not ir.loop)
+ is_static = (
+ C.DEFAULT_TASK_INCLUDES_STATIC or
+ (use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or
+ (not needs_templating and ir.all_parents_static() and not ir.loop)
+ )
display.debug('Determined that if include_role static is %s' % str(is_static))
if is_static:
# uses compiled list from object
@@ -331,4 +333,3 @@ def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None,
roles.append(i)
return roles
-
diff --git a/lib/ansible/playbook/included_file.py b/lib/ansible/playbook/included_file.py
index 5dce570735..5a7902db74 100644
--- a/lib/ansible/playbook/included_file.py
+++ b/lib/ansible/playbook/included_file.py
@@ -30,13 +30,14 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class IncludedFile:
def __init__(self, filename, args, task):
self._filename = filename
- self._args = args
- self._task = task
- self._hosts = []
+ self._args = args
+ self._task = task
+ self._hosts = []
def add_host(self, host):
if host not in self._hosts:
@@ -69,7 +70,7 @@ class IncludedFile:
continue
include_results = res._result['results']
else:
- include_results = [ res._result ]
+ include_results = [res._result]
for include_result in include_results:
# if the task result was skipped or failed, continue
diff --git a/lib/ansible/playbook/loop_control.py b/lib/ansible/playbook/loop_control.py
index fdc50580b1..b983ee88ed 100644
--- a/lib/ansible/playbook/loop_control.py
+++ b/lib/ansible/playbook/loop_control.py
@@ -26,8 +26,8 @@ from ansible.playbook.base import Base
class LoopControl(Base):
_loop_var = FieldAttribute(isa='str')
- _label = FieldAttribute(isa='str')
- _pause = FieldAttribute(isa='int')
+ _label = FieldAttribute(isa='str')
+ _pause = FieldAttribute(isa='int')
def __init__(self):
super(LoopControl, self).__init__()
@@ -36,4 +36,3 @@ class LoopControl(Base):
def load(data, variable_manager=None, loader=None):
t = LoopControl()
return t.load_data(data, variable_manager=variable_manager, loader=loader)
-
diff --git a/lib/ansible/playbook/play.py b/lib/ansible/playbook/play.py
index cfc7039747..f0a87b8a79 100644
--- a/lib/ansible/playbook/play.py
+++ b/lib/ansible/playbook/play.py
@@ -20,10 +20,8 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import constants as C
-
from ansible.errors import AnsibleParserError
from ansible.module_utils.six import string_types
-
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
@@ -56,40 +54,40 @@ class Play(Base, Taggable, Become):
"""
# =================================================================================
- _name = FieldAttribute(isa='string', default='', always_post_validate=True)
+ _name = FieldAttribute(isa='string', default='', always_post_validate=True)
# TODO: generalize connection
- _accelerate = FieldAttribute(isa='bool', default=False, always_post_validate=True)
- _accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
- _accelerate_port = FieldAttribute(isa='int', default=5099, always_post_validate=True)
+ _accelerate = FieldAttribute(isa='bool', default=False, always_post_validate=True)
+ _accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
+ _accelerate_port = FieldAttribute(isa='int', default=5099, always_post_validate=True)
# Connection
- _fact_path = FieldAttribute(isa='string', default=None)
- _gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True)
- _gather_subset = FieldAttribute(isa='barelist', default=None, always_post_validate=True)
- _gather_timeout = FieldAttribute(isa='int', default=None, always_post_validate=True)
- _hosts = FieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True)
+ _fact_path = FieldAttribute(isa='string', default=None)
+ _gather_facts = FieldAttribute(isa='bool', default=None, always_post_validate=True)
+ _gather_subset = FieldAttribute(isa='barelist', default=None, always_post_validate=True)
+ _gather_timeout = FieldAttribute(isa='int', default=None, always_post_validate=True)
+ _hosts = FieldAttribute(isa='list', required=True, listof=string_types, always_post_validate=True)
# Variable Attributes
- _vars_files = FieldAttribute(isa='list', default=[], priority=99)
- _vars_prompt = FieldAttribute(isa='list', default=[], always_post_validate=True)
- _vault_password = FieldAttribute(isa='string', always_post_validate=True)
+ _vars_files = FieldAttribute(isa='list', default=[], priority=99)
+ _vars_prompt = FieldAttribute(isa='list', default=[], always_post_validate=True)
+ _vault_password = FieldAttribute(isa='string', always_post_validate=True)
# Role Attributes
- _roles = FieldAttribute(isa='list', default=[], priority=90)
+ _roles = FieldAttribute(isa='list', default=[], priority=90)
# Block (Task) Lists Attributes
- _handlers = FieldAttribute(isa='list', default=[])
- _pre_tasks = FieldAttribute(isa='list', default=[])
- _post_tasks = FieldAttribute(isa='list', default=[])
- _tasks = FieldAttribute(isa='list', default=[])
+ _handlers = FieldAttribute(isa='list', default=[])
+ _pre_tasks = FieldAttribute(isa='list', default=[])
+ _post_tasks = FieldAttribute(isa='list', default=[])
+ _tasks = FieldAttribute(isa='list', default=[])
# Flag/Setting Attributes
- _force_handlers = FieldAttribute(isa='bool', always_post_validate=True)
+ _force_handlers = FieldAttribute(isa='bool', always_post_validate=True)
_max_fail_percentage = FieldAttribute(isa='percent', always_post_validate=True)
- _serial = FieldAttribute(isa='list', default=[], always_post_validate=True)
- _strategy = FieldAttribute(isa='string', default=C.DEFAULT_STRATEGY, always_post_validate=True)
- _order = FieldAttribute(isa='string', always_post_validate=True)
+ _serial = FieldAttribute(isa='list', default=[], always_post_validate=True)
+ _strategy = FieldAttribute(isa='string', default=C.DEFAULT_STRATEGY, always_post_validate=True)
+ _order = FieldAttribute(isa='string', always_post_validate=True)
# =================================================================================
@@ -132,8 +130,8 @@ class Play(Base, Taggable, Become):
# this should never happen, but error out with a helpful message
# to the user if it does...
if 'remote_user' in ds:
- raise AnsibleParserError("both 'user' and 'remote_user' are set for %s."
- " The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)
+ raise AnsibleParserError("both 'user' and 'remote_user' are set for %s. "
+ "The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)
ds['remote_user'] = ds['user']
del ds['user']
@@ -207,14 +205,14 @@ class Play(Base, Taggable, Become):
display.deprecated("Using the 'short form' for vars_prompt has been deprecated", version="2.7")
for vname, prompt in prompt_data.items():
vars_prompts.append(dict(
- name = vname,
- prompt = prompt,
- default = None,
- private = None,
- confirm = None,
- encrypt = None,
- salt_size = None,
- salt = None,
+ name=vname,
+ prompt=prompt,
+ default=None,
+ private=None,
+ confirm=None,
+ encrypt=None,
+ salt_size=None,
+ salt=None,
))
else:
vars_prompts.append(prompt_data)
diff --git a/lib/ansible/playbook/play_context.py b/lib/ansible/playbook/play_context.py
index 2fa3707d76..4c49d8b364 100644
--- a/lib/ansible/playbook/play_context.py
+++ b/lib/ansible/playbook/play_context.py
@@ -54,42 +54,42 @@ except ImportError:
# in variable names.
MAGIC_VARIABLE_MAPPING = dict(
- connection = ('ansible_connection',),
- remote_addr = ('ansible_ssh_host', 'ansible_host'),
- remote_user = ('ansible_ssh_user', 'ansible_user'),
- port = ('ansible_ssh_port', 'ansible_port'),
- timeout = ('ansible_ssh_timeout', 'ansible_timeout'),
- ssh_executable = ('ansible_ssh_executable',),
- accelerate_port = ('ansible_accelerate_port',),
- password = ('ansible_ssh_pass', 'ansible_password'),
- private_key_file = ('ansible_ssh_private_key_file', 'ansible_private_key_file'),
- pipelining = ('ansible_ssh_pipelining', 'ansible_pipelining'),
- shell = ('ansible_shell_type',),
- network_os = ('ansible_network_os',),
- become = ('ansible_become',),
- become_method = ('ansible_become_method',),
- become_user = ('ansible_become_user',),
- become_pass = ('ansible_become_password','ansible_become_pass'),
- become_exe = ('ansible_become_exe',),
- become_flags = ('ansible_become_flags',),
- ssh_common_args = ('ansible_ssh_common_args',),
- docker_extra_args= ('ansible_docker_extra_args',),
- sftp_extra_args = ('ansible_sftp_extra_args',),
- scp_extra_args = ('ansible_scp_extra_args',),
- ssh_extra_args = ('ansible_ssh_extra_args',),
- ssh_transfer_method = ('ansible_ssh_transfer_method',),
- sudo = ('ansible_sudo',),
- sudo_user = ('ansible_sudo_user',),
- sudo_pass = ('ansible_sudo_password', 'ansible_sudo_pass'),
- sudo_exe = ('ansible_sudo_exe',),
- sudo_flags = ('ansible_sudo_flags',),
- su = ('ansible_su',),
- su_user = ('ansible_su_user',),
- su_pass = ('ansible_su_password', 'ansible_su_pass'),
- su_exe = ('ansible_su_exe',),
- su_flags = ('ansible_su_flags',),
- executable = ('ansible_shell_executable',),
- module_compression = ('ansible_module_compression',),
+ connection=('ansible_connection', ),
+ remote_addr=('ansible_ssh_host', 'ansible_host'),
+ remote_user=('ansible_ssh_user', 'ansible_user'),
+ port=('ansible_ssh_port', 'ansible_port'),
+ timeout=('ansible_ssh_timeout', 'ansible_timeout'),
+ ssh_executable=('ansible_ssh_executable', ),
+ accelerate_port=('ansible_accelerate_port', ),
+ password=('ansible_ssh_pass', 'ansible_password'),
+ private_key_file=('ansible_ssh_private_key_file', 'ansible_private_key_file'),
+ pipelining=('ansible_ssh_pipelining', 'ansible_pipelining'),
+ shell=('ansible_shell_type', ),
+ network_os=('ansible_network_os', ),
+ become=('ansible_become', ),
+ become_method=('ansible_become_method', ),
+ become_user=('ansible_become_user', ),
+ become_pass=('ansible_become_password', 'ansible_become_pass'),
+ become_exe=('ansible_become_exe', ),
+ become_flags=('ansible_become_flags', ),
+ ssh_common_args=('ansible_ssh_common_args', ),
+ docker_extra_args=('ansible_docker_extra_args', ),
+ sftp_extra_args=('ansible_sftp_extra_args', ),
+ scp_extra_args=('ansible_scp_extra_args', ),
+ ssh_extra_args=('ansible_ssh_extra_args', ),
+ ssh_transfer_method=('ansible_ssh_transfer_method', ),
+ sudo=('ansible_sudo', ),
+ sudo_user=('ansible_sudo_user', ),
+ sudo_pass=('ansible_sudo_password', 'ansible_sudo_pass'),
+ sudo_exe=('ansible_sudo_exe', ),
+ sudo_flags=('ansible_sudo_flags', ),
+ su=('ansible_su', ),
+ su_user=('ansible_su_user', ),
+ su_pass=('ansible_su_password', 'ansible_su_pass'),
+ su_exe=('ansible_su_exe', ),
+ su_flags=('ansible_su_flags', ),
+ executable=('ansible_shell_executable', ),
+ module_compression=('ansible_module_compression', ),
)
b_SU_PROMPT_LOCALIZATIONS = [
@@ -155,6 +155,7 @@ RESET_VARS = (
'ansible_port',
)
+
class PlayContext(Base):
'''
@@ -165,60 +166,60 @@ class PlayContext(Base):
# connection fields, some are inherited from Base:
# (connection, port, remote_user, environment, no_log)
- _docker_extra_args = FieldAttribute(isa='string')
- _remote_addr = FieldAttribute(isa='string')
- _password = FieldAttribute(isa='string')
+ _docker_extra_args = FieldAttribute(isa='string')
+ _remote_addr = FieldAttribute(isa='string')
+ _password = FieldAttribute(isa='string')
_private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
- _timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
- _shell = FieldAttribute(isa='string')
- _network_os = FieldAttribute(isa='string')
- _connection_user = FieldAttribute(isa='string')
- _ssh_args = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_ARGS)
- _ssh_common_args = FieldAttribute(isa='string')
- _sftp_extra_args = FieldAttribute(isa='string')
- _scp_extra_args = FieldAttribute(isa='string')
- _ssh_extra_args = FieldAttribute(isa='string')
- _ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
+ _timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
+ _shell = FieldAttribute(isa='string')
+ _network_os = FieldAttribute(isa='string')
+ _connection_user = FieldAttribute(isa='string')
+ _ssh_args = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_ARGS)
+ _ssh_common_args = FieldAttribute(isa='string')
+ _sftp_extra_args = FieldAttribute(isa='string')
+ _scp_extra_args = FieldAttribute(isa='string')
+ _ssh_extra_args = FieldAttribute(isa='string')
+ _ssh_executable = FieldAttribute(isa='string', default=C.ANSIBLE_SSH_EXECUTABLE)
_ssh_transfer_method = FieldAttribute(isa='string', default=C.DEFAULT_SSH_TRANSFER_METHOD)
- _connection_lockfd= FieldAttribute(isa='int')
- _pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_SSH_PIPELINING)
- _accelerate = FieldAttribute(isa='bool', default=False)
- _accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
- _accelerate_port = FieldAttribute(isa='int', default=C.ACCELERATE_PORT, always_post_validate=True)
- _executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
+ _connection_lockfd = FieldAttribute(isa='int')
+ _pipelining = FieldAttribute(isa='bool', default=C.ANSIBLE_SSH_PIPELINING)
+ _accelerate = FieldAttribute(isa='bool', default=False)
+ _accelerate_ipv6 = FieldAttribute(isa='bool', default=False, always_post_validate=True)
+ _accelerate_port = FieldAttribute(isa='int', default=C.ACCELERATE_PORT, always_post_validate=True)
+ _executable = FieldAttribute(isa='string', default=C.DEFAULT_EXECUTABLE)
_module_compression = FieldAttribute(isa='string', default=C.DEFAULT_MODULE_COMPRESSION)
# privilege escalation fields
- _become = FieldAttribute(isa='bool')
- _become_method = FieldAttribute(isa='string')
- _become_user = FieldAttribute(isa='string')
- _become_pass = FieldAttribute(isa='string')
- _become_exe = FieldAttribute(isa='string')
- _become_flags = FieldAttribute(isa='string')
- _prompt = FieldAttribute(isa='string')
+ _become = FieldAttribute(isa='bool')
+ _become_method = FieldAttribute(isa='string')
+ _become_user = FieldAttribute(isa='string')
+ _become_pass = FieldAttribute(isa='string')
+ _become_exe = FieldAttribute(isa='string')
+ _become_flags = FieldAttribute(isa='string')
+ _prompt = FieldAttribute(isa='string')
# backwards compatibility fields for sudo/su
- _sudo_exe = FieldAttribute(isa='string')
- _sudo_flags = FieldAttribute(isa='string')
- _sudo_pass = FieldAttribute(isa='string')
- _su_exe = FieldAttribute(isa='string')
- _su_flags = FieldAttribute(isa='string')
- _su_pass = FieldAttribute(isa='string')
+ _sudo_exe = FieldAttribute(isa='string')
+ _sudo_flags = FieldAttribute(isa='string')
+ _sudo_pass = FieldAttribute(isa='string')
+ _su_exe = FieldAttribute(isa='string')
+ _su_flags = FieldAttribute(isa='string')
+ _su_pass = FieldAttribute(isa='string')
# general flags
- _verbosity = FieldAttribute(isa='int', default=0)
- _only_tags = FieldAttribute(isa='set', default=set())
- _skip_tags = FieldAttribute(isa='set', default=set())
- _check_mode = FieldAttribute(isa='bool', default=False)
- _force_handlers = FieldAttribute(isa='bool', default=False)
- _start_at_task = FieldAttribute(isa='string')
- _step = FieldAttribute(isa='bool', default=False)
- _diff = FieldAttribute(isa='bool', default=C.DIFF_ALWAYS)
+ _verbosity = FieldAttribute(isa='int', default=0)
+ _only_tags = FieldAttribute(isa='set', default=set())
+ _skip_tags = FieldAttribute(isa='set', default=set())
+ _check_mode = FieldAttribute(isa='bool', default=False)
+ _force_handlers = FieldAttribute(isa='bool', default=False)
+ _start_at_task = FieldAttribute(isa='string')
+ _step = FieldAttribute(isa='bool', default=False)
+ _diff = FieldAttribute(isa='bool', default=C.DIFF_ALWAYS)
# Fact gathering settings
- _gather_subset = FieldAttribute(isa='string', default=C.DEFAULT_GATHER_SUBSET)
- _gather_timeout = FieldAttribute(isa='string', default=C.DEFAULT_GATHER_TIMEOUT)
- _fact_path = FieldAttribute(isa='string', default=C.DEFAULT_FACT_PATH)
+ _gather_subset = FieldAttribute(isa='string', default=C.DEFAULT_GATHER_SUBSET)
+ _gather_timeout = FieldAttribute(isa='string', default=C.DEFAULT_GATHER_TIMEOUT)
+ _fact_path = FieldAttribute(isa='string', default=C.DEFAULT_FACT_PATH)
def __init__(self, play=None, options=None, passwords=None, connection_lockfd=None):
@@ -227,10 +228,10 @@ class PlayContext(Base):
if passwords is None:
passwords = {}
- self.password = passwords.get('conn_pass','')
- self.become_pass = passwords.get('become_pass','')
+ self.password = passwords.get('conn_pass', '')
+ self.become_pass = passwords.get('become_pass', '')
- self.prompt = ''
+ self.prompt = ''
self.success_key = ''
# a file descriptor to be used during locking operations
@@ -243,7 +244,6 @@ class PlayContext(Base):
if play:
self.set_play(play)
-
def set_play(self, play):
'''
Configures this connection information instance with data from
@@ -283,18 +283,18 @@ class PlayContext(Base):
'''
# privilege escalation
- self.become = options.become
+ self.become = options.become
self.become_method = options.become_method
- self.become_user = options.become_user
+ self.become_user = options.become_user
self.check_mode = boolean(options.check)
# get ssh options FIXME: make these common to all connections
for flag in ['ssh_common_args', 'docker_extra_args', 'sftp_extra_args', 'scp_extra_args', 'ssh_extra_args']:
- setattr(self, flag, getattr(options,flag, ''))
+ setattr(self, flag, getattr(options, flag, ''))
# general flags (should we move out?)
- for flag in ['connection','remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
+ for flag in ['connection', 'remote_user', 'private_key_file', 'verbosity', 'force_handlers', 'step', 'start_at_task', 'diff']:
attribute = getattr(options, flag, False)
if attribute:
setattr(self, flag, attribute)
@@ -435,7 +435,7 @@ class PlayContext(Base):
if connection_type in delegated_vars:
break
else:
- remote_addr_local = new_info.remote_addr in C.LOCALHOST
+ remote_addr_local = new_info.remote_addr in C.LOCALHOST
inv_hostname_local = delegated_vars.get('inventory_hostname') in C.LOCALHOST
if remote_addr_local and inv_hostname_local:
setattr(new_info, 'connection', 'local')
@@ -464,13 +464,12 @@ class PlayContext(Base):
if task.check_mode is not None:
new_info.check_mode = task.check_mode
-
return new_info
def make_become_cmd(self, cmd, executable=None):
""" helper function to create privilege escalation commands """
- prompt = None
+ prompt = None
success_key = None
self.prompt = None
@@ -479,8 +478,8 @@ class PlayContext(Base):
if not executable:
executable = self.executable
- becomecmd = None
- randbits = ''.join(random.choice(string.ascii_lowercase) for x in range(32))
+ becomecmd = None
+ randbits = ''.join(random.choice(string.ascii_lowercase) for x in range(32))
success_key = 'BECOME-SUCCESS-%s' % randbits
success_cmd = shlex_quote('echo %s; %s' % (success_key, cmd))
@@ -490,18 +489,22 @@ class PlayContext(Base):
command = success_cmd
# set executable to use for the privilege escalation method, with various overrides
- exe = self.become_exe or \
- getattr(self, '%s_exe' % self.become_method, None) or \
- C.DEFAULT_BECOME_EXE or \
- getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or \
- self.become_method
+ exe = (
+ self.become_exe or
+ getattr(self, '%s_exe' % self.become_method, None) or
+ C.DEFAULT_BECOME_EXE or
+ getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or
+ self.become_method
+ )
# set flags to use for the privilege escalation method, with various overrides
- flags = self.become_flags or \
- getattr(self, '%s_flags' % self.become_method, None) or \
- C.DEFAULT_BECOME_FLAGS or \
- getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or \
- ''
+ flags = (
+ self.become_flags or
+ getattr(self, '%s_flags' % self.become_method, None) or
+ C.DEFAULT_BECOME_FLAGS or
+ getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or
+ ''
+ )
if self.become_method == 'sudo':
# If we have a password, we run sudo with a randomly-generated
@@ -517,11 +520,10 @@ class PlayContext(Base):
# force quick error if password is required but not supplied, should prevent sudo hangs.
if self.become_pass:
prompt = '[sudo via ansible, key=%s] password: ' % randbits
- becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n',''), prompt, self.become_user, command)
+ becomecmd = '%s %s -p "%s" -u %s %s' % (exe, flags.replace('-n', ''), prompt, self.become_user, command)
else:
becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, command)
-
elif self.become_method == 'su':
# passing code ref to examine prompt as simple string comparisson isn't good enough with su
@@ -537,7 +539,7 @@ class PlayContext(Base):
elif self.become_method == 'pbrun':
- prompt='Password:'
+ prompt = 'Password:'
becomecmd = '%s %s -u %s %s' % (exe, flags, self.become_user, success_cmd)
elif self.become_method == 'ksu':
@@ -575,7 +577,7 @@ class PlayContext(Base):
if self.become_user:
flags += ' -u %s ' % self.become_user
- #FIXME: make shell independent
+ # FIXME: make shell independent
becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)
elif self.become_method == 'dzdo':
@@ -591,7 +593,7 @@ class PlayContext(Base):
exe = self.become_exe or 'pmrun'
- prompt='Enter UPM user password:'
+ prompt = 'Enter UPM user password:'
becomecmd = '%s %s %s' % (exe, flags, shlex_quote(command))
else:
@@ -645,4 +647,3 @@ class PlayContext(Base):
self.connection = conn_type
return self._attributes['connection']
-
diff --git a/lib/ansible/playbook/playbook_include.py b/lib/ansible/playbook/playbook_include.py
index f67ed0f8e8..3cf6aca188 100644
--- a/lib/ansible/playbook/playbook_include.py
+++ b/lib/ansible/playbook/playbook_include.py
@@ -34,9 +34,9 @@ from ansible.template import Templar
class PlaybookInclude(Base, Conditional, Taggable):
- _name = FieldAttribute(isa='string')
- _include = FieldAttribute(isa='string')
- _vars = FieldAttribute(isa='dict', default=dict())
+ _name = FieldAttribute(isa='string')
+ _include = FieldAttribute(isa='string')
+ _vars = FieldAttribute(isa='dict', default=dict())
@staticmethod
def load(data, basedir, variable_manager=None, loader=None):
@@ -113,7 +113,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
- for (k,v) in iteritems(ds):
+ for (k, v) in iteritems(ds):
if k == 'include':
self._preprocess_include(ds, new_ds, k, v)
else:
@@ -152,4 +152,3 @@ class PlaybookInclude(Base, Conditional, Taggable):
if 'vars' in new_ds:
raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
new_ds['vars'] = params
-
diff --git a/lib/ansible/playbook/role/__init__.py b/lib/ansible/playbook/role/__init__.py
index e1e876c54f..2c1d485fb3 100644
--- a/lib/ansible/playbook/role/__init__.py
+++ b/lib/ansible/playbook/role/__init__.py
@@ -42,6 +42,7 @@ __all__ = ['Role', 'hash_params']
# in a static method. This is also used in the base class for
# strategies (ansible/plugins/strategy/__init__.py)
+
def hash_params(params):
"""
Construct a data structure of parameters that is hashable.
@@ -96,21 +97,21 @@ class Role(Base, Become, Conditional, Taggable):
_delegate_facts = FieldAttribute(isa='bool', default=False)
def __init__(self, play=None, from_files=None):
- self._role_name = None
- self._role_path = None
- self._role_params = dict()
- self._loader = None
-
- self._metadata = None
- self._play = play
- self._parents = []
- self._dependencies = []
- self._task_blocks = []
- self._handler_blocks = []
- self._default_vars = dict()
- self._role_vars = dict()
- self._had_task_run = dict()
- self._completed = dict()
+ self._role_name = None
+ self._role_path = None
+ self._role_params = dict()
+ self._loader = None
+
+ self._metadata = None
+ self._play = play
+ self._parents = []
+ self._dependencies = []
+ self._task_blocks = []
+ self._handler_blocks = []
+ self._default_vars = dict()
+ self._role_vars = dict()
+ self._had_task_run = dict()
+ self._completed = dict()
if from_files is None:
from_files = {}
@@ -166,11 +167,11 @@ class Role(Base, Become, Conditional, Taggable):
obj=role_include._ds)
def _load_role_data(self, role_include, parent_role=None):
- self._role_name = role_include.role
- self._role_path = role_include.get_role_path()
- self._role_params = role_include.get_role_params()
+ self._role_name = role_include.role
+ self._role_path = role_include.get_role_path()
+ self._role_params = role_include.get_role_params()
self._variable_manager = role_include.get_variable_manager()
- self._loader = role_include.get_loader()
+ self._loader = role_include.get_loader()
if parent_role:
self.add_parent(parent_role)
@@ -209,7 +210,7 @@ class Role(Base, Become, Conditional, Taggable):
try:
self._task_blocks = load_list_of_blocks(task_data, play=self._play, role=self, loader=self._loader, variable_manager=self._variable_manager)
except AssertionError:
- raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=task_data)
+ raise AnsibleParserError("The tasks/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=task_data)
handler_data = self._load_role_yaml('handlers')
if handler_data:
@@ -217,10 +218,10 @@ class Role(Base, Become, Conditional, Taggable):
self._handler_blocks = load_list_of_blocks(handler_data, play=self._play, role=self, use_handlers=True, loader=self._loader,
variable_manager=self._variable_manager)
except AssertionError:
- raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name , obj=handler_data)
+ raise AnsibleParserError("The handlers/main.yml file for role '%s' must contain a list of tasks" % self._role_name, obj=handler_data)
# vars and default vars are regular dictionaries
- self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'))
+ self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'))
if self._role_vars is None:
self._role_vars = dict()
elif not isinstance(self._role_vars, dict):
@@ -239,7 +240,7 @@ class Role(Base, Become, Conditional, Taggable):
if self._loader.path_exists(main_file):
return self._loader.load_from_file(main_file)
elif main is not None:
- raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir,main))
+ raise AnsibleParserError("Could not find specified file in role: %s/%s" % (subdir, main))
return None
def _resolve_main(self, basepath, main=None):
@@ -269,8 +270,8 @@ class Role(Base, Become, Conditional, Taggable):
else:
for m in possible_mains:
if self._loader.is_file(m):
- return m # exactly one main file
- return possible_mains[0] # zero mains (we still need to return something)
+ return m # exactly one main file
+ return possible_mains[0] # zero mains (we still need to return something)
def _load_dependencies(self):
'''
@@ -286,7 +287,6 @@ class Role(Base, Become, Conditional, Taggable):
return deps
- #------------------------------------------------------------------------------
# other functions
def add_parent(self, parent_role):
@@ -347,7 +347,7 @@ class Role(Base, Become, Conditional, Taggable):
in the proper order in which they should be executed or evaluated.
'''
- child_deps = []
+ child_deps = []
for dep in self.get_direct_dependencies():
for child_dep in dep.get_all_dependencies():
@@ -425,13 +425,13 @@ class Role(Base, Become, Conditional, Taggable):
def serialize(self, include_deps=True):
res = super(Role, self).serialize()
- res['_role_name'] = self._role_name
- res['_role_path'] = self._role_path
- res['_role_vars'] = self._role_vars
- res['_role_params'] = self._role_params
+ res['_role_name'] = self._role_name
+ res['_role_path'] = self._role_path
+ res['_role_vars'] = self._role_vars
+ res['_role_params'] = self._role_params
res['_default_vars'] = self._default_vars
res['_had_task_run'] = self._had_task_run.copy()
- res['_completed'] = self._completed.copy()
+ res['_completed'] = self._completed.copy()
if self._metadata:
res['_metadata'] = self._metadata.serialize()
@@ -450,13 +450,13 @@ class Role(Base, Become, Conditional, Taggable):
return res
def deserialize(self, data, include_deps=True):
- self._role_name = data.get('_role_name', '')
- self._role_path = data.get('_role_path', '')
- self._role_vars = data.get('_role_vars', dict())
- self._role_params = data.get('_role_params', dict())
+ self._role_name = data.get('_role_name', '')
+ self._role_path = data.get('_role_path', '')
+ self._role_vars = data.get('_role_vars', dict())
+ self._role_params = data.get('_role_params', dict())
self._default_vars = data.get('_default_vars', dict())
self._had_task_run = data.get('_had_task_run', dict())
- self._completed = data.get('_completed', dict())
+ self._completed = data.get('_completed', dict())
if include_deps:
deps = []
@@ -488,4 +488,3 @@ class Role(Base, Become, Conditional, Taggable):
parent.set_loader(loader)
for dep in self.get_direct_dependencies():
dep.set_loader(loader)
-
diff --git a/lib/ansible/playbook/role/definition.py b/lib/ansible/playbook/role/definition.py
index 1ee1724bed..2e0ef66cb9 100644
--- a/lib/ansible/playbook/role/definition.py
+++ b/lib/ansible/playbook/role/definition.py
@@ -51,16 +51,16 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
super(RoleDefinition, self).__init__()
- self._play = play
+ self._play = play
self._variable_manager = variable_manager
- self._loader = loader
+ self._loader = loader
- self._role_path = None
+ self._role_path = None
self._role_basedir = role_basedir
- self._role_params = dict()
+ self._role_params = dict()
- #def __repr__(self):
- # return 'ROLEDEF: ' + self._attributes.get('role', '<no name set>')
+ # def __repr__(self):
+ # return 'ROLEDEF: ' + self._attributes.get('role', '<no name set>')
@staticmethod
def load(data, variable_manager=None, loader=None):
@@ -205,8 +205,8 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
# remember to update it manually.
if key not in base_attribute_names or key in ('connection', 'port', 'remote_user'):
if key in ('connection', 'port', 'remote_user'):
- display.deprecated("Using '%s' as a role param has been deprecated. " % key + \
- "In the future, these values should be entered in the `vars:` " + \
+ display.deprecated("Using '%s' as a role param has been deprecated. " % key +
+ "In the future, these values should be entered in the `vars:` " +
"section for roles, but for now we'll store it as both a param and an attribute.", version="2.7")
role_def[key] = value
# this key does not match a field attribute, so it must be a role param
diff --git a/lib/ansible/playbook/role/include.py b/lib/ansible/playbook/role/include.py
index 5e6e451561..578e47ec9e 100644
--- a/lib/ansible/playbook/role/include.py
+++ b/lib/ansible/playbook/role/include.py
@@ -40,7 +40,7 @@ class RoleInclude(RoleDefinition):
is included for execution in a play.
"""
- _delegate_to = FieldAttribute(isa='string')
+ _delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool', default=False)
def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None):
@@ -57,4 +57,3 @@ class RoleInclude(RoleDefinition):
ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader)
return ri.load_data(data, variable_manager=variable_manager, loader=loader)
-
diff --git a/lib/ansible/playbook/role/metadata.py b/lib/ansible/playbook/role/metadata.py
index 50e104f4bc..28e1728ebd 100644
--- a/lib/ansible/playbook/role/metadata.py
+++ b/lib/ansible/playbook/role/metadata.py
@@ -39,8 +39,8 @@ class RoleMetadata(Base):
'''
_allow_duplicates = FieldAttribute(isa='bool', default=False)
- _dependencies = FieldAttribute(isa='list', default=[])
- _galaxy_info = FieldAttribute(isa='GalaxyInfo')
+ _dependencies = FieldAttribute(isa='list', default=[])
+ _galaxy_info = FieldAttribute(isa='GalaxyInfo')
def __init__(self, owner=None):
self._owner = owner
@@ -103,8 +103,8 @@ class RoleMetadata(Base):
def serialize(self):
return dict(
- allow_duplicates = self._allow_duplicates,
- dependencies = self._dependencies,
+ allow_duplicates=self._allow_duplicates,
+ dependencies=self._dependencies,
)
def deserialize(self, data):
diff --git a/lib/ansible/playbook/role/requirement.py b/lib/ansible/playbook/role/requirement.py
index 4df7e05da7..cbad40b537 100644
--- a/lib/ansible/playbook/role/requirement.py
+++ b/lib/ansible/playbook/role/requirement.py
@@ -46,6 +46,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class RoleRequirement(RoleDefinition):
"""
@@ -193,7 +194,7 @@ class RoleRequirement(RoleDefinition):
raise AnsibleError("error executing: %s" % " ".join(clone_cmd))
rc = popen.wait()
if rc != 0:
- raise AnsibleError ("- command %s failed in directory %s (rc=%s)" % (' '.join(clone_cmd), tempdir, rc))
+ raise AnsibleError("- command %s failed in directory %s (rc=%s)" % (' '.join(clone_cmd), tempdir, rc))
if scm == 'git' and version:
checkout_cmd = [scm, 'checkout', version]
@@ -228,4 +229,3 @@ class RoleRequirement(RoleDefinition):
shutil.rmtree(tempdir, ignore_errors=True)
return temp_file.name
-
diff --git a/lib/ansible/playbook/role_include.py b/lib/ansible/playbook/role_include.py
index 4ed9369cba..44d57fab57 100644
--- a/lib/ansible/playbook/role_include.py
+++ b/lib/ansible/playbook/role_include.py
@@ -60,12 +60,11 @@ class IncludeRole(Task):
self._parent_role = role
self._role_name = None
-
def get_block_list(self, play=None, variable_manager=None, loader=None):
# only need play passed in when dynamic
if play is None:
- myplay = self._parent._play
+ myplay = self._parent._play
else:
myplay = play
@@ -99,7 +98,7 @@ class IncludeRole(Task):
ir = IncludeRole(block, role, task_include=task_include).load_data(data, variable_manager=variable_manager, loader=loader)
- ### Process options
+ # Process options
# name is needed, or use role as alias
ir._role_name = ir.args.get('name', ir.args.get('role'))
if ir._role_name is None:
@@ -107,11 +106,11 @@ class IncludeRole(Task):
# build options for role includes
for key in ['tasks', 'vars', 'defaults']:
- from_key ='%s_from' % key
+ from_key = '%s_from' % key
if ir.args.get(from_key):
ir._from_files[key] = basename(ir.args.get(from_key))
- #FIXME: find a way to make this list come from object ( attributes does not work as per below)
+ # FIXME: find a way to make this list come from object ( attributes does not work as per below)
# manual list as otherwise the options would set other task parameters we don't want.
for option in ['private', 'allow_duplicates']:
if option in ir.args:
@@ -125,7 +124,7 @@ class IncludeRole(Task):
new_me.statically_loaded = self.statically_loaded
new_me._from_files = self._from_files.copy()
new_me._parent_role = self._parent_role
- new_me._role_name = self._role_name
+ new_me._role_name = self._role_name
return new_me
diff --git a/lib/ansible/playbook/taggable.py b/lib/ansible/playbook/taggable.py
index ed9dc2caa5..511dda85cf 100644
--- a/lib/ansible/playbook/taggable.py
+++ b/lib/ansible/playbook/taggable.py
@@ -30,7 +30,7 @@ from ansible.template import Templar
class Taggable:
untagged = frozenset(['untagged'])
- _tags = FieldAttribute(isa='list', default=[], listof=(string_types,int))
+ _tags = FieldAttribute(isa='list', default=[], listof=(string_types, int))
def __init__(self):
super(Taggable, self).__init__()
@@ -41,9 +41,9 @@ class Taggable:
elif isinstance(ds, string_types):
value = ds.split(',')
if isinstance(value, list):
- return [ x.strip() for x in value ]
+ return [x.strip() for x in value]
else:
- return [ ds ]
+ return [ds]
else:
raise AnsibleError('tags must be specified as a list', obj=ds)
@@ -73,7 +73,7 @@ class Taggable:
else:
tags = set([tags])
else:
- tags = set([i for i,_ in itertools.groupby(tags)])
+ tags = set([i for i, _ in itertools.groupby(tags)])
else:
# this makes isdisjoint work for untagged
tags = self.untagged
diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py
index abe62e381b..979f0ce8a5 100644
--- a/lib/ansible/playbook/task.py
+++ b/lib/ansible/playbook/task.py
@@ -65,29 +65,29 @@ class Task(Base, Conditional, Taggable, Become):
# will be used if defined
# might be possible to define others
- _args = FieldAttribute(isa='dict', default=dict())
- _action = FieldAttribute(isa='string')
-
- _async = FieldAttribute(isa='int', default=0)
- _changed_when = FieldAttribute(isa='list', default=[])
- _delay = FieldAttribute(isa='int', default=5)
- _delegate_to = FieldAttribute(isa='string')
- _delegate_facts = FieldAttribute(isa='bool', default=False)
- _failed_when = FieldAttribute(isa='list', default=[])
- _loop = FieldAttribute(isa='string', private=True, inherit=False)
- _loop_args = FieldAttribute(isa='list', private=True, inherit=False)
- _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
- _name = FieldAttribute(isa='string', default='')
- _notify = FieldAttribute(isa='list')
- _poll = FieldAttribute(isa='int', default=10)
- _register = FieldAttribute(isa='string')
- _retries = FieldAttribute(isa='int')
- _until = FieldAttribute(isa='list', default=[])
+ _args = FieldAttribute(isa='dict', default=dict())
+ _action = FieldAttribute(isa='string')
+
+ _async = FieldAttribute(isa='int', default=0)
+ _changed_when = FieldAttribute(isa='list', default=[])
+ _delay = FieldAttribute(isa='int', default=5)
+ _delegate_to = FieldAttribute(isa='string')
+ _delegate_facts = FieldAttribute(isa='bool', default=False)
+ _failed_when = FieldAttribute(isa='list', default=[])
+ _loop = FieldAttribute(isa='string', private=True, inherit=False)
+ _loop_args = FieldAttribute(isa='list', private=True, inherit=False)
+ _loop_control = FieldAttribute(isa='class', class_type=LoopControl, inherit=False)
+ _name = FieldAttribute(isa='string', default='')
+ _notify = FieldAttribute(isa='list')
+ _poll = FieldAttribute(isa='int', default=10)
+ _register = FieldAttribute(isa='string')
+ _retries = FieldAttribute(isa='int')
+ _until = FieldAttribute(isa='list', default=[])
def __init__(self, block=None, role=None, task_include=None):
''' constructors a task, without the Task.load classmethod, it will be pretty blank '''
- self._role = role
+ self._role = role
self._parent = None
if task_include:
@@ -125,10 +125,10 @@ class Task(Base, Conditional, Taggable, Become):
return ds
elif isinstance(ds, dict):
buf = ""
- for (k,v) in iteritems(ds):
+ for (k, v) in iteritems(ds):
if k.startswith('_'):
continue
- buf = buf + "%s=%s " % (k,v)
+ buf = buf + "%s=%s " % (k, v)
buf = buf.strip()
return buf
@@ -186,11 +186,11 @@ class Task(Base, Conditional, Taggable, Become):
if 'cmd' in args:
if args.get('_raw_params', '') != '':
raise AnsibleError("The 'cmd' argument cannot be used when other raw parameters are specified."
- " Please put everything in one or the other place.", obj=ds)
+ " Please put everything in one or the other place.", obj=ds)
args['_raw_params'] = args.pop('cmd')
- new_ds['action'] = action
- new_ds['args'] = args
+ new_ds['action'] = action
+ new_ds['args'] = args
new_ds['delegate_to'] = delegate_to
# we handle any 'vars' specified in the ds here, as we may
@@ -203,7 +203,7 @@ class Task(Base, Conditional, Taggable, Become):
else:
new_ds['vars'] = dict()
- for (k,v) in iteritems(ds):
+ for (k, v) in iteritems(ds):
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
# we don't want to re-assign these values, which were
# determined by the ModuleArgsParser() above
@@ -216,9 +216,9 @@ class Task(Base, Conditional, Taggable, Become):
# here, and show a deprecation message as we will remove this at
# some point in the future.
if action == 'include' and k not in self._valid_attrs and k not in self.DEPRECATED_ATTRIBUTES:
- display.deprecated("Specifying include variables at the top-level of the task is deprecated."
- " Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n"
- " for currently supported syntax regarding included files and variables", version="2.7")
+ display.deprecated("Specifying include variables at the top-level of the task is deprecated. "
+ "Please see:\nhttp://docs.ansible.com/ansible/playbooks_roles.html#task-include-files-and-encouraging-reuse\n\n "
+ "for currently supported syntax regarding included files and variables", version="2.7")
new_ds['vars'][k] = v
else:
new_ds[k] = v
diff --git a/lib/ansible/playbook/task_include.py b/lib/ansible/playbook/task_include.py
index bfb7e89a85..b190439560 100644
--- a/lib/ansible/playbook/task_include.py
+++ b/lib/ansible/playbook/task_include.py
@@ -76,4 +76,3 @@ class TaskInclude(Task):
del all_vars['when']
return all_vars
-
diff --git a/lib/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
index 57fdf1e7e4..2a4110eaae 100644
--- a/lib/ansible/plugins/__init__.py
+++ b/lib/ansible/plugins/__init__.py
@@ -62,11 +62,11 @@ class PluginLoader:
def __init__(self, class_name, package, config, subdir, aliases={}, required_base_class=None):
- self.class_name = class_name
- self.base_class = required_base_class
- self.package = package
- self.subdir = subdir
- self.aliases = aliases
+ self.class_name = class_name
+ self.base_class = required_base_class
+ self.package = package
+ self.subdir = subdir
+ self.aliases = aliases
if config and not isinstance(config, list):
config = [config]
@@ -82,8 +82,8 @@ class PluginLoader:
if class_name not in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = defaultdict(dict)
- self._module_cache = MODULE_CACHE[class_name]
- self._paths = PATH_CACHE[class_name]
+ self._module_cache = MODULE_CACHE[class_name]
+ self._paths = PATH_CACHE[class_name]
self._plugin_path_cache = PLUGIN_PATH_CACHE[class_name]
self._extra_dirs = []
@@ -95,10 +95,10 @@ class PluginLoader:
'''
class_name = data.get('class_name')
- package = data.get('package')
- config = data.get('config')
- subdir = data.get('subdir')
- aliases = data.get('aliases')
+ package = data.get('package')
+ config = data.get('config')
+ subdir = data.get('subdir')
+ aliases = data.get('aliases')
base_class = data.get('base_class')
PATH_CACHE[class_name] = data.get('PATH_CACHE')
@@ -114,16 +114,16 @@ class PluginLoader:
'''
return dict(
- class_name = self.class_name,
- base_class = self.base_class,
- package = self.package,
- config = self.config,
- subdir = self.subdir,
- aliases = self.aliases,
- _extra_dirs = self._extra_dirs,
- _searched_paths = self._searched_paths,
- PATH_CACHE = PATH_CACHE[self.class_name],
- PLUGIN_PATH_CACHE = PLUGIN_PATH_CACHE[self.class_name],
+ class_name=self.class_name,
+ base_class=self.base_class,
+ package=self.package,
+ config=self.config,
+ subdir=self.subdir,
+ aliases=self.aliases,
+ _extra_dirs=self._extra_dirs,
+ _searched_paths=self._searched_paths,
+ PATH_CACHE=PATH_CACHE[self.class_name],
+ PLUGIN_PATH_CACHE=PLUGIN_PATH_CACHE[self.class_name],
)
def format_paths(self, paths):
@@ -145,7 +145,7 @@ class PluginLoader:
for root, subdirs, files in os.walk(dir, followlinks=True):
if '__init__.py' in files:
for x in subdirs:
- results.append(os.path.join(root,x))
+ results.append(os.path.join(root, x))
return results
def _get_package_paths(self, subdirs=True):
@@ -271,7 +271,7 @@ class PluginLoader:
# HACK: We have no way of executing python byte
# compiled files as ansible modules so specifically exclude them
- ### FIXME: I believe this is only correct for modules and
+ # FIXME: I believe this is only correct for modules and
# module_utils. For all other plugins we want .pyc and .pyo should
# be valid
if full_path.endswith(('.pyc', '.pyo')):
@@ -313,10 +313,10 @@ class PluginLoader:
if alias_name in pull_cache:
if not ignore_deprecated and not os.path.islink(pull_cache[alias_name]):
display.deprecated('%s is kept for backwards compatibility '
- 'but usage is discouraged. The module '
- 'documentation details page may explain '
- 'more about this rationale.' %
- name.lstrip('_'))
+ 'but usage is discouraged. The module '
+ 'documentation details page may explain '
+ 'more about this rationale.' %
+ name.lstrip('_'))
return pull_cache[alias_name]
return None
@@ -550,4 +550,3 @@ vars_loader = PluginLoader(
C.DEFAULT_VARS_PLUGIN_PATH,
'vars_plugins',
)
-
diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
index bed9b48d22..1cdcf080fe 100644
--- a/lib/ansible/plugins/action/__init__.py
+++ b/lib/ansible/plugins/action/__init__.py
@@ -59,18 +59,18 @@ class ActionBase(with_metaclass(ABCMeta, object)):
'''
def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
- self._task = task
- self._connection = connection
- self._play_context = play_context
- self._loader = loader
- self._templar = templar
+ self._task = task
+ self._connection = connection
+ self._play_context = play_context
+ self._loader = loader
+ self._templar = templar
self._shared_loader_obj = shared_loader_obj
# Backwards compat: self._display isn't really needed, just import the global display and use that.
- self._display = display
- self._cleanup_remote_tmp = False
+ self._display = display
+ self._cleanup_remote_tmp = False
self._supports_check_mode = True
- self._supports_async = False
+ self._supports_async = False
@abstractmethod
def run(self, tmp=None, task_vars=None):
@@ -154,7 +154,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# insert shared code and arguments into the module
(module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args,
- task_vars=task_vars, module_compression=self._play_context.module_compression)
+ task_vars=task_vars, module_compression=self._play_context.module_compression)
# FUTURE: we'll have to get fancier about this to support powershell over SSH on Windows...
if self._connection.transport == "winrm":
@@ -162,9 +162,9 @@ class ActionBase(with_metaclass(ABCMeta, object)):
final_environment = dict()
self._compute_environment_string(final_environment)
module_data = build_windows_module_payload(module_name=module_name, module_path=module_path,
- b_module_data=module_data, module_args=module_args,
- task_vars=task_vars, task=self._task,
- play_context=self._play_context, environment=final_environment)
+ b_module_data=module_data, module_args=module_args,
+ task_vars=task_vars, task=self._task,
+ play_context=self._play_context, environment=final_environment)
return (module_style, module_shebang, module_data, module_path)
@@ -177,7 +177,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if self._task.environment is not None:
environments = self._task.environment
if not isinstance(environments, list):
- environments = [ environments ]
+ environments = [environments]
# the environments as inherited need to be reversed, to make
# sure we merge in the parent's values first so those in the
@@ -213,7 +213,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
Determines if we are required and can do pipelining
'''
if self._connection.always_pipeline_modules:
- return True #eg, winrm
+ return True # eg, winrm
# any of these require a true
for condition in [
@@ -249,7 +249,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if use_system_tmp:
tmpdir = None
else:
- tmpdir = self._remote_expand_user(C.DEFAULT_REMOTE_TMP, sudoable=False)
+ tmpdir = self._remote_expand_user(C.DEFAULT_REMOTE_TMP, sudoable=False)
cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode, tmpdir)
result = self._low_level_execute_command(cmd, sudoable=False)
@@ -263,16 +263,16 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if self._play_context.verbosity > 3:
output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
else:
- output = (u'SSH encountered an unknown error during the connection.'
- ' We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
+ output = (u'SSH encountered an unknown error during the connection. '
+ 'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
elif u'No space left on device' in result['stderr']:
output = result['stderr']
else:
- output = ('Authentication or permission failure.'
- ' In some cases, you may have been able to authenticate and did not have permissions on the target directory.'
- ' Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp".'
- ' Failed command was: %s, exited with result %d' % (cmd, result['rc']))
+ output = ('Authentication or permission failure. '
+ 'In some cases, you may have been able to authenticate and did not have permissions on the target directory. '
+ 'Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp". '
+ 'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
if 'stdout' in result and result['stdout'] != u'':
output = output + u": %s" % result['stdout']
raise AnsibleConnectionFailure(output)
@@ -309,8 +309,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
tmp_rm_data = self._parse_returned_data(tmp_rm_res)
if tmp_rm_data.get('rc', 0) != 0:
- display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'),
- tmp_rm_res.get('stderr', 'No error string available.')))
+ display.warning('Error deleting remote temporary files (rc: %s, stderr: %s)'
+ % (tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))
def _transfer_file(self, local_path, remote_path):
self._connection.put_file(local_path, remote_path)
@@ -408,7 +408,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
setfacl_mode = 'r-x'
else:
chmod_mode = 'rX'
- ### Note: this form fails silently on freebsd. We currently
+ # NOTE: this form fails silently on freebsd. We currently
# never call _fixup_perms2() with execute=False but if we
# start to we'll have to fix this.
setfacl_mode = 'r-X'
@@ -426,22 +426,23 @@ class ActionBase(with_metaclass(ABCMeta, object)):
res = self._remote_chown(remote_paths, self._play_context.become_user)
if res['rc'] != 0 and remote_user == 'root':
# chown failed even if remote_user is root
- raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root.'
- ' Unprivileged become user would be unable to read the file.')
+ raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root. '
+ 'Unprivileged become user would be unable to read the file.')
elif res['rc'] != 0:
if C.ALLOW_WORLD_READABLE_TMPFILES:
# chown and fs acls failed -- do things this insecure
# way only if the user opted in in the config file
- display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user.'
- ' This may be insecure. For information on securing this, see'
- ' https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
+ display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user. '
+ 'This may be insecure. For information on securing this, see '
+ 'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
res = self._remote_chmod(remote_paths, 'a+%s' % chmod_mode)
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))
else:
- raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user'
- ' (rc: {0}, err: {1}). For information on working around this,'
- ' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], to_native(res['stderr'])))
+ raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user '
+ '(rc: %s, err: %s). For information on working around this, see '
+ 'https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'
+ % (res['rc'], to_native(res['stderr'])))
elif execute:
# Can't depend on the file being transferred with execute permissions.
# Only need user perms because no become was used here
@@ -479,7 +480,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
'''
Get information from remote file.
'''
- module_args=dict(
+ module_args = dict(
path=path,
follow=follow,
get_md5=False,
@@ -534,7 +535,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
elif errormsg.endswith(u'MODULE FAILURE'):
x = "4" # python not found or module uncaught exception
elif 'json' in errormsg or 'simplejson' in errormsg:
- x = "5" # json or simplejson modules needed
+ x = "5" # json or simplejson modules needed
finally:
return x
@@ -611,8 +612,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# give the module the socket for persistent connections
module_args['_ansible_socket'] = task_vars.get('ansible_socket')
-
-
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True, wrap_async=False):
'''
Transfer and run a module along with its arguments.
@@ -641,7 +640,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if not self._is_pipelining_enabled(module_style, wrap_async):
# we might need remote tmp dir
- if not tmp or not 'tmp' in tmp:
+ if not tmp or 'tmp' not in tmp:
tmp = self._make_tmp_path()
remote_module_filename = self._connection._shell.get_remote_filename(module_path)
@@ -661,7 +660,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# we need to dump the module args to a k=v string in a file on
# the remote system, which can be read and parsed by the module
args_data = ""
- for k,v in iteritems(module_args):
+ for k, v in iteritems(module_args):
args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
self._transfer_data(args_file_path, args_data)
elif module_style in ('non_native_want_json', 'binary'):
@@ -690,7 +689,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
remote_files.append(remote_async_module_path)
async_limit = self._task.async
- async_jid = str(random.randint(0, 999999999999))
+ async_jid = str(random.randint(0, 999999999999))
# call the interpreter for async_wrapper directly
# this permits use of a script for an interpreter on non-Linux platforms
@@ -710,7 +709,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if not self._should_remove_tmp_path(tmp):
async_cmd.append("-preserve_tmp")
- cmd= " ".join(to_text(x) for x in async_cmd)
+ cmd = " ".join(to_text(x) for x in async_cmd)
else:
@@ -736,7 +735,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
if remote_files:
# remove none/empty
- remote_files = [ x for x in remote_files if x]
+ remote_files = [x for x in remote_files if x]
self._fixup_perms2(remote_files, self._play_context.remote_user)
# actually execute
@@ -745,7 +744,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# parse the main result
data = self._parse_returned_data(res)
- #NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
+ # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
# get internal info before cleaning
tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)
@@ -756,7 +755,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete:
self._remove_tmp_path(tmp)
- #FIXME: for backwards compat, figure out if still makes sense
+ # FIXME: for backwards compat, figure out if still makes sense
if wrap_async:
data['changed'] = True
@@ -784,7 +783,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if key in data and not data[key]:
del data[key]
-
def _clean_returned_data(self, data):
remove_keys = set()
fact_keys = set(data.keys())
diff --git a/lib/ansible/plugins/action/asa_config.py b/lib/ansible/plugins/action/asa_config.py
index ffcb0f057f..590bc332af 100644
--- a/lib/ansible/plugins/action/asa_config.py
+++ b/lib/ansible/plugins/action/asa_config.py
@@ -22,7 +22,6 @@ __metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.plugins.action.net_config import ActionModule as NetActionModule
+
class ActionModule(NetActionModule, ActionBase):
pass
-
-
diff --git a/lib/ansible/plugins/action/asa_template.py b/lib/ansible/plugins/action/asa_template.py
index cc150d6183..94a1ab4dda 100644
--- a/lib/ansible/plugins/action/asa_template.py
+++ b/lib/ansible/plugins/action/asa_template.py
@@ -22,5 +22,6 @@ __metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.plugins.action.net_template import ActionModule as NetActionModule
+
class ActionModule(NetActionModule, ActionBase):
pass
diff --git a/lib/ansible/plugins/action/assemble.py b/lib/ansible/plugins/action/assemble.py
index 2da6527c4c..3fd48c5498 100644
--- a/lib/ansible/plugins/action/assemble.py
+++ b/lib/ansible/plugins/action/assemble.py
@@ -87,14 +87,14 @@ class ActionModule(ActionBase):
if task_vars is None:
task_vars = dict()
- src = self._task.args.get('src', None)
- dest = self._task.args.get('dest', None)
- delimiter = self._task.args.get('delimiter', None)
+ src = self._task.args.get('src', None)
+ dest = self._task.args.get('dest', None)
+ delimiter = self._task.args.get('delimiter', None)
remote_src = self._task.args.get('remote_src', 'yes')
- regexp = self._task.args.get('regexp', None)
- follow = self._task.args.get('follow', False)
+ regexp = self._task.args.get('regexp', None)
+ follow = self._task.args.get('follow', False)
ignore_hidden = self._task.args.get('ignore_hidden', False)
- decrypt = self._task.args.get('decrypt', True)
+ decrypt = self._task.args.get('decrypt', True)
if src is None or dest is None:
result['failed'] = True
@@ -159,7 +159,7 @@ class ActionModule(ActionBase):
# fix file permissions when the copy is done as a different user
self._fixup_perms2((tmp, remote_path))
- new_module_args.update( dict( src=xfered,))
+ new_module_args.update(dict(src=xfered,))
res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False)
if diff:
diff --git a/lib/ansible/plugins/action/copy.py b/lib/ansible/plugins/action/copy.py
index 978331d3d5..f66c03a0c6 100644
--- a/lib/ansible/plugins/action/copy.py
+++ b/lib/ansible/plugins/action/copy.py
@@ -40,13 +40,13 @@ class ActionModule(ActionBase):
result = super(ActionModule, self).run(tmp, task_vars)
- source = self._task.args.get('src', None)
+ source = self._task.args.get('src', None)
content = self._task.args.get('content', None)
- dest = self._task.args.get('dest', None)
- raw = boolean(self._task.args.get('raw', 'no'))
- force = boolean(self._task.args.get('force', 'yes'))
+ dest = self._task.args.get('dest', None)
+ raw = boolean(self._task.args.get('raw', 'no'))
+ force = boolean(self._task.args.get('force', 'yes'))
remote_src = boolean(self._task.args.get('remote_src', False))
- follow = boolean(self._task.args.get('follow', False))
+ follow = boolean(self._task.args.get('follow', False))
decrypt = boolean(self._task.args.get('decrypt', True))
result['failed'] = True
@@ -256,8 +256,8 @@ class ActionModule(ActionBase):
del new_module_args[key]
module_return = self._execute_module(module_name='copy',
- module_args=new_module_args, task_vars=task_vars,
- tmp=tmp, delete_remote_tmp=delete_remote_tmp)
+ module_args=new_module_args, task_vars=task_vars,
+ tmp=tmp, delete_remote_tmp=delete_remote_tmp)
module_executed = True
else:
@@ -291,8 +291,8 @@ class ActionModule(ActionBase):
# Execute the file module.
module_return = self._execute_module(module_name='file',
- module_args=new_module_args, task_vars=task_vars,
- tmp=tmp, delete_remote_tmp=delete_remote_tmp)
+ module_args=new_module_args, task_vars=task_vars,
+ tmp=tmp, delete_remote_tmp=delete_remote_tmp)
module_executed = True
if not module_return.get('checksum'):
diff --git a/lib/ansible/plugins/action/eos_template.py b/lib/ansible/plugins/action/eos_template.py
index 9530e8479f..9a3640e791 100644
--- a/lib/ansible/plugins/action/eos_template.py
+++ b/lib/ansible/plugins/action/eos_template.py
@@ -27,6 +27,7 @@ import urlparse
from ansible.module_utils._text import to_text
from ansible.plugins.action.eos import ActionModule as _ActionModule
+
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
diff --git a/lib/ansible/plugins/action/fetch.py b/lib/ansible/plugins/action/fetch.py
index db418ecad2..1792e73513 100644
--- a/lib/ansible/plugins/action/fetch.py
+++ b/lib/ansible/plugins/action/fetch.py
@@ -42,10 +42,10 @@ class ActionModule(ActionBase):
result['msg'] = 'check mode not (yet) supported for this module'
return result
- source = self._task.args.get('src', None)
- dest = self._task.args.get('dest', None)
- flat = boolean(self._task.args.get('flat'))
- fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
+ source = self._task.args.get('src', None)
+ dest = self._task.args.get('dest', None)
+ flat = boolean(self._task.args.get('flat'))
+ fail_on_missing = boolean(self._task.args.get('fail_on_missing'))
validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5', True)))
# validate_md5 is the deprecated way to specify validate_checksum
@@ -120,7 +120,7 @@ class ActionModule(ActionBase):
target_name = self._play_context.remote_addr
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
- dest = dest.replace("//","/")
+ dest = dest.replace("//", "/")
if remote_checksum in ('0', '1', '2', '3', '4', '5'):
result['changed'] = False
@@ -175,8 +175,8 @@ class ActionModule(ActionBase):
if validate_checksum and new_checksum != remote_checksum:
result.update(dict(failed=True, md5sum=new_md5,
- msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
- checksum=new_checksum, remote_checksum=remote_checksum))
+ msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
+ checksum=new_checksum, remote_checksum=remote_checksum))
else:
result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
'remote_md5sum': None, 'checksum': new_checksum,
diff --git a/lib/ansible/plugins/action/group_by.py b/lib/ansible/plugins/action/group_by.py
index 99f9db2a88..8f2f6e3626 100644
--- a/lib/ansible/plugins/action/group_by.py
+++ b/lib/ansible/plugins/action/group_by.py
@@ -23,7 +23,7 @@ from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
''' Create inventory groups based on variables '''
- ### We need to be able to modify the inventory
+ # We need to be able to modify the inventory
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=None):
@@ -38,7 +38,7 @@ class ActionModule(ActionBase):
return result
group_name = self._task.args.get('key')
- group_name = group_name.replace(' ','-')
+ group_name = group_name.replace(' ', '-')
result['changed'] = False
result['add_group'] = group_name
diff --git a/lib/ansible/plugins/action/ios_template.py b/lib/ansible/plugins/action/ios_template.py
index a3fa9d5360..fd3d1b03fa 100644
--- a/lib/ansible/plugins/action/ios_template.py
+++ b/lib/ansible/plugins/action/ios_template.py
@@ -27,6 +27,7 @@ import urlparse
from ansible.module_utils._text import to_text
from ansible.plugins.action.ios import ActionModule as _ActionModule
+
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
diff --git a/lib/ansible/plugins/action/iosxr_template.py b/lib/ansible/plugins/action/iosxr_template.py
index a2bfa6e499..0915149ca8 100644
--- a/lib/ansible/plugins/action/iosxr_template.py
+++ b/lib/ansible/plugins/action/iosxr_template.py
@@ -27,6 +27,7 @@ import urlparse
from ansible.module_utils._text import to_text
from ansible.plugins.action.iosxr import ActionModule as _ActionModule
+
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
@@ -100,4 +101,3 @@ class ActionModule(_ActionModule):
searchpath.append(os.path.dirname(source))
self._templar.environment.loader.searchpath = searchpath
self._task.args['src'] = self._templar.template(template_data)
-
diff --git a/lib/ansible/plugins/action/junos.py b/lib/ansible/plugins/action/junos.py
index 7e5581ab67..1d63c4a5ef 100644
--- a/lib/ansible/plugins/action/junos.py
+++ b/lib/ansible/plugins/action/junos.py
@@ -112,7 +112,7 @@ class ActionModule(_ActionModule):
path = unfrackpath("$HOME/.ansible/pc")
# use play_context.connection instead of play_context.port to avoid
# collision if netconf is listening on port 22
- #cp = ssh._create_control_path(play_context.remote_addr, play_context.connection, play_context.remote_user)
+ # cp = ssh._create_control_path(play_context.remote_addr, play_context.connection, play_context.remote_user)
cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user)
return cp % dict(directory=path)
diff --git a/lib/ansible/plugins/action/junos_template.py b/lib/ansible/plugins/action/junos_template.py
index 4524e0c8dd..55e785cdb1 100644
--- a/lib/ansible/plugins/action/junos_template.py
+++ b/lib/ansible/plugins/action/junos_template.py
@@ -27,6 +27,7 @@ import urlparse
from ansible.module_utils._text import to_text
from ansible.plugins.action.junos import ActionModule as _ActionModule
+
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
diff --git a/lib/ansible/plugins/action/normal.py b/lib/ansible/plugins/action/normal.py
index 71a8e57b71..070942a9f4 100644
--- a/lib/ansible/plugins/action/normal.py
+++ b/lib/ansible/plugins/action/normal.py
@@ -27,7 +27,7 @@ class ActionModule(ActionBase):
# individual modules might disagree but as the generic action plugin, pass at this point.
self._supports_check_mode = True
- self._supports_async = True
+ self._supports_async = True
results = super(ActionModule, self).run(tmp, task_vars)
diff --git a/lib/ansible/plugins/action/nxos_template.py b/lib/ansible/plugins/action/nxos_template.py
index 47f7d55a78..cc24b0c21a 100644
--- a/lib/ansible/plugins/action/nxos_template.py
+++ b/lib/ansible/plugins/action/nxos_template.py
@@ -27,6 +27,7 @@ import urlparse
from ansible.module_utils._text import to_text
from ansible.plugins.action.nxos import ActionModule as _ActionModule
+
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
diff --git a/lib/ansible/plugins/action/ops_config.py b/lib/ansible/plugins/action/ops_config.py
index ffcb0f057f..590bc332af 100644
--- a/lib/ansible/plugins/action/ops_config.py
+++ b/lib/ansible/plugins/action/ops_config.py
@@ -22,7 +22,6 @@ __metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.plugins.action.net_config import ActionModule as NetActionModule
+
class ActionModule(NetActionModule, ActionBase):
pass
-
-
diff --git a/lib/ansible/plugins/action/ops_template.py b/lib/ansible/plugins/action/ops_template.py
index 45cc9bb70c..d235f9f1a3 100644
--- a/lib/ansible/plugins/action/ops_template.py
+++ b/lib/ansible/plugins/action/ops_template.py
@@ -38,7 +38,7 @@ class ActionModule(NetActionModule, ActionBase):
self._handle_template()
result.update(self._execute_module(module_name=self._task.action,
- module_args=self._task.args, task_vars=task_vars))
+ module_args=self._task.args, task_vars=task_vars))
if self._task.args.get('backup') and result.get('_backup'):
contents = json.dumps(result['_backup'], indent=4)
@@ -48,5 +48,3 @@ class ActionModule(NetActionModule, ActionBase):
del result['_backup']
return result
-
-
diff --git a/lib/ansible/plugins/action/package.py b/lib/ansible/plugins/action/package.py
index f45a81640b..ab4f2d5056 100644
--- a/lib/ansible/plugins/action/package.py
+++ b/lib/ansible/plugins/action/package.py
@@ -34,7 +34,7 @@ class ActionModule(ActionBase):
''' handler for package operations '''
self._supports_check_mode = True
- self._supports_async = True
+ self._supports_async = True
result = super(ActionModule, self).run(tmp, task_vars)
@@ -42,12 +42,12 @@ class ActionModule(ActionBase):
if module == 'auto':
try:
- if self._task.delegate_to: # if we delegate, we should use delegated host's facts
+ if self._task.delegate_to: # if we delegate, we should use delegated host's facts
module = self._templar.template("{{hostvars['%s']['ansible_facts']['ansible_pkg_mgr']}}" % self._task.delegate_to)
else:
module = self._templar.template('{{ansible_facts["ansible_pkg_mgr"]}}')
except:
- pass # could not get it from template!
+ pass # could not get it from template!
if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_pkg_mgr', gather_subset='!all'), task_vars=task_vars)
diff --git a/lib/ansible/plugins/action/patch.py b/lib/ansible/plugins/action/patch.py
index 0e03c04c23..06c96487a8 100644
--- a/lib/ansible/plugins/action/patch.py
+++ b/lib/ansible/plugins/action/patch.py
@@ -34,7 +34,7 @@ class ActionModule(ActionBase):
result = super(ActionModule, self).run(tmp, task_vars)
- src = self._task.args.get('src', None)
+ src = self._task.args.get('src', None)
remote_src = boolean(self._task.args.get('remote_src', 'no'))
if src is None:
@@ -61,7 +61,7 @@ class ActionModule(ActionBase):
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src))
self._transfer_file(src, tmp_src)
- self._fixup_perms2((tmp, tmp_src) )
+ self._fixup_perms2((tmp, tmp_src))
new_module_args = self._task.args.copy()
new_module_args.update(
diff --git a/lib/ansible/plugins/action/pause.py b/lib/ansible/plugins/action/pause.py
index 5a70e550b1..478b8db0a1 100644
--- a/lib/ansible/plugins/action/pause.py
+++ b/lib/ansible/plugins/action/pause.py
@@ -59,13 +59,13 @@ class ActionModule(ActionBase):
prompt = None
seconds = None
result.update(dict(
- changed = False,
- rc = 0,
- stderr = '',
- stdout = '',
- start = None,
- stop = None,
- delta = None,
+ changed=False,
+ rc=0,
+ stderr='',
+ stdout='',
+ start=None,
+ stop=None,
+ delta=None,
))
# Is 'args' empty, then this is the default prompted pause
@@ -163,7 +163,6 @@ class ActionModule(ActionBase):
else:
raise AnsibleError('user requested abort!')
-
except AnsibleTimeoutExceeded:
# this is the exception we expect when the alarm signal
# fires, so we simply ignore it to move into the cleanup
diff --git a/lib/ansible/plugins/action/script.py b/lib/ansible/plugins/action/script.py
index 9327ab204a..f369e67ca5 100644
--- a/lib/ansible/plugins/action/script.py
+++ b/lib/ansible/plugins/action/script.py
@@ -59,9 +59,9 @@ class ActionModule(ActionBase):
# out now so we know the file name we need to transfer to the remote,
# and everything else is an argument to the script which we need later
# to append to the remote command
- parts = self._task.args.get('_raw_params', '').strip().split()
+ parts = self._task.args.get('_raw_params', '').strip().split()
source = parts[0]
- args = ' '.join(parts[1:])
+ args = ' '.join(parts[1:])
try:
source = self._loader.get_real_file(self._find_needle('files', source), decrypt=self._task.args.get('decrypt', True))
diff --git a/lib/ansible/plugins/action/service.py b/lib/ansible/plugins/action/service.py
index 91ab0e6bd3..6af9034e2e 100644
--- a/lib/ansible/plugins/action/service.py
+++ b/lib/ansible/plugins/action/service.py
@@ -33,7 +33,7 @@ class ActionModule(ActionBase):
''' handler for package operations '''
self._supports_check_mode = True
- self._supports_async = True
+ self._supports_async = True
result = super(ActionModule, self).run(tmp, task_vars)
@@ -41,12 +41,12 @@ class ActionModule(ActionBase):
if module == 'auto':
try:
- if self._task.delegate_to: # if we delegate, we should use delegated host's facts
+ if self._task.delegate_to: # if we delegate, we should use delegated host's facts
module = self._templar.template("{{hostvars['%s']['ansible_facts']['ansible_service_mgr']}}" % self._task.delegate_to)
else:
module = self._templar.template('{{ansible_facts["ansible_service_mgr"]}}')
except:
- pass # could not get it from template!
+ pass # could not get it from template!
if module == 'auto':
facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars)
diff --git a/lib/ansible/plugins/action/set_stats.py b/lib/ansible/plugins/action/set_stats.py
index f57a920321..1b85f10d38 100644
--- a/lib/ansible/plugins/action/set_stats.py
+++ b/lib/ansible/plugins/action/set_stats.py
@@ -28,7 +28,7 @@ class ActionModule(ActionBase):
TRANSFERS_FILES = False
- #TODO: document this in non-empty set_stats.py module
+ # TODO: document this in non-empty set_stats.py module
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
diff --git a/lib/ansible/plugins/action/sros_config.py b/lib/ansible/plugins/action/sros_config.py
index dcd84deff4..1b6902ca1a 100644
--- a/lib/ansible/plugins/action/sros_config.py
+++ b/lib/ansible/plugins/action/sros_config.py
@@ -19,14 +19,14 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+import glob
import os
import re
import time
-import glob
-from ansible.plugins.action.sros import ActionModule as _ActionModule
-from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlsplit
+from ansible.module_utils._text import to_text
+from ansible.plugins.action.sros import ActionModule as _ActionModule
from ansible.utils.vars import merge_hash
PRIVATE_KEYS_RE = re.compile('__.+__')
@@ -110,4 +110,3 @@ class ActionModule(_ActionModule):
searchpath.append(os.path.dirname(source))
self._templar.environment.loader.searchpath = searchpath
self._task.args['src'] = self._templar.template(template_data)
-
diff --git a/lib/ansible/plugins/action/synchronize.py b/lib/ansible/plugins/action/synchronize.py
index 31a606686e..6278a0738e 100644
--- a/lib/ansible/plugins/action/synchronize.py
+++ b/lib/ansible/plugins/action/synchronize.py
@@ -62,7 +62,7 @@ class ActionModule(ActionBase):
return path
# If using docker, do not add user information
- if self._remote_transport not in [ 'docker' ] and user:
+ if self._remote_transport not in ['docker'] and user:
user_prefix = '%s@' % (user, )
if self._host_is_ipv6_address(host):
@@ -308,8 +308,7 @@ class ActionModule(ActionBase):
src = _tmp_args.get('src', None)
dest = _tmp_args.get('dest', None)
if src is None or dest is None:
- return dict(failed=True,
- msg="synchronize requires both src and dest parameters are set")
+ return dict(failed=True, msg="synchronize requires both src and dest parameters are set")
if not dest_is_local:
# Private key handling
@@ -384,7 +383,7 @@ class ActionModule(ActionBase):
# If launching synchronize against docker container
# use rsync_opts to support container to override rsh options
- if self._remote_transport in [ 'docker' ]:
+ if self._remote_transport in ['docker']:
# Replicate what we do in the module argumentspec handling for lists
if not isinstance(_tmp_args.get('rsync_opts'), MutableSequence):
tmp_rsync_opts = _tmp_args.get('rsync_opts', [])
diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py
index 35bc8d4ce2..adf3989124 100644
--- a/lib/ansible/plugins/action/template.py
+++ b/lib/ansible/plugins/action/template.py
@@ -23,11 +23,12 @@ from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.plugins.action import ActionBase
-from ansible.utils.hashing import checksum_s
from ansible.template import generate_ansible_template_vars
+from ansible.utils.hashing import checksum_s
boolean = C.mk_boolean
+
class ActionModule(ActionBase):
TRANSFERS_FILES = True
@@ -56,9 +57,9 @@ class ActionModule(ActionBase):
result = super(ActionModule, self).run(tmp, task_vars)
source = self._task.args.get('src', None)
- dest = self._task.args.get('dest', None)
- force = boolean(self._task.args.get('force', True))
- state = self._task.args.get('state', None)
+ dest = self._task.args.get('dest', None)
+ force = boolean(self._task.args.get('force', True))
+ state = self._task.args.get('state', None)
newline_sequence = self._task.args.get('newline_sequence', self.DEFAULT_NEWLINE_SEQUENCE)
variable_start_string = self._task.args.get('variable_start_string', None)
variable_end_string = self._task.args.get('variable_end_string', None)
@@ -191,7 +192,7 @@ class ActionModule(ActionBase):
dest=dest,
original_basename=os.path.basename(source),
follow=True,
- ),
+ ),
)
result.update(self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))
diff --git a/lib/ansible/plugins/action/unarchive.py b/lib/ansible/plugins/action/unarchive.py
index dbd8785393..0b4a0f4da0 100644
--- a/lib/ansible/plugins/action/unarchive.py
+++ b/lib/ansible/plugins/action/unarchive.py
@@ -38,8 +38,8 @@ class ActionModule(ActionBase):
result = super(ActionModule, self).run(tmp, task_vars)
- source = self._task.args.get('src', None)
- dest = self._task.args.get('dest', None)
+ source = self._task.args.get('src', None)
+ dest = self._task.args.get('dest', None)
remote_src = boolean(self._task.args.get('remote_src', False))
creates = self._task.args.get('creates', None)
decrypt = self._task.args.get('decrypt', True)
@@ -73,7 +73,7 @@ class ActionModule(ActionBase):
self._remove_tmp_path(tmp)
return result
- dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
+ dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
source = os.path.expanduser(source)
if not remote_src:
diff --git a/lib/ansible/plugins/cache/__init__.py b/lib/ansible/plugins/cache/__init__.py
index 6f4bb19f24..c36f00bba2 100644
--- a/lib/ansible/plugins/cache/__init__.py
+++ b/lib/ansible/plugins/cache/__init__.py
@@ -86,13 +86,13 @@ class BaseFileCacheModule(BaseCacheModule):
self._cache_dir = os.path.expanduser(os.path.expandvars(C.CACHE_PLUGIN_CONNECTION))
if not self._cache_dir:
- raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option"
- " to be set (to a writeable directory path)" % self.plugin_name)
+ raise AnsibleError("error, '%s' cache plugin requires the 'fact_caching_connection' config option "
+ "to be set (to a writeable directory path)" % self.plugin_name)
if not os.path.exists(self._cache_dir):
try:
os.makedirs(self._cache_dir)
- except (OSError,IOError) as e:
+ except (OSError, IOError) as e:
raise AnsibleError("error in '%s' cache plugin while trying to create cache dir %s : %s" % (self.plugin_name, self._cache_dir, to_bytes(e)))
else:
for x in (os.R_OK, os.W_OK, os.X_OK):
@@ -118,12 +118,12 @@ class BaseFileCacheModule(BaseCacheModule):
self._cache[key] = value
return value
except ValueError as e:
- display.warning("error in '%s' cache plugin while trying to read %s : %s."
- " Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
+ display.warning("error in '%s' cache plugin while trying to read %s : %s. "
+ "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
self.delete(key)
- raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data."
- " It has been removed, so you can re-run your command now." % cachefile)
- except (OSError,IOError) as e:
+ raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
+ "It has been removed, so you can re-run your command now." % cachefile)
+ except (OSError, IOError) as e:
display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
raise KeyError
except Exception as e:
@@ -136,7 +136,7 @@ class BaseFileCacheModule(BaseCacheModule):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
self._dump(value, cachefile)
- except (OSError,IOError) as e:
+ except (OSError, IOError) as e:
display.warning("error in '%s' cache plugin while trying to write to %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
def has_expired(self, key):
@@ -147,7 +147,7 @@ class BaseFileCacheModule(BaseCacheModule):
cachefile = "%s/%s" % (self._cache_dir, key)
try:
st = os.stat(cachefile)
- except (OSError,IOError) as e:
+ except (OSError, IOError) as e:
if e.errno == errno.ENOENT:
return False
else:
@@ -179,7 +179,7 @@ class BaseFileCacheModule(BaseCacheModule):
try:
os.stat(cachefile)
return True
- except (OSError,IOError) as e:
+ except (OSError, IOError) as e:
if e.errno == errno.ENOENT:
return False
else:
@@ -194,7 +194,7 @@ class BaseFileCacheModule(BaseCacheModule):
try:
os.remove("%s/%s" % (self._cache_dir, key))
except (OSError, IOError):
- pass #TODO: only pass on non existing?
+ pass # TODO: only pass on non existing?
def flush(self):
self._cache = {}
@@ -236,6 +236,7 @@ class BaseFileCacheModule(BaseCacheModule):
"""
pass
+
class FactCache(MutableMapping):
def __init__(self, *args, **kwargs):
@@ -247,7 +248,6 @@ class FactCache(MutableMapping):
# Backwards compat: self._display isn't really needed, just import the global display and use that.
self._display = display
-
def __getitem__(self, key):
if not self._plugin.contains(key):
raise KeyError
diff --git a/lib/ansible/plugins/cache/base.py b/lib/ansible/plugins/cache/base.py
index 6c2901f737..692b1b3716 100644
--- a/lib/ansible/plugins/cache/base.py
+++ b/lib/ansible/plugins/cache/base.py
@@ -19,4 +19,3 @@ __metaclass__ = type
# moved actual classes to __init__ kept here for backward compat with 3rd parties
from ansible.plugins.cache import BaseCacheModule, BaseFileCacheModule
-
diff --git a/lib/ansible/plugins/cache/jsonfile.py b/lib/ansible/plugins/cache/jsonfile.py
index 010a026f1e..8cfdcc1c18 100644
--- a/lib/ansible/plugins/cache/jsonfile.py
+++ b/lib/ansible/plugins/cache/jsonfile.py
@@ -37,6 +37,7 @@ except ImportError:
from ansible.parsing.utils.jsonify import jsonify
from ansible.plugins.cache import BaseFileCacheModule
+
class CacheModule(BaseFileCacheModule):
"""
A caching module backed by json files.
diff --git a/lib/ansible/plugins/cache/memory.py b/lib/ansible/plugins/cache/memory.py
index 497af9ce5e..494fd49513 100644
--- a/lib/ansible/plugins/cache/memory.py
+++ b/lib/ansible/plugins/cache/memory.py
@@ -30,6 +30,7 @@ __metaclass__ = type
from ansible.plugins.cache import BaseCacheModule
+
class CacheModule(BaseCacheModule):
def __init__(self, *args, **kwargs):
diff --git a/lib/ansible/plugins/cache/pickle.py b/lib/ansible/plugins/cache/pickle.py
index 3ac025170d..1c36ee9182 100644
--- a/lib/ansible/plugins/cache/pickle.py
+++ b/lib/ansible/plugins/cache/pickle.py
@@ -36,6 +36,7 @@ except ImportError:
from ansible.module_utils.six import PY3
from ansible.plugins.cache import BaseFileCacheModule
+
class CacheModule(BaseFileCacheModule):
"""
A caching module backed by pickle files.
diff --git a/lib/ansible/plugins/cache/redis.py b/lib/ansible/plugins/cache/redis.py
index 99ae07170d..c0e5d4b534 100644
--- a/lib/ansible/plugins/cache/redis.py
+++ b/lib/ansible/plugins/cache/redis.py
@@ -30,6 +30,7 @@ try:
except ImportError:
raise AnsibleError("The 'redis' python module is required for the redis fact cache, 'pip install redis'")
+
class CacheModule(BaseCacheModule):
"""
A caching module backed by redis.
@@ -65,7 +66,7 @@ class CacheModule(BaseCacheModule):
def set(self, key, value):
value2 = json.dumps(value)
- if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire'
+ if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire'
self._cache.setex(self._make_key(key), int(self._timeout), value2)
else:
self._cache.set(self._make_key(key), value2)
diff --git a/lib/ansible/plugins/cache/yaml.py b/lib/ansible/plugins/cache/yaml.py
index bad7f59364..7a6719172b 100644
--- a/lib/ansible/plugins/cache/yaml.py
+++ b/lib/ansible/plugins/cache/yaml.py
@@ -37,6 +37,7 @@ from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.plugins.cache import BaseFileCacheModule
+
class CacheModule(BaseFileCacheModule):
"""
A caching module backed by yaml files.
diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py
index df72182b5e..631fd80509 100644
--- a/lib/ansible/plugins/callback/__init__.py
+++ b/lib/ansible/plugins/callback/__init__.py
@@ -213,7 +213,6 @@ class CallbackBase:
if remove_key in result:
del result[remove_key]
-
def set_play_context(self, play_context):
pass
@@ -280,7 +279,7 @@ class CallbackBase:
def on_file_diff(self, host, diff):
pass
- ####### V2 METHODS, by default they call v1 counterparts if possible ######
+ # V2 METHODS, by default they call v1 counterparts if possible
def v2_on_any(self, *args, **kwargs):
self.on_any(args, kwargs)
@@ -295,7 +294,7 @@ class CallbackBase:
def v2_runner_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
host = result._host.get_name()
- self.runner_on_skipped(host, self._get_item(getattr(result._result,'results',{})))
+ self.runner_on_skipped(host, self._get_item(getattr(result._result, 'results', {})))
def v2_runner_on_unreachable(self, result):
host = result._host.get_name()
@@ -307,7 +306,7 @@ class CallbackBase:
def v2_runner_on_async_poll(self, result):
host = result._host.get_name()
jid = result._result.get('ansible_job_id')
- #FIXME, get real clock
+ # FIXME, get real clock
clock = 0
self.runner_on_async_poll(host, result._result, jid, clock)
@@ -322,7 +321,7 @@ class CallbackBase:
self.runner_on_async_failed(host, result._result, jid)
def v2_runner_on_file_diff(self, result, diff):
- pass #no v1 correspondance
+ pass # no v1 correspondence
def v2_playbook_on_start(self, playbook):
self.playbook_on_start()
@@ -341,10 +340,10 @@ class CallbackBase:
self.playbook_on_task_start(task.name, is_conditional)
def v2_playbook_on_cleanup_task_start(self, task):
- pass #no v1 correspondance
+ pass # no v1 correspondence
def v2_playbook_on_handler_task_start(self, task):
- pass #no v1 correspondance
+ pass # no v1 correspondence
def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
self.playbook_on_vars_prompt(varname, private, prompt, encrypt, confirm, salt_size, salt, default)
@@ -372,7 +371,7 @@ class CallbackBase:
self.on_file_diff(host, result._result['diff'])
def v2_playbook_on_include(self, included_file):
- pass #no v1 correspondance
+ pass # no v1 correspondence
def v2_runner_item_on_ok(self, result):
pass
diff --git a/lib/ansible/plugins/callback/actionable.py b/lib/ansible/plugins/callback/actionable.py
index 8746d7579a..5aad78d52a 100644
--- a/lib/ansible/plugins/callback/actionable.py
+++ b/lib/ansible/plugins/callback/actionable.py
@@ -21,6 +21,7 @@ __metaclass__ = type
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
+
class CallbackModule(CallbackModule_default):
CALLBACK_VERSION = 2.0
@@ -76,4 +77,3 @@ class CallbackModule(CallbackModule_default):
def v2_runner_item_on_failed(self, result):
self.display_task_banner()
self.super_ref.v2_runner_item_on_failed(result)
-
diff --git a/lib/ansible/plugins/callback/context_demo.py b/lib/ansible/plugins/callback/context_demo.py
index 126f55f7a9..89e729f2ec 100644
--- a/lib/ansible/plugins/callback/context_demo.py
+++ b/lib/ansible/plugins/callback/context_demo.py
@@ -21,6 +21,7 @@ __metaclass__ = type
from ansible.plugins.callback import CallbackBase
+
class CallbackModule(CallbackBase):
"""
This is a very trivial example of how any callback function can get at play and task objects.
diff --git a/lib/ansible/plugins/callback/default.py b/lib/ansible/plugins/callback/default.py
index d5bd68d9b3..8313c39513 100644
--- a/lib/ansible/plugins/callback/default.py
+++ b/lib/ansible/plugins/callback/default.py
@@ -30,6 +30,7 @@ from ansible import constants as C
from ansible.plugins.callback import CallbackBase
from ansible.utils.color import colorize, hostcolor
+
class CallbackModule(CallbackBase):
'''
@@ -100,7 +101,7 @@ class CallbackModule(CallbackBase):
self._process_items(result)
else:
- if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % (self._dump_results(result._result),)
self._display.display(msg, color=color)
@@ -113,7 +114,7 @@ class CallbackModule(CallbackBase):
self._process_items(result)
else:
msg = "skipping: [%s]" % result._host.get_name()
- if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
@@ -209,7 +210,7 @@ class CallbackModule(CallbackBase):
msg += " => (item=%s)" % (self._get_item(result._result),)
- if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=color)
@@ -230,7 +231,7 @@ class CallbackModule(CallbackBase):
def v2_runner_item_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
msg = "skipping: [%s] => (item=%s) " % (result._host.get_name(), self._get_item(result._result))
- if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
+ if (self._display.verbosity > 0 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += " => %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_SKIP)
@@ -269,16 +270,16 @@ class CallbackModule(CallbackBase):
if C.SHOW_CUSTOM_STATS and stats.custom:
self._display.banner("CUSTOM STATS: ")
# per host
- #TODO: come up with 'pretty format'
+ # TODO: come up with 'pretty format'
for k in sorted(stats.custom.keys()):
if k == '_run':
continue
- self._display.display('\t%s: %s' % (k, self._dump_results(stats.custom[k], indent=1).replace('\n','')))
+ self._display.display('\t%s: %s' % (k, self._dump_results(stats.custom[k], indent=1).replace('\n', '')))
# print per run custom stats
if '_run' in stats.custom:
self._display.display("", screen_only=True)
- self._display.display('\tRUN: %s' % self._dump_results(stats.custom['_run'], indent=1).replace('\n',''))
+ self._display.display('\tRUN: %s' % self._dump_results(stats.custom['_run'], indent=1).replace('\n', ''))
self._display.display("", screen_only=True)
def v2_playbook_on_start(self, playbook):
@@ -291,13 +292,13 @@ class CallbackModule(CallbackBase):
for option in dir(self._options):
if option.startswith('_') or option in ['read_file', 'ensure_value', 'read_module']:
continue
- val = getattr(self._options,option)
+ val = getattr(self._options, option)
if val:
- self._display.vvvv('%s: %s' % (option,val))
+ self._display.vvvv('%s: %s' % (option, val))
def v2_runner_retry(self, result):
task_name = result.task_name or result._task
msg = "FAILED - RETRYING: %s (%d retries left)." % (task_name, result._result['retries'] - result._result['attempts'])
- if (self._display.verbosity > 2 or '_ansible_verbose_always' in result._result) and not '_ansible_verbose_override' in result._result:
+ if (self._display.verbosity > 2 or '_ansible_verbose_always' in result._result) and '_ansible_verbose_override' not in result._result:
msg += "Result was: %s" % self._dump_results(result._result)
self._display.display(msg, color=C.COLOR_DEBUG)
diff --git a/lib/ansible/plugins/callback/dense.py b/lib/ansible/plugins/callback/dense.py
index 03098116a7..11d7c2e85f 100644
--- a/lib/ansible/plugins/callback/dense.py
+++ b/lib/ansible/plugins/callback/dense.py
@@ -71,18 +71,18 @@ import sys
# FIXME: Importing constants as C simply does not work, beats me :-/
-#from ansible import constants as C
+# from ansible import constants as C
class C:
- COLOR_HIGHLIGHT = 'white'
- COLOR_VERBOSE = 'blue'
- COLOR_WARN = 'bright purple'
- COLOR_ERROR = 'red'
- COLOR_DEBUG = 'dark gray'
- COLOR_DEPRECATE = 'purple'
- COLOR_SKIP = 'cyan'
+ COLOR_HIGHLIGHT = 'white'
+ COLOR_VERBOSE = 'blue'
+ COLOR_WARN = 'bright purple'
+ COLOR_ERROR = 'red'
+ COLOR_DEBUG = 'dark gray'
+ COLOR_DEPRECATE = 'purple'
+ COLOR_SKIP = 'cyan'
COLOR_UNREACHABLE = 'bright red'
- COLOR_OK = 'green'
- COLOR_CHANGED = 'yellow'
+ COLOR_OK = 'green'
+ COLOR_CHANGED = 'yellow'
# Taken from Dstat
@@ -136,15 +136,16 @@ class vt100:
colors = dict(
- ok = vt100.darkgreen,
- changed = vt100.darkyellow,
- skipped = vt100.darkcyan,
- ignored = vt100.cyanbg + vt100.red,
- failed = vt100.darkred,
- unreachable = vt100.red,
+ ok=vt100.darkgreen,
+ changed=vt100.darkyellow,
+ skipped=vt100.darkcyan,
+ ignored=vt100.cyanbg + vt100.red,
+ failed=vt100.darkred,
+ unreachable=vt100.red,
)
-states = ( 'skipped', 'ok', 'changed', 'failed', 'unreachable' )
+states = ('skipped', 'ok', 'changed', 'failed', 'unreachable')
+
class CallbackModule_dense(CallbackModule_default):
@@ -156,7 +157,6 @@ class CallbackModule_dense(CallbackModule_default):
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'dense'
-
def __init__(self):
# From CallbackModule
diff --git a/lib/ansible/plugins/callback/foreman.py b/lib/ansible/plugins/callback/foreman.py
index 48922dcb57..39147ac44a 100644
--- a/lib/ansible/plugins/callback/foreman.py
+++ b/lib/ansible/plugins/callback/foreman.py
@@ -123,11 +123,17 @@ class CallbackModule(CallbackBase):
level = 'err'
else:
level = 'notice' if 'changed' in msg and msg['changed'] else 'info'
- logs.append({"log": {
- 'sources': {'source': source},
- 'messages': {'message': json.dumps(msg)},
- 'level': level
- }})
+ logs.append({
+ "log": {
+ 'sources': {
+ 'source': source
+ },
+ 'messages': {
+ 'message': json.dumps(msg)
+ },
+ 'level': level
+ }
+ })
return logs
def send_reports(self, stats):
@@ -154,8 +160,8 @@ class CallbackModule(CallbackBase):
"metrics": metrics,
"status": status,
"logs": log,
- }
}
+ }
# To be changed to /api/v2/config_reports in 1.11. Maybe we
# could make a GET request to get the Foreman version & do
# this automatically.
diff --git a/lib/ansible/plugins/callback/hipchat.py b/lib/ansible/plugins/callback/hipchat.py
index 3c92792824..3de5f85bb5 100644
--- a/lib/ansible/plugins/callback/hipchat.py
+++ b/lib/ansible/plugins/callback/hipchat.py
@@ -58,7 +58,7 @@ class CallbackModule(CallbackBase):
if not HAS_PRETTYTABLE:
self.disabled = True
self._display.warning('The `prettytable` python module is not installed. '
- 'Disabling the HipChat callback plugin.')
+ 'Disabling the HipChat callback plugin.')
self.msg_uri = 'https://api.hipchat.com/v1/rooms/message'
self.token = os.getenv('HIPCHAT_TOKEN')
@@ -69,8 +69,8 @@ class CallbackModule(CallbackBase):
if self.token is None:
self.disabled = True
self._display.warning('HipChat token could not be loaded. The HipChat '
- 'token can be provided using the `HIPCHAT_TOKEN` '
- 'environment variable.')
+ 'token can be provided using the `HIPCHAT_TOKEN` '
+ 'environment variable.')
self.printed_playbook = False
self.playbook_name = None
diff --git a/lib/ansible/plugins/callback/jabber.py b/lib/ansible/plugins/callback/jabber.py
index ff839716ff..2f764daa7c 100644
--- a/lib/ansible/plugins/callback/jabber.py
+++ b/lib/ansible/plugins/callback/jabber.py
@@ -39,8 +39,8 @@ class CallbackModule(CallbackBase):
super(CallbackModule, self).__init__(display=display)
if not HAS_XMPP:
- self._display.warning("The required python xmpp library (xmpppy) is not installed."
- " pip install git+https://github.com/ArchipelProject/xmpppy")
+ self._display.warning("The required python xmpp library (xmpppy) is not installed. "
+ "pip install git+https://github.com/ArchipelProject/xmpppy")
self.disabled = True
self.serv = os.getenv('JABBER_SERV')
@@ -48,15 +48,15 @@ class CallbackModule(CallbackBase):
self.j_pass = os.getenv('JABBER_PASS')
self.j_to = os.getenv('JABBER_TO')
- if (self.j_user or self.j_pass or self.serv ) is None:
+ if (self.j_user or self.j_pass or self.serv) is None:
self.disabled = True
- self._display.warning ('Jabber CallBack want JABBER_USER and JABBER_PASS env variables')
+ self._display.warning('Jabber CallBack wants JABBER_USER and JABBER_PASS env variables')
def send_msg(self, msg):
"""Send message"""
jid = xmpp.JID(self.j_user)
- client = xmpp.Client(self.serv,debug=[])
- client.connect(server=(self.serv,5222))
+ client = xmpp.Client(self.serv, debug=[])
+ client.connect(server=(self.serv, 5222))
client.auth(jid.getNode(), self.j_pass, resource=jid.getResource())
message = xmpp.Message(self.j_to, msg)
message.setAttr('type', 'chat')
@@ -93,5 +93,4 @@ class CallbackModule(CallbackBase):
self.send_msg("%s: Failures detected \n%s \nHost: %s\n Failed at:\n%s" % (name, self.task, h, out))
else:
out = self.debug
- self.send_msg("Great! \n Playbook %s completed:\n%s \n Last task debug:\n %s" % (name,s, out))
-
+ self.send_msg("Great! \n Playbook %s completed:\n%s \n Last task debug:\n %s" % (name, s, out))
diff --git a/lib/ansible/plugins/callback/log_plays.py b/lib/ansible/plugins/callback/log_plays.py
index 0eb33afd01..12c6a8d225 100644
--- a/lib/ansible/plugins/callback/log_plays.py
+++ b/lib/ansible/plugins/callback/log_plays.py
@@ -44,8 +44,8 @@ class CallbackModule(CallbackBase):
CALLBACK_NAME = 'log_plays'
CALLBACK_NEEDS_WHITELIST = True
- TIME_FORMAT="%b %d %Y %H:%M:%S"
- MSG_FORMAT="%(now)s - %(category)s - %(data)s\n\n"
+ TIME_FORMAT = "%b %d %Y %H:%M:%S"
+ MSG_FORMAT = "%(now)s - %(category)s - %(data)s\n\n"
def __init__(self):
diff --git a/lib/ansible/plugins/callback/logentries.py b/lib/ansible/plugins/callback/logentries.py
index da4c55bcac..8264453a4d 100644
--- a/lib/ansible/plugins/callback/logentries.py
+++ b/lib/ansible/plugins/callback/logentries.py
@@ -160,10 +160,10 @@ class PlainTextSocketAppender(object):
try:
import ssl
- HAS_SSL=True
+ HAS_SSL = True
except ImportError: # for systems without TLS support.
SocketAppender = PlainTextSocketAppender
- HAS_SSL=False
+ HAS_SSL = False
else:
class TLSSocketAppender(PlainTextSocketAppender):
@@ -199,14 +199,14 @@ class CallbackModule(CallbackBase):
self._display.warning("Unable to import ssl module. Will send over port 80.")
if not HAS_CERTIFI:
- self.disabled =True
+ self.disabled = True
self._display.warning('The `certifi` python module is not installed. '
- 'Disabling the Logentries callback plugin.')
+ 'Disabling the Logentries callback plugin.')
if not HAS_FLATDICT:
- self.disabled =True
+ self.disabled = True
self._display.warning('The `flatdict` python module is not installed. '
- 'Disabling the Logentries callback plugin.')
+ 'Disabling the Logentries callback plugin.')
config_path = os.path.abspath(os.path.dirname(__file__))
config = configparser.ConfigParser()
diff --git a/lib/ansible/plugins/callback/logstash.py b/lib/ansible/plugins/callback/logstash.py
index 31c7e90177..61177cca4c 100644
--- a/lib/ansible/plugins/callback/logstash.py
+++ b/lib/ansible/plugins/callback/logstash.py
@@ -33,6 +33,7 @@ except ImportError:
from ansible.plugins.callback import CallbackBase
+
class CallbackModule(CallbackBase):
"""
ansible logstash callback plugin
@@ -69,9 +70,9 @@ class CallbackModule(CallbackBase):
if not HAS_LOGSTASH:
self.disabled = True
self._display.warning("The required python-logstash is not installed. "
- "pip install python-logstash")
+ "pip install python-logstash")
else:
- self.logger = logging.getLogger('python-logstash-logger')
+ self.logger = logging.getLogger('python-logstash-logger')
self.logger.setLevel(logging.DEBUG)
self.handler = logstash.TCPLogstashHandler(
@@ -95,7 +96,7 @@ class CallbackModule(CallbackBase):
'ansible_type': "start",
'ansible_playbook': self.playbook,
}
- self.logger.info("ansible start", extra = data)
+ self.logger.info("ansible start", extra=data)
def v2_playbook_on_stats(self, stats):
summarize_stat = {}
@@ -115,7 +116,7 @@ class CallbackModule(CallbackBase):
'ansible_playbook': self.playbook,
'ansible_result': json.dumps(summarize_stat),
}
- self.logger.info("ansible stats", extra = data)
+ self.logger.info("ansible stats", extra=data)
def v2_runner_on_ok(self, result, **kwargs):
data = {
@@ -128,7 +129,7 @@ class CallbackModule(CallbackBase):
'ansible_task': result._task,
'ansible_result': self._dump_results(result._result)
}
- self.logger.info("ansible ok", extra = data)
+ self.logger.info("ansible ok", extra=data)
def v2_runner_on_skipped(self, result, **kwargs):
data = {
@@ -140,7 +141,7 @@ class CallbackModule(CallbackBase):
'ansible_task': result._task,
'ansible_host': result._host.name
}
- self.logger.info("ansible skipped", extra = data)
+ self.logger.info("ansible skipped", extra=data)
def v2_playbook_on_import_for_host(self, result, imported_file):
data = {
@@ -152,7 +153,7 @@ class CallbackModule(CallbackBase):
'ansible_host': result._host.name,
'imported_file': imported_file
}
- self.logger.info("ansible import", extra = data)
+ self.logger.info("ansible import", extra=data)
def v2_playbook_on_not_import_for_host(self, result, missing_file):
data = {
@@ -164,7 +165,7 @@ class CallbackModule(CallbackBase):
'ansible_host': result._host.name,
'missing_file': missing_file
}
- self.logger.info("ansible import", extra = data)
+ self.logger.info("ansible import", extra=data)
def v2_runner_on_failed(self, result, **kwargs):
data = {
@@ -178,7 +179,7 @@ class CallbackModule(CallbackBase):
'ansible_result': self._dump_results(result._result)
}
self.errors += 1
- self.logger.error("ansible failed", extra = data)
+ self.logger.error("ansible failed", extra=data)
def v2_runner_on_unreachable(self, result, **kwargs):
data = {
@@ -191,7 +192,7 @@ class CallbackModule(CallbackBase):
'ansible_task': result._task,
'ansible_result': self._dump_results(result._result)
}
- self.logger.error("ansbile unreachable", extra = data)
+ self.logger.error("ansbile unreachable", extra=data)
def v2_runner_on_async_failed(self, result, **kwargs):
data = {
@@ -205,4 +206,4 @@ class CallbackModule(CallbackBase):
'ansible_result': self._dump_results(result._result)
}
self.errors += 1
- self.logger.error("ansible async", extra = data)
+ self.logger.error("ansible async", extra=data)
diff --git a/lib/ansible/plugins/callback/mail.py b/lib/ansible/plugins/callback/mail.py
index 443bcba567..80608f707b 100644
--- a/lib/ansible/plugins/callback/mail.py
+++ b/lib/ansible/plugins/callback/mail.py
@@ -32,11 +32,11 @@ from ansible.plugins.callback import CallbackBase
def mail(subject='Ansible error mail', sender=None, to=None, cc=None, bcc=None, body=None, smtphost=None):
if sender is None:
- sender='<root>'
+ sender = '<root>'
if to is None:
- to='root'
+ to = 'root'
if smtphost is None:
- smtphost=os.getenv('SMTPHOST', 'localhost')
+ smtphost = os.getenv('SMTPHOST', 'localhost')
if body is None:
body = subject
diff --git a/lib/ansible/plugins/callback/minimal.py b/lib/ansible/plugins/callback/minimal.py
index de10b3d17a..eadc7de0ef 100644
--- a/lib/ansible/plugins/callback/minimal.py
+++ b/lib/ansible/plugins/callback/minimal.py
@@ -38,9 +38,9 @@ class CallbackModule(CallbackBase):
''' output the result of a command run '''
buf = "%s | %s | rc=%s >>\n" % (host, caption, result.get('rc', -1))
- buf += result.get('stdout','')
- buf += result.get('stderr','')
- buf += result.get('msg','')
+ buf += result.get('stdout', '')
+ buf += result.get('stderr', '')
+ buf += result.get('msg', '')
return buf + "\n"
diff --git a/lib/ansible/plugins/callback/oneline.py b/lib/ansible/plugins/callback/oneline.py
index 0985f51d0b..55e5ea4ce8 100644
--- a/lib/ansible/plugins/callback/oneline.py
+++ b/lib/ansible/plugins/callback/oneline.py
@@ -34,10 +34,10 @@ class CallbackModule(CallbackBase):
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'oneline'
- def _command_generic_msg(self, hostname, result, caption):
- stdout = result.get('stdout','').replace('\n', '\\n').replace('\r', '\\r')
+ def _command_generic_msg(self, hostname, result, caption):
+ stdout = result.get('stdout', '').replace('\n', '\\n').replace('\r', '\\r')
if 'stderr' in result and result['stderr']:
- stderr = result.get('stderr','').replace('\n', '\\n').replace('\r', '\\r')
+ stderr = result.get('stderr', '').replace('\n', '\\n').replace('\r', '\\r')
return "%s | %s | rc=%s | (stdout) %s (stderr) %s" % (hostname, caption, result.get('rc', -1), stdout, stderr)
else:
return "%s | %s | rc=%s | (stdout) %s" % (hostname, caption, result.get('rc', -1), stdout)
@@ -49,26 +49,25 @@ class CallbackModule(CallbackBase):
error = result._result['exception'].strip().split('\n')[-1]
msg = "An exception occurred during task execution. To see the full traceback, use -vvv. The error was: %s" % error
else:
- msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception'].replace('\n','')
+ msg = "An exception occurred during task execution. The full traceback is:\n" + result._result['exception'].replace('\n', '')
if result._task.action in C.MODULE_NO_JSON and 'module_stderr' not in result._result:
- self._display.display(self._command_generic_msg(result._host.get_name(), result._result,'FAILED'), color=C.COLOR_ERROR)
+ self._display.display(self._command_generic_msg(result._host.get_name(), result._result, 'FAILED'), color=C.COLOR_ERROR)
else:
self._display.display(msg, color=C.COLOR_ERROR)
- self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n','')),
+ self._display.display("%s | FAILED! => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n', '')),
color=C.COLOR_ERROR)
def v2_runner_on_ok(self, result):
if result._task.action in C.MODULE_NO_JSON:
- self._display.display(self._command_generic_msg(result._host.get_name(), result._result,'SUCCESS'), color=C.COLOR_OK)
+ self._display.display(self._command_generic_msg(result._host.get_name(), result._result, 'SUCCESS'), color=C.COLOR_OK)
else:
- self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n','')),
+ self._display.display("%s | SUCCESS => %s" % (result._host.get_name(), self._dump_results(result._result, indent=0).replace('\n', '')),
color=C.COLOR_OK)
-
def v2_runner_on_unreachable(self, result):
- self._display.display("%s | UNREACHABLE!: %s" % (result._host.get_name(), result._result.get('msg','')), color=C.COLOR_UNREACHABLE)
+ self._display.display("%s | UNREACHABLE!: %s" % (result._host.get_name(), result._result.get('msg', '')), color=C.COLOR_UNREACHABLE)
def v2_runner_on_skipped(self, result):
self._display.display("%s | SKIPPED" % (result._host.get_name()), color=C.COLOR_SKIP)
diff --git a/lib/ansible/plugins/callback/osx_say.py b/lib/ansible/plugins/callback/osx_say.py
index 2434077870..d2f85f9a31 100644
--- a/lib/ansible/plugins/callback/osx_say.py
+++ b/lib/ansible/plugins/callback/osx_say.py
@@ -25,11 +25,12 @@ import os
from ansible.plugins.callback import CallbackBase
-FAILED_VOICE="Zarvox"
-REGULAR_VOICE="Trinoids"
-HAPPY_VOICE="Cellos"
-LASER_VOICE="Princess"
-SAY_CMD="/usr/bin/say"
+FAILED_VOICE = "Zarvox"
+REGULAR_VOICE = "Trinoids"
+HAPPY_VOICE = "Cellos"
+LASER_VOICE = "Princess"
+SAY_CMD = "/usr/bin/say"
+
class CallbackModule(CallbackBase):
"""
@@ -48,7 +49,7 @@ class CallbackModule(CallbackBase):
# ansible will not call any callback if disabled is set to True
if not os.path.exists(SAY_CMD):
self.disabled = True
- self._display.warning("%s does not exist, plugin %s disabled" % (SAY_CMD, os.path.basename(__file__)) )
+ self._display.warning("%s does not exist, plugin %s disabled" % (SAY_CMD, os.path.basename(__file__)))
def say(self, msg, voice):
subprocess.call([SAY_CMD, msg, "--voice=%s" % (voice)])
diff --git a/lib/ansible/plugins/callback/profile_tasks.py b/lib/ansible/plugins/callback/profile_tasks.py
index 445cdfc8ef..e5b57a6275 100644
--- a/lib/ansible/plugins/callback/profile_tasks.py
+++ b/lib/ansible/plugins/callback/profile_tasks.py
@@ -37,7 +37,9 @@ t0 = tn = time.time()
def secondsToStr(t):
# http://bytes.com/topic/python/answers/635958-handy-short-cut-formatting-elapsed-time-floating-point-seconds
- rediv = lambda ll, b: list(divmod(ll[0], b)) + ll[1:]
+ def rediv(ll, b):
+ return list(divmod(ll[0], b)) + ll[1:]
+
return "%d:%02d:%02d.%03d" % tuple(reduce(rediv, [[t * 1000, ], 1000, 60, 60]))
@@ -104,7 +106,7 @@ class CallbackModule(CallbackBase):
self.current = task._uuid
self.stats[self.current] = {'time': time.time(), 'name': task.get_name()}
if self._display.verbosity >= 2:
- self.stats[self.current][ 'path'] = task.get_path()
+ self.stats[self.current]['path'] = task.get_path()
def v2_playbook_on_task_start(self, task, is_conditional):
self._record_task(task)
@@ -127,7 +129,7 @@ class CallbackModule(CallbackBase):
if self.sort_order != 'none':
results = sorted(
self.stats.items(),
- key=lambda x:x[1]['time'],
+ key=lambda x: x[1]['time'],
reverse=self.sort_order,
)
@@ -136,7 +138,7 @@ class CallbackModule(CallbackBase):
# Print the timings
for uuid, result in results:
- msg=u"{0:-<{2}}{1:->9}".format(result['name'] + u' ',u' {0:.02f}s'.format(result['time']), self._display.columns - 9)
+ msg = u"{0:-<{2}}{1:->9}".format(result['name'] + u' ', u' {0:.02f}s'.format(result['time']), self._display.columns - 9)
if 'path' in result:
msg += u"\n{0:-<{1}}".format(result['path'] + u' ', self._display.columns)
self._display.display(msg)
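
The secondsToStr helper reshaped above turns elapsed seconds into an H:MM:SS.mmm string by repeatedly applying divmod through reduce: the running total at the head of the list is split by 1000, then 60, then 60, leaving [hours, minutes, seconds, milliseconds]. A standalone sketch of the same idea with a worked value, assuming nothing beyond the standard library:

from functools import reduce

def rediv(ll, b):
    # split the running total at the head of the list by base b,
    # keeping the already-extracted smaller units in the tail
    return list(divmod(ll[0], b)) + ll[1:]

def seconds_to_str(t):
    # seconds -> [hours, minutes, seconds, milliseconds] -> "H:MM:SS.mmm"
    return "%d:%02d:%02d.%03d" % tuple(reduce(rediv, [[t * 1000], 1000, 60, 60]))

print(seconds_to_str(3723.5))  # -> 1:02:03.500
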
diff --git a/lib/ansible/plugins/callback/selective.py b/lib/ansible/plugins/callback/selective.py
index a1eb06a8c9..408936eec4 100644
--- a/lib/ansible/plugins/callback/selective.py
+++ b/lib/ansible/plugins/callback/selective.py
@@ -106,7 +106,7 @@ class CallbackModule(CallbackBase):
lines = text.splitlines()
result_lines = []
for l in lines:
- result_lines.append("{}{}".format(' '*indent_level, l))
+ result_lines.append("{}{}".format(' ' * indent_level, l))
return '\n'.join(result_lines)
def _print_diff(self, diff, indent_level):
@@ -121,7 +121,7 @@ class CallbackModule(CallbackBase):
diff = dict_diff(diff['before'], diff['after'])
if diff:
diff = colorize(str(diff), 'changed')
- print(self._indent_text(diff, indent_level+4))
+ print(self._indent_text(diff, indent_level + 4))
def _print_host_or_item(self, host_or_item, changed, msg, diff, is_host, error, stdout, stderr):
if is_host:
@@ -144,7 +144,7 @@ class CallbackModule(CallbackBase):
msg = colorize(msg, color)
line_length = 120
- spaces = ' ' * (40-len(name)-indent_level)
+ spaces = ' ' * (40 - len(name) - indent_level)
line = "{} * {}{}- {}".format(' ' * indent_level, name, spaces, change_string)
if len(msg) < 50:
@@ -152,16 +152,16 @@ class CallbackModule(CallbackBase):
print("{} {}---------".format(line, '-' * (line_length - len(line))))
else:
print("{} {}".format(line, '-' * (line_length - len(line))))
- print(self._indent_text(msg, indent_level+4))
+ print(self._indent_text(msg, indent_level + 4))
if diff is not None:
self._print_diff(diff, indent_level)
if stdout is not None:
stdout = colorize(stdout, 'failed')
- print(self._indent_text(stdout, indent_level+4))
+ print(self._indent_text(stdout, indent_level + 4))
if stderr is not None:
stderr = colorize(stderr, 'failed')
- print(self._indent_text(stderr, indent_level+4))
+ print(self._indent_text(stderr, indent_level + 4))
def v2_playbook_on_play_start(self, play):
"""Run on start of the play."""
@@ -236,7 +236,7 @@ class CallbackModule(CallbackBase):
self.last_skipped = False
line_length = 120
- spaces = ' ' * (31-len(result._host.name)-4)
+ spaces = ' ' * (31 - len(result._host.name) - 4)
line = " * {}{}- {}".format(colorize(result._host.name, 'not_so_bold'),
spaces,
@@ -255,4 +255,3 @@ class CallbackModule(CallbackBase):
v2_playbook_on_handler_task_start = v2_playbook_on_task_start
v2_runner_on_failed = v2_runner_on_ok
v2_runner_on_unreachable = v2_runner_on_ok
-
diff --git a/lib/ansible/plugins/callback/skippy.py b/lib/ansible/plugins/callback/skippy.py
index a934e0b60c..0d75c50dd8 100644
--- a/lib/ansible/plugins/callback/skippy.py
+++ b/lib/ansible/plugins/callback/skippy.py
@@ -21,6 +21,7 @@ __metaclass__ = type
from ansible.plugins.callback.default import CallbackModule as CallbackModule_default
+
class CallbackModule(CallbackModule_default):
'''
diff --git a/lib/ansible/plugins/callback/slack.py b/lib/ansible/plugins/callback/slack.py
index 9fc406fd34..fbb2a48a7a 100644
--- a/lib/ansible/plugins/callback/slack.py
+++ b/lib/ansible/plugins/callback/slack.py
@@ -68,7 +68,6 @@ class CallbackModule(CallbackBase):
else:
self._options = None
-
super(CallbackModule, self).__init__(display=display)
if not HAS_PRETTYTABLE:
diff --git a/lib/ansible/plugins/callback/syslog_json.py b/lib/ansible/plugins/callback/syslog_json.py
index 7a53386064..912a64222f 100644
--- a/lib/ansible/plugins/callback/syslog_json.py
+++ b/lib/ansible/plugins/callback/syslog_json.py
@@ -12,6 +12,7 @@ import socket
from ansible.plugins.callback import CallbackBase
+
class CallbackModule(CallbackBase):
"""
logs ansible-playbook and ansible runs to a syslog server in json format
@@ -33,35 +34,33 @@ class CallbackModule(CallbackBase):
super(CallbackModule, self).__init__()
- self.logger = logging.getLogger('ansible logger')
+ self.logger = logging.getLogger('ansible logger')
self.logger.setLevel(logging.DEBUG)
self.handler = logging.handlers.SysLogHandler(
- address = (os.getenv('SYSLOG_SERVER','localhost'),
- int(os.getenv('SYSLOG_PORT',514))),
- facility= os.getenv('SYSLOG_FACILITY',logging.handlers.SysLogHandler.LOG_USER)
+ address=(os.getenv('SYSLOG_SERVER', 'localhost'), int(os.getenv('SYSLOG_PORT', 514))),
+ facility=os.getenv('SYSLOG_FACILITY', logging.handlers.SysLogHandler.LOG_USER)
)
self.logger.addHandler(self.handler)
self.hostname = socket.gethostname()
-
def runner_on_failed(self, host, res, ignore_errors=False):
- self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
+ self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))
def runner_on_ok(self, host, res):
- self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
+ self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))
def runner_on_skipped(self, host, item=None):
- self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname,host, 'skipped'))
+ self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname, host, 'skipped'))
def runner_on_unreachable(self, host, res):
- self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
+ self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))
def runner_on_async_failed(self, host, res, jid):
- self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname,host,self._dump_results(res)))
+ self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))
def playbook_on_import_for_host(self, host, imported_file):
- self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname,host,imported_file))
+ self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname, host, imported_file))
def playbook_on_not_import_for_host(self, host, missing_file):
- self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname,host,missing_file))
+ self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname, host, missing_file))
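
The syslog_json hunk above only reflows the SysLogHandler construction; wired up on its own the logging setup looks roughly like the sketch below, driven by the same SYSLOG_SERVER / SYSLOG_PORT / SYSLOG_FACILITY environment variables (the final log line is illustrative, not the plugin's exact output):

import logging
import logging.handlers
import os
import socket

logger = logging.getLogger('ansible logger')
logger.setLevel(logging.DEBUG)

# address is a (host, port) tuple; facility falls back to LOG_USER
handler = logging.handlers.SysLogHandler(
    address=(os.getenv('SYSLOG_SERVER', 'localhost'), int(os.getenv('SYSLOG_PORT', 514))),
    facility=os.getenv('SYSLOG_FACILITY', logging.handlers.SysLogHandler.LOG_USER)
)
logger.addHandler(handler)

logger.info('%s ansible-command: task execution OK' % socket.gethostname())
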
diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py
index 59351d25f0..813cf9dabe 100644
--- a/lib/ansible/plugins/connection/__init__.py
+++ b/lib/ansible/plugins/connection/__init__.py
@@ -59,8 +59,8 @@ class ConnectionBase(with_metaclass(ABCMeta, object)):
'''
has_pipelining = False
- has_native_async = False # eg, winrm
- always_pipeline_modules = False # eg, winrm
+ has_native_async = False # eg, winrm
+ always_pipeline_modules = False # eg, winrm
become_methods = C.BECOME_METHODS
# When running over this connection type, prefer modules written in a certain language
# as discovered by the specified file extension. An empty string as the
diff --git a/lib/ansible/plugins/connection/accelerate.py b/lib/ansible/plugins/connection/accelerate.py
index 7dd578c2e5..fd53649c45 100644
--- a/lib/ansible/plugins/connection/accelerate.py
+++ b/lib/ansible/plugins/connection/accelerate.py
@@ -44,7 +44,7 @@ except ImportError:
# ((1400-8)/4)*3) = 1044
# which leaves room for the TCP/IP header. We set this to a
# multiple of the value to speed up file reads.
-CHUNK_SIZE=1044*20
+CHUNK_SIZE = 1044 * 20
class Connection(ConnectionBase):
@@ -73,7 +73,7 @@ class Connection(ConnectionBase):
host=self._play_context.remote_addr)
while tries > 0:
try:
- self.conn.connect((self._play_context.remote_addr,self._play_context.accelerate_port))
+ self.conn.connect((self._play_context.remote_addr, self._play_context.accelerate_port))
break
except socket.error:
display.vvvv("connection to %s failed, retrying..." % self._play_context.remote_addr, host=self._play_context.remote_addr)
@@ -107,11 +107,11 @@ class Connection(ConnectionBase):
sock.close()
def send_data(self, data):
- packed_len = struct.pack('!Q',len(data))
+ packed_len = struct.pack('!Q', len(data))
return self.conn.sendall(packed_len + data)
def recv_data(self):
- header_len = 8 # size of a packed unsigned long long
+ header_len = 8 # size of a packed unsigned long long
data = b""
try:
display.vvvv("in recv_data(), waiting for the header", host=self._play_context.remote_addr)
@@ -122,7 +122,7 @@ class Connection(ConnectionBase):
return None
data += d
display.vvvv("got the header, unpacking", host=self._play_context.remote_addr)
- data_len = struct.unpack('!Q',data[:header_len])[0]
+ data_len = struct.unpack('!Q', data[:header_len])[0]
data = data[header_len:]
display.vvvv("data received so far (expecting %d): %d" % (data_len, len(data)), host=self._play_context.remote_addr)
while len(data) < data_len:
@@ -252,7 +252,7 @@ class Connection(ConnectionBase):
response = keyczar_decrypt(self.key, response)
response = json.loads(response)
- if response.get('failed',False):
+ if response.get('failed', False):
raise AnsibleError("failed to put the file in the requested location")
finally:
fd.close()
@@ -263,7 +263,7 @@ class Connection(ConnectionBase):
response = keyczar_decrypt(self.key, response)
response = json.loads(response)
- if response.get('failed',False):
+ if response.get('failed', False):
raise AnsibleError("failed to put the file in the requested location")
def fetch_file(self, in_path, out_path):
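
The accelerate hunks above are spacing fixes inside a simple length-prefixed wire protocol: every message goes out as an 8-byte big-endian length header (struct format '!Q') followed by the payload, and the reader keeps calling recv() until it has the advertised number of bytes. A minimal sketch of that framing over a bare socket object, leaving out the keyczar encryption, JSON payloads and retry logic the real plugin layers on top:

import struct

def send_data(sock, data):
    # 8-byte unsigned long long in network byte order, then the payload itself
    packed_len = struct.pack('!Q', len(data))
    return sock.sendall(packed_len + data)

def recv_exactly(sock, count):
    buf = b""
    while len(buf) < count:
        chunk = sock.recv(count - len(buf))
        if not chunk:  # peer closed the connection early
            return None
        buf += chunk
    return buf

def recv_data(sock):
    header = recv_exactly(sock, 8)  # size of a packed unsigned long long
    if header is None:
        return None
    data_len = struct.unpack('!Q', header)[0]
    return recv_exactly(sock, data_len)
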
diff --git a/lib/ansible/plugins/connection/chroot.py b/lib/ansible/plugins/connection/chroot.py
index 7304420c8f..4ad6aa87e5 100644
--- a/lib/ansible/plugins/connection/chroot.py
+++ b/lib/ansible/plugins/connection/chroot.py
@@ -96,7 +96,7 @@ class Connection(ConnectionBase):
display.vvv("EXEC %s" % (local_cmd), host=self.chroot)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
diff --git a/lib/ansible/plugins/connection/docker.py b/lib/ansible/plugins/connection/docker.py
index ef227c31c3..89bcae030f 100644
--- a/lib/ansible/plugins/connection/docker.py
+++ b/lib/ansible/plugins/connection/docker.py
@@ -235,7 +235,7 @@ class Connection(ConnectionBase):
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
try:
p = subprocess.Popen(args, stdin=in_file,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError:
raise AnsibleError("docker connection requires dd command in the container to put files")
stdout, stderr = p.communicate()
@@ -257,7 +257,7 @@ class Connection(ConnectionBase):
args = [to_bytes(i, errors='surrogate_or_strict') for i in args]
p = subprocess.Popen(args, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
# Rename if needed
diff --git a/lib/ansible/plugins/connection/funcd.py b/lib/ansible/plugins/connection/funcd.py
index 49b2499778..1193906937 100644
--- a/lib/ansible/plugins/connection/funcd.py
+++ b/lib/ansible/plugins/connection/funcd.py
@@ -43,6 +43,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class Connection(object):
''' Func-based connections '''
diff --git a/lib/ansible/plugins/connection/iocage.py b/lib/ansible/plugins/connection/iocage.py
index 89d648b4a7..cc26edb39e 100644
--- a/lib/ansible/plugins/connection/iocage.py
+++ b/lib/ansible/plugins/connection/iocage.py
@@ -65,4 +65,3 @@ class Connection(Jail):
raise AnsibleError(u"iocage returned an error: {}".format(stdout))
return stdout.strip('\n')
-
diff --git a/lib/ansible/plugins/connection/jail.py b/lib/ansible/plugins/connection/jail.py
index 47c43000ca..6810aaeb19 100644
--- a/lib/ansible/plugins/connection/jail.py
+++ b/lib/ansible/plugins/connection/jail.py
@@ -56,7 +56,7 @@ class Connection(ConnectionBase):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self.jail = self._play_context.remote_addr
- if self.modified_jailname_key in kwargs :
+ if self.modified_jailname_key in kwargs:
self.jail = kwargs[self.modified_jailname_key]
if os.geteuid() != 0:
@@ -122,7 +122,7 @@ class Connection(ConnectionBase):
display.vvv("EXEC %s" % (local_cmd,), host=self.jail)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
diff --git a/lib/ansible/plugins/connection/libvirt_lxc.py b/lib/ansible/plugins/connection/libvirt_lxc.py
index 9583eeda8a..123caa798c 100644
--- a/lib/ansible/plugins/connection/libvirt_lxc.py
+++ b/lib/ansible/plugins/connection/libvirt_lxc.py
@@ -97,7 +97,7 @@ class Connection(ConnectionBase):
display.vvv("EXEC %s" % (local_cmd,), host=self.lxc)
local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
diff --git a/lib/ansible/plugins/connection/local.py b/lib/ansible/plugins/connection/local.py
index abb7a0c4a4..005185fb60 100644
--- a/lib/ansible/plugins/connection/local.py
+++ b/lib/ansible/plugins/connection/local.py
@@ -90,7 +90,7 @@ class Connection(ConnectionBase):
p = subprocess.Popen(
cmd,
shell=isinstance(cmd, (text_type, binary_type)),
- executable=executable, #cwd=...
+ executable=executable, # cwd=...
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
diff --git a/lib/ansible/plugins/connection/lxc.py b/lib/ansible/plugins/connection/lxc.py
index 34cb66c484..278c204f30 100644
--- a/lib/ansible/plugins/connection/lxc.py
+++ b/lib/ansible/plugins/connection/lxc.py
@@ -68,7 +68,7 @@ class Connection(ConnectionBase):
raise errors.AnsibleError("%s is not running" % self.container_name)
def _communicate(self, pid, in_data, stdin, stdout, stderr):
- buf = { stdout: [], stderr: [] }
+ buf = {stdout: [], stderr: []}
read_fds = [stdout, stderr]
if in_data:
write_fds = [stdin]
@@ -109,7 +109,7 @@ class Connection(ConnectionBase):
read_stdout, write_stdout = None, None
read_stderr, write_stderr = None, None
- read_stdin, write_stdin = None, None
+ read_stdin, write_stdin = None, None
try:
read_stdout, write_stdout = os.pipe()
diff --git a/lib/ansible/plugins/connection/paramiko_ssh.py b/lib/ansible/plugins/connection/paramiko_ssh.py
index 6c1aa030b4..0e7b6f25f2 100644
--- a/lib/ansible/plugins/connection/paramiko_ssh.py
+++ b/lib/ansible/plugins/connection/paramiko_ssh.py
@@ -52,7 +52,7 @@ except ImportError:
display = Display()
-AUTHENTICITY_MSG="""
+AUTHENTICITY_MSG = """
paramiko: The authenticity of host '%s' can't be established.
The %s key fingerprint is %s.
Are you sure you want to continue connecting (yes/no)?
@@ -62,12 +62,12 @@ Are you sure you want to continue connecting (yes/no)?
SETTINGS_REGEX = re.compile(r'(\w+)(?:\s*=\s*|\s+)(.+)')
# prevent paramiko warning noise -- see http://stackoverflow.com/questions/3920502/
-HAVE_PARAMIKO=False
+HAVE_PARAMIKO = False
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
import paramiko
- HAVE_PARAMIKO=True
+ HAVE_PARAMIKO = True
logging.getLogger("paramiko").setLevel(logging.WARNING)
except ImportError:
pass
@@ -109,7 +109,7 @@ class MyAddPolicy(object):
self.connection.connection_unlock()
- if inp not in ['yes','y','']:
+ if inp not in ['yes', 'y', '']:
raise AnsibleError("host connection rejected by user")
key._added_by_ansible_this_time = True
@@ -213,11 +213,11 @@ class Connection(ConnectionBase):
if C.HOST_KEY_CHECKING:
for ssh_known_hosts in ("/etc/ssh/ssh_known_hosts", "/etc/openssh/ssh_known_hosts"):
try:
- #TODO: check if we need to look at several possible locations, possible for loop
+ # TODO: check if we need to look at several possible locations, possible for loop
ssh.load_system_host_keys(ssh_known_hosts)
break
except IOError:
- pass # file was not found, but not required to function
+ pass # file was not found, but not required to function
ssh.load_system_host_keys()
sock_kwarg = self._parse_proxy_command(port)
@@ -305,10 +305,10 @@ class Connection(ConnectionBase):
display.debug("chunk is: %s" % chunk)
if not chunk:
if b'unknown user' in become_output:
- raise AnsibleError( 'user %s does not exist' % self._play_context.become_user)
+ raise AnsibleError('user %s does not exist' % self._play_context.become_user)
else:
break
- #raise AnsibleError('ssh connection closed waiting for password prompt')
+ # raise AnsibleError('ssh connection closed waiting for password prompt')
become_output += chunk
# need to check every line because we might get lectured
@@ -441,7 +441,7 @@ class Connection(ConnectionBase):
# (This doesn't acquire the connection lock because it needs
# to exclude only other known_hosts writers, not connections
# that are starting up.)
- lockfile = self.keyfile.replace("known_hosts",".known_hosts.lock")
+ lockfile = self.keyfile.replace("known_hosts", ".known_hosts.lock")
dirname = os.path.dirname(self.keyfile)
makedirs_safe(dirname)
@@ -457,7 +457,7 @@ class Connection(ConnectionBase):
# gather information about the current key file, so
# we can ensure the new file has the correct mode/owner
- key_dir = os.path.dirname(self.keyfile)
+ key_dir = os.path.dirname(self.keyfile)
if os.path.exists(self.keyfile):
key_stat = os.stat(self.keyfile)
mode = key_stat.st_mode
diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py
index 1eef5acf0b..400d2a26ca 100644
--- a/lib/ansible/plugins/connection/ssh.py
+++ b/lib/ansible/plugins/connection/ssh.py
@@ -135,6 +135,7 @@ class AnsibleControlPersistBrokenPipeError(AnsibleError):
''' ControlPersist broken pipe '''
pass
+
def _ssh_retry(func):
"""
Decorator to retry ssh/scp/sftp in the case of a connection failure
@@ -365,12 +366,14 @@ class Connection(ConnectionBase):
user = self._play_context.remote_user
if user:
- self._add_args(b_command,
- (b"-o", b"User=" + to_bytes(self._play_context.remote_user, errors='surrogate_or_strict')),
+ self._add_args(
+ b_command,
+ (b"-o", b"User=" + to_bytes(self._play_context.remote_user, errors='surrogate_or_strict')),
u"ANSIBLE_REMOTE_USER/remote_user/ansible_user/user/-u set"
)
- self._add_args(b_command,
+ self._add_args(
+ b_command,
(b"-o", b"ConnectTimeout=" + to_bytes(self._play_context.timeout, errors='surrogate_or_strict', nonstring='simplerepr')),
u"ANSIBLE_TIMEOUT/timeout set"
)
@@ -459,7 +462,7 @@ class Connection(ConnectionBase):
display_line = to_text(b_line).rstrip('\r\n')
suppress_output = False
- #display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, display_line))
+ # display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, display_line))
if self._play_context.prompt and self.check_password_prompt(b_line):
display.debug("become_prompt: (source=%s, state=%s): '%s'" % (source, state, display_line))
self._flags['become_prompt'] = True
@@ -594,7 +597,7 @@ class Connection(ConnectionBase):
for fd in (p.stdout, p.stderr):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
- ### TODO: bcoca would like to use SelectSelector() when open
+ # TODO: bcoca would like to use SelectSelector() when open
# filehandles is low, then switch to more efficient ones when higher.
# select is faster when filehandles is low.
selector = selectors.DefaultSelector()
@@ -839,8 +842,8 @@ class Connection(ConnectionBase):
if returncode == 255:
raise AnsibleConnectionFailure("Failed to connect to the host via %s: %s" % (method, to_native(stderr)))
else:
- raise AnsibleError("failed to transfer file to {0} {1}:\n{2}\n{3}"\
- .format(to_native(in_path), to_native(out_path), to_native(stdout), to_native(stderr)))
+ raise AnsibleError("failed to transfer file to %s %s:\n%s\n%s" %
+ (to_native(in_path), to_native(out_path), to_native(stdout), to_native(stderr)))
#
# Main public methods
@@ -852,7 +855,6 @@ class Connection(ConnectionBase):
display.vvv(u"ESTABLISH SSH CONNECTION FOR USER: {0}".format(self._play_context.remote_user), host=self._play_context.remote_addr)
-
# we can only use tty when we are not pipelining the modules. piping
# data into /usr/bin/python inside a tty automatically invokes the
# python interactive-mode but the modules are not compatible with the
diff --git a/lib/ansible/plugins/connection/winrm.py b/lib/ansible/plugins/connection/winrm.py
index d01463e736..8bc2fa0a64 100644
--- a/lib/ansible/plugins/connection/winrm.py
+++ b/lib/ansible/plugins/connection/winrm.py
@@ -74,15 +74,15 @@ class Connection(ConnectionBase):
become_methods = ['runas']
allow_executable = False
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args, **kwargs):
- self.has_pipelining = True
+ self.has_pipelining = True
self.always_pipeline_modules = True
self.has_native_async = True
- self.protocol = None
- self.shell_id = None
- self.delegate = None
- self._shell_type = 'powershell'
+ self.protocol = None
+ self.shell_id = None
+ self.delegate = None
+ self._shell_type = 'powershell'
# FUTURE: Add runas support
super(Connection, self).__init__(*args, **kwargs)
@@ -109,13 +109,13 @@ class Connection(ConnectionBase):
self._become_user = self._play_context.become_user
self._become_pass = self._play_context.become_pass
- self._kinit_cmd = hostvars.get('ansible_winrm_kinit_cmd', 'kinit')
+ self._kinit_cmd = hostvars.get('ansible_winrm_kinit_cmd', 'kinit')
if hasattr(winrm, 'FEATURE_SUPPORTED_AUTHTYPES'):
self._winrm_supported_authtypes = set(winrm.FEATURE_SUPPORTED_AUTHTYPES)
else:
# for legacy versions of pywinrm, use the values we know are supported
- self._winrm_supported_authtypes = set(['plaintext','ssl','kerberos'])
+ self._winrm_supported_authtypes = set(['plaintext', 'ssl', 'kerberos'])
# TODO: figure out what we want to do with auto-transport selection in the face of NTLM/Kerb/CredSSP/Cert/Basic
transport_selector = 'ssl' if self._winrm_scheme == 'https' else 'plaintext'
@@ -191,7 +191,7 @@ class Connection(ConnectionBase):
Establish a WinRM connection over HTTP/HTTPS.
'''
display.vvv("ESTABLISH WINRM CONNECTION FOR USER: %s on PORT %s TO %s" %
- (self._winrm_user, self._winrm_port, self._winrm_host), host=self._winrm_host)
+ (self._winrm_user, self._winrm_port, self._winrm_host), host=self._winrm_host)
netloc = '%s:%d' % (self._winrm_host, self._winrm_port)
endpoint = urlunsplit((self._winrm_scheme, netloc, self._winrm_path, '', ''))
errors = []
@@ -320,7 +320,7 @@ class Connection(ConnectionBase):
payload_bytes = to_bytes(payload)
byte_count = len(payload_bytes)
for i in range(0, byte_count, buffer_size):
- yield payload_bytes[i:i+buffer_size], i+buffer_size >= byte_count
+ yield payload_bytes[i:i + buffer_size], i + buffer_size >= byte_count
def exec_command(self, cmd, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
@@ -349,7 +349,6 @@ class Connection(ConnectionBase):
return (result.status_code, result.std_out, result.std_err)
-
def exec_command_old(self, cmd, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
cmd_parts = shlex.split(to_bytes(cmd), posix=False)
@@ -406,7 +405,7 @@ class Connection(ConnectionBase):
in_size = os.path.getsize(to_bytes(in_path, errors='surrogate_or_strict'))
offset = 0
with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
- for out_data in iter((lambda:in_file.read(buffer_size)), ''):
+ for out_data in iter((lambda: in_file.read(buffer_size)), ''):
offset += len(out_data)
self._display.vvvvv('WINRM PUT "%s" to "%s" (offset=%d size=%d)' % (in_path, out_path, offset, len(out_data)), host=self._winrm_host)
# yes, we're double-encoding over the wire in this case- we want to ensure that the data shipped to the end PS pipeline is still b64-encoded
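
The winrm put_file hunk above tidies a generator that slices the payload into buffer-sized pieces and flags the final one, so the consuming PowerShell pipeline knows when to finish writing. The same pattern in isolation, with an arbitrary example payload and buffer size:

def chunk_payload(payload_bytes, buffer_size):
    byte_count = len(payload_bytes)
    for i in range(0, byte_count, buffer_size):
        # yield (chunk, is_last) so the consumer can finalize on the last piece
        yield payload_bytes[i:i + buffer_size], i + buffer_size >= byte_count

for chunk, is_last in chunk_payload(b"0123456789", 4):
    print(chunk, is_last)
# b'0123' False
# b'4567' False
# b'89' True
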
diff --git a/lib/ansible/plugins/connection/zone.py b/lib/ansible/plugins/connection/zone.py
index f20735e21b..e7b82ed669 100644
--- a/lib/ansible/plugins/connection/zone.py
+++ b/lib/ansible/plugins/connection/zone.py
@@ -71,8 +71,8 @@ class Connection(ConnectionBase):
def list_zones(self):
process = subprocess.Popen([self.zoneadm_cmd, 'list', '-ip'],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
zones = []
for l in process.stdout.readlines():
@@ -84,13 +84,13 @@ class Connection(ConnectionBase):
return zones
def get_zone_path(self):
- #solaris10vm# zoneadm -z cswbuild list -p
- #-:cswbuild:installed:/zones/cswbuild:479f3c4b-d0c6-e97b-cd04-fd58f2c0238e:native:shared
+ # solaris10vm# zoneadm -z cswbuild list -p
+ # -:cswbuild:installed:/zones/cswbuild:479f3c4b-d0c6-e97b-cd04-fd58f2c0238e:native:shared
process = subprocess.Popen([self.zoneadm_cmd, '-z', to_bytes(self.zone), 'list', '-p'],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- #stdout, stderr = p.communicate()
+ # stdout, stderr = p.communicate()
path = process.stdout.readlines()[0].split(':')[3]
return path + '/root'
@@ -109,7 +109,7 @@ class Connection(ConnectionBase):
compared to exec_command() it looses some niceties like being able to
return the process's exit code immediately.
'''
- # Note: zlogin invokes a shell (just like ssh does) so we do not pass
+ # NOTE: zlogin invokes a shell (just like ssh does) so we do not pass
# this through /bin/sh -c here. Instead it goes through the shell
# that zlogin selects.
local_cmd = [self.zlogin_cmd, self.zone, cmd]
@@ -117,7 +117,7 @@ class Connection(ConnectionBase):
display.vvv("EXEC %s" % (local_cmd), host=self.zone)
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
diff --git a/lib/ansible/plugins/filter/__init__.py b/lib/ansible/plugins/filter/__init__.py
index cbbbd64118..980f84a225 100644
--- a/lib/ansible/plugins/filter/__init__.py
+++ b/lib/ansible/plugins/filter/__init__.py
@@ -1,5 +1,3 @@
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-
-
diff --git a/lib/ansible/plugins/filter/core.py b/lib/ansible/plugins/filter/core.py
index e82061a51d..462a6612f2 100644
--- a/lib/ansible/plugins/filter/core.py
+++ b/lib/ansible/plugins/filter/core.py
@@ -72,20 +72,24 @@ class AnsibleJSONEncoder(json.JSONEncoder):
else:
return super(AnsibleJSONEncoder, self).default(o)
+
def to_yaml(a, *args, **kw):
'''Make verbose, human readable yaml'''
transformed = yaml.dump(a, Dumper=AnsibleDumper, allow_unicode=True, **kw)
return to_text(transformed)
+
def to_nice_yaml(a, indent=4, *args, **kw):
'''Make verbose, human readable yaml'''
transformed = yaml.dump(a, Dumper=AnsibleDumper, indent=indent, allow_unicode=True, default_flow_style=False, **kw)
return to_text(transformed)
+
def to_json(a, *args, **kw):
''' Convert the value to JSON '''
return json.dumps(a, cls=AnsibleJSONEncoder, *args, **kw)
+
def to_nice_json(a, indent=4, *args, **kw):
'''Make verbose, human readable JSON'''
# python-2.6's json encoder is buggy (can't encode hostvars)
@@ -109,6 +113,7 @@ def to_nice_json(a, indent=4, *args, **kw):
# Fallback to the to_json filter
return to_json(a, *args, **kw)
+
def to_bool(a):
''' return a bool for the arg '''
if a is None or isinstance(a, bool):
@@ -119,11 +124,12 @@ def to_bool(a):
return True
return False
+
def to_datetime(string, format="%Y-%d-%m %H:%M:%S"):
return datetime.strptime(string, format)
-def strftime(string_format, second = None):
+def strftime(string_format, second=None):
''' return a date string using string. See https://docs.python.org/2/library/time.html#time.strftime for format '''
if second is not None:
try:
@@ -132,13 +138,16 @@ def strftime(string_format, second = None):
raise errors.AnsibleFilterError('Invalid value for epoch value (%s)' % second)
return time.strftime(string_format, time.localtime(second))
+
def quote(a):
''' return its argument quoted for shell usage '''
return shlex_quote(a)
+
def fileglob(pathname):
''' return list of matched regular files for glob '''
- return [ g for g in glob.glob(pathname) if os.path.isfile(g) ]
+ return [g for g in glob.glob(pathname) if os.path.isfile(g)]
+
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
''' Perform a `re.sub` returning a string '''
@@ -152,6 +161,7 @@ def regex_replace(value='', pattern='', replacement='', ignorecase=False):
_re = re.compile(pattern, flags=flags)
return _re.sub(replacement, value)
+
def regex_findall(value, regex, multiline=False, ignorecase=False):
''' Perform re.findall and return the list of matches '''
flags = 0
@@ -161,6 +171,7 @@ def regex_findall(value, regex, multiline=False, ignorecase=False):
flags |= re.M
return re.findall(regex, value, flags)
+
def regex_search(value, regex, *args, **kwargs):
''' Perform re.search and return the list of matches or a backref '''
@@ -191,6 +202,7 @@ def regex_search(value, regex, *args, **kwargs):
items.append(match.group(item))
return items
+
def ternary(value, true_val, false_val):
''' value ? true_val : false_val '''
if value:
@@ -199,16 +211,17 @@ def ternary(value, true_val, false_val):
return false_val
-
def regex_escape(string):
'''Escape all regular expressions special characters from STRING.'''
return re.escape(string)
+
def from_yaml(data):
if isinstance(data, string_types):
return yaml.safe_load(data)
return data
+
@environmentfilter
def rand(environment, end, start=None, step=None, seed=None):
if seed is None:
@@ -228,6 +241,7 @@ def rand(environment, end, start=None, step=None, seed=None):
else:
raise errors.AnsibleFilterError('random can only be used on sequences and integers')
+
def randomize_list(mylist, seed=None):
try:
mylist = list(mylist)
@@ -240,9 +254,10 @@ def randomize_list(mylist, seed=None):
pass
return mylist
+
def get_hash(data, hashtype='sha1'):
- try: # see if hash is supported
+ try: # see if hash is supported
h = hashlib.new(hashtype)
except:
return None
@@ -250,14 +265,15 @@ def get_hash(data, hashtype='sha1'):
h.update(to_bytes(data, errors='surrogate_then_strict'))
return h.hexdigest()
+
def get_encrypted_password(password, hashtype='sha512', salt=None):
# TODO: find a way to construct dynamically from system
- cryptmethod= {
- 'md5': '1',
+ cryptmethod = {
+ 'md5': '1',
'blowfish': '2a',
- 'sha256': '5',
- 'sha512': '6',
+ 'sha256': '5',
+ 'sha512': '6',
}
if hashtype in cryptmethod:
@@ -273,7 +289,7 @@ def get_encrypted_password(password, hashtype='sha512', salt=None):
if not HAS_PASSLIB:
if sys.platform.startswith('darwin'):
raise errors.AnsibleFilterError('|password_hash requires the passlib python module to generate password hashes on Mac OS X/Darwin')
- saltstring = "$%s$%s" % (cryptmethod[hashtype],salt)
+ saltstring = "$%s$%s" % (cryptmethod[hashtype], salt)
encrypted = crypt.crypt(password, saltstring)
else:
if hashtype == 'blowfish':
@@ -287,9 +303,11 @@ def get_encrypted_password(password, hashtype='sha512', salt=None):
return None
+
def to_uuid(string):
return str(uuid.uuid5(UUID_NAMESPACE_ANSIBLE, str(string)))
+
def mandatory(a):
from jinja2.runtime import Undefined
@@ -298,6 +316,7 @@ def mandatory(a):
raise errors.AnsibleFilterError('Mandatory variable not defined.')
return a
+
def combine(*terms, **kwargs):
recursive = kwargs.get('recursive', False)
if len(kwargs) > 1 or (len(kwargs) == 1 and 'recursive' not in kwargs):
@@ -312,6 +331,7 @@ def combine(*terms, **kwargs):
else:
return dict(itertools.chain(*map(iteritems, terms)))
+
def comment(text, style='plain', **kw):
# Predefined comment types
comment_styles = {
@@ -394,6 +414,7 @@ def comment(text, style='plain', **kw):
str_postfix,
str_end)
+
def extract(item, container, morekeys=None):
from jinja2.runtime import Undefined
@@ -410,6 +431,7 @@ def extract(item, container, morekeys=None):
return value
+
def failed(*a, **kw):
''' Test if task result yields failed '''
item = a[0]
@@ -422,26 +444,31 @@ def failed(*a, **kw):
else:
return False
+
def success(*a, **kw):
''' Test if task result yields success '''
return not failed(*a, **kw)
+
def changed(*a, **kw):
''' Test if task result yields changed '''
item = a[0]
if not isinstance(item, MutableMapping):
raise errors.AnsibleFilterError("|changed expects a dictionary")
- if not 'changed' in item:
+ if 'changed' not in item:
changed = False
- if ('results' in item # some modules return a 'results' key
- and isinstance(item['results'], MutableSequence)
- and isinstance(item['results'][0], MutableMapping)):
+ if (
+ 'results' in item and # some modules return a 'results' key
+ isinstance(item['results'], MutableSequence) and
+ isinstance(item['results'][0], MutableMapping)
+ ):
for result in item['results']:
changed = changed or result.get('changed', False)
else:
changed = item.get('changed', False)
return changed
+
def skipped(*a, **kw):
''' Test if task result yields skipped '''
item = a[0]
@@ -504,7 +531,7 @@ class FilterModule(object):
'to_nice_yaml': to_nice_yaml,
'from_yaml': from_yaml,
- #date
+ # date
'to_datetime': to_datetime,
# path
@@ -567,18 +594,18 @@ class FilterModule(object):
'extract': extract,
# failure testing
- 'failed' : failed,
- 'failure' : failed,
- 'success' : success,
- 'succeeded' : success,
+ 'failed': failed,
+ 'failure': failed,
+ 'success': success,
+ 'succeeded': success,
# changed testing
- 'changed' : changed,
- 'change' : changed,
+ 'changed': changed,
+ 'change': changed,
# skip testing
- 'skipped' : skipped,
- 'skip' : skipped,
+ 'skipped': skipped,
+ 'skip': skipped,
# debug
'type_debug': lambda o: o.__class__.__name__,
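
The changed/failed test cleanups above preserve the rule behind the |changed filter: a registered result counts as changed if its own 'changed' key is true, or, when a looping module returns a 'results' list, if any entry in that list changed. A simplified sketch of just that rule (not the filter's full implementation, which also validates its input):

def changed(item):
    # simplified: consult item['changed'], or any entry of item['results']
    if 'changed' not in item:
        results = item.get('results')
        if isinstance(results, list) and results and isinstance(results[0], dict):
            return any(r.get('changed', False) for r in results)
        return item.get('changed', False)
    return item.get('changed', False)

print(changed({'results': [{'changed': False}, {'changed': True}]}))  # True
print(changed({'changed': False}))                                    # False
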
diff --git a/lib/ansible/plugins/filter/ipaddr.py b/lib/ansible/plugins/filter/ipaddr.py
index beef17e53f..cd56537eda 100644
--- a/lib/ansible/plugins/filter/ipaddr.py
+++ b/lib/ansible/plugins/filter/ipaddr.py
@@ -36,7 +36,6 @@ from ansible import errors
# ---- IP address and network query helpers ----
-
def _empty_ipaddr_query(v, vtype):
# We don't have any query to process, so just check what type the user
# expects, and return the IP address in a correct format
@@ -46,6 +45,7 @@ def _empty_ipaddr_query(v, vtype):
elif vtype == 'network':
return str(v)
+
def _6to4_query(v, vtype, value):
if v.version == 4:
@@ -76,6 +76,7 @@ def _6to4_query(v, vtype, value):
else:
return False
+
def _ip_query(v):
if v.size == 1:
return str(v.ip)
@@ -84,22 +85,27 @@ def _ip_query(v):
if v.ip != v.network or not v.broadcast:
return str(v.ip)
+
def _gateway_query(v):
if v.size > 1:
if v.ip != v.network:
return str(v.ip) + '/' + str(v.prefixlen)
+
def _bool_ipaddr_query(v):
if v:
return True
+
def _broadcast_query(v):
if v.size > 1:
return str(v.broadcast)
+
def _cidr_query(v):
return str(v)
+
def _cidr_lookup_query(v, iplist, value):
try:
if v in iplist:
@@ -107,6 +113,7 @@ def _cidr_lookup_query(v, iplist, value):
except:
return False
+
def _host_query(v):
if v.size == 1:
return str(v)
@@ -114,15 +121,18 @@ def _host_query(v):
if v.ip != v.network:
return str(v.ip) + '/' + str(v.prefixlen)
+
def _hostmask_query(v):
return str(v.hostmask)
+
def _int_query(v, vtype):
if vtype == 'address':
return int(v.ip)
elif vtype == 'network':
return str(int(v.ip)) + '/' + str(int(v.prefixlen))
+
def _ipv4_query(v, value):
if v.version == 6:
try:
@@ -132,12 +142,14 @@ def _ipv4_query(v, value):
else:
return value
+
def _ipv6_query(v, value):
if v.version == 4:
return str(v.ipv6())
else:
return value
+
def _link_local_query(v, value):
v_ip = netaddr.IPAddress(str(v.ip))
if v.version == 4:
@@ -148,34 +160,42 @@ def _link_local_query(v, value):
if ipaddr(str(v_ip), 'fe80::/10'):
return value
+
def _loopback_query(v, value):
v_ip = netaddr.IPAddress(str(v.ip))
if v_ip.is_loopback():
return value
+
def _multicast_query(v, value):
if v.is_multicast():
return value
+
def _net_query(v):
if v.size > 1:
if v.ip == v.network:
return str(v.network) + '/' + str(v.prefixlen)
+
def _netmask_query(v):
return str(v.netmask)
+
def _network_query(v):
if v.size > 1:
return str(v.network)
+
def _prefix_query(v):
return int(v.prefixlen)
+
def _private_query(v, value):
if v.is_private():
return value
+
def _public_query(v, value):
v_ip = netaddr.IPAddress(str(v.ip))
if (v_ip.is_unicast() and not v_ip.is_private() and
@@ -183,16 +203,20 @@ def _public_query(v, value):
not v_ip.is_hostmask()):
return value
+
def _revdns_query(v):
v_ip = netaddr.IPAddress(str(v.ip))
return v_ip.reverse_dns
+
def _size_query(v):
return v.size
+
def _subnet_query(v):
return str(v.cidr)
+
def _type_query(v):
if v.size == 1:
return 'address'
@@ -202,13 +226,16 @@ def _type_query(v):
else:
return 'network'
+
def _unicast_query(v, value):
if v.is_unicast():
return value
+
def _version_query(v):
return v.version
+
def _wrap_query(v, vtype, value):
if v.version == 6:
if vtype == 'address':
@@ -224,41 +251,48 @@ def _bare_query(v):
v.dialect = netaddr.mac_bare
return str(v)
+
def _bool_hwaddr_query(v):
if v:
return True
+
def _int_hwaddr_query(v):
return int(v)
+
def _cisco_query(v):
v.dialect = netaddr.mac_cisco
return str(v)
+
def _empty_hwaddr_query(v, value):
if v:
return value
+
def _linux_query(v):
v.dialect = mac_linux
return str(v)
+
def _postgresql_query(v):
v.dialect = netaddr.mac_pgsql
return str(v)
+
def _unix_query(v):
v.dialect = netaddr.mac_unix
return str(v)
+
def _win_query(v):
v.dialect = netaddr.mac_eui48
return str(v)
# ---- IP address and network filters ----
-
-def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
+def ipaddr(value, query='', version=False, alias='ipaddr'):
''' Check if string is an IP address or network and filter it '''
query_func_extra_args = {
@@ -276,7 +310,8 @@ def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
'public': ('value',),
'unicast': ('value',),
'wrap': ('vtype', 'value'),
- }
+ }
+
query_func_map = {
'': _empty_ipaddr_query,
'6to4': _6to4_query,
@@ -316,7 +351,7 @@ def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
'v6': _ipv6_query,
'version': _version_query,
'wrap': _wrap_query,
- }
+ }
vtype = None
@@ -421,7 +456,7 @@ def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
# that string is a valid subnet, if so, we can check later if given IP
# address/network is inside that specific subnet
try:
- ### ?? 6to4 and link-local were True here before. Should they still?
+ # ?? 6to4 and link-local were True here before. Should they still?
if query and (query not in query_func_map or query == 'cidr_lookup') and ipaddr(query, 'network'):
iplist = netaddr.IPSet([netaddr.IPNetwork(query)])
query = 'cidr_lookup'
@@ -463,19 +498,19 @@ def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
return False
-def ipwrap(value, query = ''):
+def ipwrap(value, query=''):
try:
if isinstance(value, (list, tuple, types.GeneratorType)):
_ret = []
for element in value:
- if ipaddr(element, query, version = False, alias = 'ipwrap'):
+ if ipaddr(element, query, version=False, alias='ipwrap'):
_ret.append(ipaddr(element, 'wrap'))
else:
_ret.append(element)
return _ret
else:
- _ret = ipaddr(value, query, version = False, alias = 'ipwrap')
+ _ret = ipaddr(value, query, version=False, alias='ipwrap')
if _ret:
return ipaddr(_ret, 'wrap')
else:
@@ -485,12 +520,12 @@ def ipwrap(value, query = ''):
return value
-def ipv4(value, query = ''):
- return ipaddr(value, query, version = 4, alias = 'ipv4')
+def ipv4(value, query=''):
+ return ipaddr(value, query, version=4, alias='ipv4')
-def ipv6(value, query = ''):
- return ipaddr(value, query, version = 6, alias = 'ipv6')
+def ipv6(value, query=''):
+ return ipaddr(value, query, version=6, alias='ipv6')
# Split given subnet into smaller subnets or find out the biggest subnet of
@@ -511,7 +546,7 @@ def ipv6(value, query = ''):
#
# - address | ipsubnet(cidr, index)
# returns next indexed subnet which contains given address
-def ipsubnet(value, query = '', index = 'x'):
+def ipsubnet(value, query='', index='x'):
''' Manipulate IPv4/IPv6 subnets '''
try:
@@ -563,6 +598,7 @@ def ipsubnet(value, query = '', index = 'x'):
return False
+
# Returns the nth host within a network described by value.
# Usage:
#
@@ -594,11 +630,12 @@ def nthhost(value, query=''):
return False
+
# Returns the SLAAC address within a network for a given HW/MAC address.
# Usage:
#
# - prefix | slaac(mac)
-def slaac(value, query = ''):
+def slaac(value, query=''):
''' Get the SLAAC address within given network '''
try:
vtype = ipaddr(value, 'type')
@@ -618,7 +655,7 @@ def slaac(value, query = ''):
return False
try:
- mac = hwaddr(query, alias = 'slaac')
+ mac = hwaddr(query, alias='slaac')
eui = netaddr.EUI(mac)
except:
@@ -628,13 +665,13 @@ def slaac(value, query = ''):
# ---- HWaddr / MAC address filters ----
-
-def hwaddr(value, query = '', alias = 'hwaddr'):
+def hwaddr(value, query='', alias='hwaddr'):
''' Check if string is a HW/MAC address and filter it '''
query_func_extra_args = {
'': ('value',),
- }
+ }
+
query_func_map = {
'': _empty_hwaddr_query,
'bare': _bare_query,
@@ -648,7 +685,7 @@ def hwaddr(value, query = '', alias = 'hwaddr'):
'psql': _postgresql_query,
'unix': _unix_query,
'win': _win_query,
- }
+ }
try:
v = netaddr.EUI(value)
@@ -666,23 +703,26 @@ def hwaddr(value, query = '', alias = 'hwaddr'):
return False
-def macaddr(value, query = ''):
- return hwaddr(value, query, alias = 'macaddr')
+
+def macaddr(value, query=''):
+ return hwaddr(value, query, alias='macaddr')
+
def _need_netaddr(f_name, *args, **kwargs):
- raise errors.AnsibleFilterError('The {0} filter requires python-netaddr be'
- ' installed on the ansible controller'.format(f_name))
+ raise errors.AnsibleFilterError('The %s filter requires python-netaddr be '
+ 'installed on the ansible controller' % f_name)
+
def ip4_hex(arg):
''' Convert an IPv4 address to Hexadecimal notation '''
numbers = list(map(int, arg.split('.')))
return '{:02x}{:02x}{:02x}{:02x}'.format(*numbers)
-# ---- Ansible filters ----
+# ---- Ansible filters ----
class FilterModule(object):
''' IP address and network manipulation filters '''
- filter_map = {
+ filter_map = {
# IP addresses and networks
'ipaddr': ipaddr,
'ipwrap': ipwrap,
diff --git a/lib/ansible/plugins/filter/json_query.py b/lib/ansible/plugins/filter/json_query.py
index 479721e9b7..bfb97fc51b 100644
--- a/lib/ansible/plugins/filter/json_query.py
+++ b/lib/ansible/plugins/filter/json_query.py
@@ -39,6 +39,7 @@ def json_query(data, expr):
return jmespath.search(expr, data)
+
class FilterModule(object):
''' Query filter '''
diff --git a/lib/ansible/plugins/filter/mathstuff.py b/lib/ansible/plugins/filter/mathstuff.py
index 242549f348..6e3cc94fe2 100644
--- a/lib/ansible/plugins/filter/mathstuff.py
+++ b/lib/ansible/plugins/filter/mathstuff.py
@@ -20,16 +20,16 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import math
import collections
import itertools
+import math
from ansible import errors
from ansible.module_utils import basic
def unique(a):
- if isinstance(a,collections.Hashable):
+ if isinstance(a, collections.Hashable):
c = set(a)
else:
c = []
@@ -38,38 +38,44 @@ def unique(a):
c.append(x)
return c
+
def intersect(a, b):
- if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable):
+ if isinstance(a, collections.Hashable) and isinstance(b, collections.Hashable):
c = set(a) & set(b)
else:
c = unique(filter(lambda x: x in b, a))
return c
+
def difference(a, b):
- if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable):
+ if isinstance(a, collections.Hashable) and isinstance(b, collections.Hashable):
c = set(a) - set(b)
else:
c = unique(filter(lambda x: x not in b, a))
return c
+
def symmetric_difference(a, b):
- if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable):
+ if isinstance(a, collections.Hashable) and isinstance(b, collections.Hashable):
c = set(a) ^ set(b)
else:
- c = unique(filter(lambda x: x not in intersect(a,b), union(a,b)))
+ c = unique(filter(lambda x: x not in intersect(a, b), union(a, b)))
return c
+
def union(a, b):
- if isinstance(a,collections.Hashable) and isinstance(b,collections.Hashable):
+ if isinstance(a, collections.Hashable) and isinstance(b, collections.Hashable):
c = set(a) | set(b)
else:
c = unique(a + b)
return c
+
def min(a):
_min = __builtins__.get('min')
return _min(a)
+
def max(a):
_max = __builtins__.get('max')
return _max(a)
@@ -97,7 +103,7 @@ def inversepower(x, base=2):
if base == 2:
return math.sqrt(x)
else:
- return math.pow(x, 1.0/float(base))
+ return math.pow(x, 1.0 / float(base))
except TypeError as e:
raise errors.AnsibleFilterError('root() can only be used on numbers: %s' % str(e))
@@ -109,6 +115,7 @@ def human_readable(size, isbits=False, unit=None):
except:
raise errors.AnsibleFilterError("human_readable() can't interpret following string: %s" % size)
+
def human_to_bytes(size, default_unit=None, isbits=False):
''' Return bytes count from a human readable string '''
try:
@@ -116,14 +123,15 @@ def human_to_bytes(size, default_unit=None, isbits=False):
except:
raise errors.AnsibleFilterError("human_to_bytes() can't interpret following string: %s" % size)
+
class FilterModule(object):
''' Ansible math jinja2 filters '''
def filters(self):
filters = {
# general math
- 'min' : min,
- 'max' : max,
+ 'min': min,
+ 'max': max,
# exponents and logarithms
'log': logarithm,
@@ -131,7 +139,7 @@ class FilterModule(object):
'root': inversepower,
# set theory
- 'unique' : unique,
+ 'unique': unique,
'intersect': intersect,
'difference': difference,
'symmetric_difference': symmetric_difference,
@@ -142,8 +150,8 @@ class FilterModule(object):
'combinations': itertools.combinations,
# computer theory
- 'human_readable' : human_readable,
- 'human_to_bytes' : human_to_bytes,
+ 'human_readable': human_readable,
+ 'human_to_bytes': human_to_bytes,
}
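A behavioural sketch of the set-theory filters reformatted above, assuming plain list inputs as Jinja2 would pass them (not part of the patch; depending on input types some filters return a set rather than a list, hence the sorted() normalisation):

from ansible.plugins.filter import mathstuff

assert mathstuff.unique([1, 2, 2, 3]) == [1, 2, 3]
assert mathstuff.union([1, 2, 3], [3, 4]) == [1, 2, 3, 4]
assert sorted(mathstuff.intersect([1, 2, 3], [2, 3, 4])) == [2, 3]
assert sorted(mathstuff.difference([1, 2, 3], [2])) == [1, 3]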
diff --git a/lib/ansible/plugins/inventory/__init__.py b/lib/ansible/plugins/inventory/__init__.py
index ce9be5d3c8..706e821501 100644
--- a/lib/ansible/plugins/inventory/__init__.py
+++ b/lib/ansible/plugins/inventory/__init__.py
@@ -85,8 +85,7 @@ class BaseInventoryPlugin(object):
def _compose(self, template, variables):
        ''' helper method for plugins to compose variables for Ansible based on jinja2 expression and inventory vars'''
t = Templar(loader=self.loader, variables=variables)
- return t.do_template('%s%s%s' % (t.environment.variable_start_string,template,t.environment.variable_end_string), disable_lookups=True)
-
+ return t.do_template('%s%s%s' % (t.environment.variable_start_string, template, t.environment.variable_end_string), disable_lookups=True)
class BaseFileInventoryPlugin(BaseInventoryPlugin):
@@ -100,8 +99,8 @@ class BaseFileInventoryPlugin(BaseInventoryPlugin):
super(BaseFileInventoryPlugin, self).__init__(cache=None)
-#### Helper methods ####
-def detect_range(line = None):
+# Helper methods
+def detect_range(line=None):
'''
A helper function that checks a given host line to see if it contains
a range pattern described in the docstring above.
@@ -110,7 +109,8 @@ def detect_range(line = None):
'''
return '[' in line
-def expand_hostname_range(line = None):
+
+def expand_hostname_range(line=None):
'''
A helper function that expands a given line that contains a pattern
specified in top docstring, and returns a list that consists of the
@@ -137,7 +137,7 @@ def expand_hostname_range(line = None):
# - also add an optional third parameter which contains the step. (Default: 1)
# so range can be [01:10:2] -> 01 03 05 07 09
- (head, nrange, tail) = line.replace('[','|',1).replace(']','|',1).split('|')
+ (head, nrange, tail) = line.replace('[', '|', 1).replace(']', '|', 1).split('|')
bounds = nrange.split(":")
if len(bounds) != 2 and len(bounds) != 3:
raise AnsibleError("host range must be begin:end or begin:end:step")
@@ -152,10 +152,13 @@ def expand_hostname_range(line = None):
if not end:
raise AnsibleError("host range must specify end value")
if beg[0] == '0' and len(beg) > 1:
- rlen = len(beg) # range length formatting hint
+ rlen = len(beg) # range length formatting hint
if rlen != len(end):
raise AnsibleError("host range must specify equal-length begin and end formats")
- fill = lambda _: str(_).zfill(rlen) # range sequence
+
+ def fill(x):
+ return str(x).zfill(rlen) # range sequence
+
else:
fill = str
@@ -164,17 +167,16 @@ def expand_hostname_range(line = None):
i_end = string.ascii_letters.index(end)
if i_beg > i_end:
raise AnsibleError("host range must have begin <= end")
- seq = list(string.ascii_letters[i_beg:i_end+1:int(step)])
+ seq = list(string.ascii_letters[i_beg:i_end + 1:int(step)])
except ValueError: # not an alpha range
- seq = range(int(beg), int(end)+1, int(step))
+ seq = range(int(beg), int(end) + 1, int(step))
for rseq in seq:
hname = ''.join((head, fill(rseq), tail))
if detect_range(hname):
- all_hosts.extend( expand_hostname_range( hname ) )
+ all_hosts.extend(expand_hostname_range(hname))
else:
all_hosts.append(hname)
return all_hosts
-
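To make the bracketed range syntax handled above concrete, a small sketch using the two helpers as they are imported elsewhere in this diff (zero-padding and alphabetic ranges follow the comments in the hunk):

from ansible.plugins.inventory import detect_range, expand_hostname_range

assert detect_range('web[01:03].example.com')
assert expand_hostname_range('web[01:03].example.com') == [
    'web01.example.com', 'web02.example.com', 'web03.example.com']
assert expand_hostname_range('db[a:c]') == ['dba', 'dbb', 'dbc']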
diff --git a/lib/ansible/plugins/inventory/ini.py b/lib/ansible/plugins/inventory/ini.py
index ae1a00b47c..ed32aa0ec1 100644
--- a/lib/ansible/plugins/inventory/ini.py
+++ b/lib/ansible/plugins/inventory/ini.py
@@ -130,7 +130,7 @@ class InventoryModule(BaseFileInventoryPlugin):
for line in b_data.splitlines():
if line and line[0] in self.b_COMMENT_MARKERS:
# Replace is okay for comment lines
- #data.append(to_text(line, errors='surrogate_then_replace'))
+ # data.append(to_text(line, errors='surrogate_then_replace'))
# Currently we only need these lines for accurate lineno in errors
data.append(u'')
else:
@@ -141,7 +141,6 @@ class InventoryModule(BaseFileInventoryPlugin):
except Exception as e:
raise AnsibleParserError(e)
-
def _raise_error(self, message):
raise AnsibleError("%s:%d: " % (self._filename, self.lineno) + message)
@@ -281,7 +280,7 @@ class InventoryModule(BaseFileInventoryPlugin):
self._raise_error("Expected key=value, got: %s" % (line))
- def _parse_host_definition(self, line ):
+ def _parse_host_definition(self, line):
'''
Takes a single line and tries to parse it as a host definition. Returns
a list of Hosts if successful, or raises an error.
diff --git a/lib/ansible/plugins/inventory/script.py b/lib/ansible/plugins/inventory/script.py
index 008d8f3bcf..b60fb29d4e 100644
--- a/lib/ansible/plugins/inventory/script.py
+++ b/lib/ansible/plugins/inventory/script.py
@@ -80,7 +80,7 @@ class InventoryModule(BaseInventoryPlugin):
# Support inventory scripts that are not prefixed with some
# path information but happen to be in the current working
# directory when '.' is not in PATH.
- cmd = [ path, "--list" ]
+ cmd = [path, "--list"]
try:
cache_key = self.get_cache_prefix(path)
@@ -95,7 +95,7 @@ class InventoryModule(BaseInventoryPlugin):
path = to_native(path)
if stderr:
- err = to_native(stderr) + "\n"
+ err = to_native(stderr) + "\n"
if sp.returncode != 0:
raise AnsibleError("Inventory script (%s) had an execution error: %s " % (path, err))
@@ -140,14 +140,13 @@ class InventoryModule(BaseInventoryPlugin):
try:
got = processed.get(host, {})
except AttributeError as e:
- raise AnsibleError("Improperly formatted host information for %s: %s" % (host,to_native(e)))
+ raise AnsibleError("Improperly formatted host information for %s: %s" % (host, to_native(e)))
self.populate_host_vars(host, got, group)
except Exception as e:
raise AnsibleParserError(to_native(e))
-
def _parse_group(self, group, data):
self.inventory.add_group(group)
@@ -155,7 +154,7 @@ class InventoryModule(BaseInventoryPlugin):
if not isinstance(data, dict):
data = {'hosts': data}
        # if it has none of those subkeys, this is the simplified syntax: a host with vars
- elif not any(k in data for k in ('hosts','vars','children')):
+ elif not any(k in data for k in ('hosts', 'vars', 'children')):
data = {'hosts': [group], 'vars': data}
if 'hosts' in data:
diff --git a/lib/ansible/plugins/inventory/yaml.py b/lib/ansible/plugins/inventory/yaml.py
index 8c3d3e7727..612e5174bb 100644
--- a/lib/ansible/plugins/inventory/yaml.py
+++ b/lib/ansible/plugins/inventory/yaml.py
@@ -71,6 +71,7 @@ from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins.inventory import BaseFileInventoryPlugin, detect_range, expand_hostname_range
+
class InventoryModule(BaseFileInventoryPlugin):
NAME = 'yaml'
@@ -120,7 +121,7 @@ class InventoryModule(BaseFileInventoryPlugin):
self.inventory.add_group(group)
if isinstance(group_data, dict):
- #make sure they are dicts
+ # make sure they are dicts
for section in ['vars', 'children', 'hosts']:
if section in group_data and isinstance(group_data[section], string_types):
group_data[section] = {group_data[section]: None}
@@ -178,4 +179,4 @@ class InventoryModule(BaseFileInventoryPlugin):
'''
Compiles the regular expressions required to parse the inventory and stores them in self.patterns.
'''
- self.patterns['groupname'] = re.compile( r'''^[A-Za-z_][A-Za-z0-9_]*$''')
+ self.patterns['groupname'] = re.compile(r'''^[A-Za-z_][A-Za-z0-9_]*$''')
diff --git a/lib/ansible/plugins/lookup/__init__.py b/lib/ansible/plugins/lookup/__init__.py
index 9df9ef20a4..eaecd62252 100644
--- a/lib/ansible/plugins/lookup/__init__.py
+++ b/lib/ansible/plugins/lookup/__init__.py
@@ -60,7 +60,7 @@ class LookupBase(with_metaclass(ABCMeta, object)):
results = []
for x in a:
for y in b:
- results.append(LookupBase._flatten([x,y]))
+ results.append(LookupBase._flatten([x, y]))
return results
@staticmethod
diff --git a/lib/ansible/plugins/lookup/cartesian.py b/lib/ansible/plugins/lookup/cartesian.py
index 1fca635d7a..8b8decffae 100644
--- a/lib/ansible/plugins/lookup/cartesian.py
+++ b/lib/ansible/plugins/lookup/cartesian.py
@@ -51,6 +51,7 @@ from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.utils.listify import listify_lookup_plugin_terms
+
class LookupModule(LookupBase):
"""
Create the cartesian product of lists
@@ -79,4 +80,3 @@ class LookupModule(LookupBase):
raise AnsibleError("with_cartesian requires at least one element in each list")
return [self._flatten(x) for x in product(*my_list)]
-
diff --git a/lib/ansible/plugins/lookup/csvfile.py b/lib/ansible/plugins/lookup/csvfile.py
index 6279d8c081..eb95dba179 100644
--- a/lib/ansible/plugins/lookup/csvfile.py
+++ b/lib/ansible/plugins/lookup/csvfile.py
@@ -39,6 +39,7 @@ class CSVRecoder:
def next(self):
return self.reader.next().encode("utf-8")
+
class CSVReader:
"""
A CSV reader which will iterate over lines in the CSV file "f",
@@ -56,6 +57,7 @@ class CSVReader:
def __iter__(self):
return self
+
class LookupModule(LookupBase):
def read_csv(self, filename, key, delimiter, encoding='utf-8', dflt=None, col=1):
@@ -81,11 +83,11 @@ class LookupModule(LookupBase):
key = params[0]
paramvals = {
- 'col' : "1", # column to return
- 'default' : None,
- 'delimiter' : "TAB",
- 'file' : 'ansible.csv',
- 'encoding' : 'utf-8',
+ 'col': "1", # column to return
+ 'default': None,
+ 'delimiter': "TAB",
+ 'file': 'ansible.csv',
+ 'encoding': 'utf-8',
}
# parameters specified?
diff --git a/lib/ansible/plugins/lookup/dict.py b/lib/ansible/plugins/lookup/dict.py
index 58375b02af..7e24e3a2b9 100644
--- a/lib/ansible/plugins/lookup/dict.py
+++ b/lib/ansible/plugins/lookup/dict.py
@@ -22,6 +22,7 @@ import collections
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
diff --git a/lib/ansible/plugins/lookup/dig.py b/lib/ansible/plugins/lookup/dig.py
index 49dce0ef80..dd565d6f1f 100644
--- a/lib/ansible/plugins/lookup/dig.py
+++ b/lib/ansible/plugins/lookup/dig.py
@@ -29,11 +29,12 @@ try:
import dns.reversename
import dns.rdataclass
from dns.rdatatype import (A, AAAA, CNAME, DLV, DNAME, DNSKEY, DS, HINFO, LOC,
- MX, NAPTR, NS, NSEC3PARAM, PTR, RP, SOA, SPF, SRV, SSHFP, TLSA, TXT)
+ MX, NAPTR, NS, NSEC3PARAM, PTR, RP, SOA, SPF, SRV, SSHFP, TLSA, TXT)
HAVE_DNS = True
except ImportError:
HAVE_DNS = False
+
def make_rdata_dict(rdata):
''' While the 'dig' lookup plugin supports anything which dnspython supports
out of the box, the following supported_types list describes which
@@ -42,28 +43,28 @@ def make_rdata_dict(rdata):
Note: adding support for RRSIG is hard work. :)
'''
supported_types = {
- A : ['address'],
- AAAA : ['address'],
- CNAME : ['target'],
- DNAME : ['target'],
- DLV : ['algorithm', 'digest_type', 'key_tag', 'digest'],
- DNSKEY : ['flags', 'algorithm', 'protocol', 'key'],
- DS : ['algorithm', 'digest_type', 'key_tag', 'digest'],
- HINFO : ['cpu', 'os'],
- LOC : ['latitude', 'longitude', 'altitude', 'size', 'horizontal_precision', 'vertical_precision'],
- MX : ['preference', 'exchange'],
- NAPTR : ['order', 'preference', 'flags', 'service', 'regexp', 'replacement'],
- NS : ['target'],
- NSEC3PARAM : ['algorithm', 'flags', 'iterations', 'salt'],
- PTR : ['target'],
- RP : ['mbox', 'txt'],
- # RRSIG : ['algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'signature'],
- SOA : ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', 'minimum'],
- SPF : ['strings'],
- SRV : ['priority', 'weight', 'port', 'target'],
- SSHFP : ['algorithm', 'fp_type', 'fingerprint'],
- TLSA : ['usage', 'selector', 'mtype', 'cert'],
- TXT : ['strings'],
+ A: ['address'],
+ AAAA: ['address'],
+ CNAME: ['target'],
+ DNAME: ['target'],
+ DLV: ['algorithm', 'digest_type', 'key_tag', 'digest'],
+ DNSKEY: ['flags', 'algorithm', 'protocol', 'key'],
+ DS: ['algorithm', 'digest_type', 'key_tag', 'digest'],
+ HINFO: ['cpu', 'os'],
+ LOC: ['latitude', 'longitude', 'altitude', 'size', 'horizontal_precision', 'vertical_precision'],
+ MX: ['preference', 'exchange'],
+ NAPTR: ['order', 'preference', 'flags', 'service', 'regexp', 'replacement'],
+ NS: ['target'],
+ NSEC3PARAM: ['algorithm', 'flags', 'iterations', 'salt'],
+ PTR: ['target'],
+ RP: ['mbox', 'txt'],
+ # RRSIG: ['algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'signature'],
+ SOA: ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', 'minimum'],
+ SPF: ['strings'],
+ SRV: ['priority', 'weight', 'port', 'target'],
+ SSHFP: ['algorithm', 'fp_type', 'fingerprint'],
+ TLSA: ['usage', 'selector', 'mtype', 'cert'],
+ TXT: ['strings'],
}
rd = {}
@@ -71,7 +72,7 @@ def make_rdata_dict(rdata):
if rdata.rdtype in supported_types:
fields = supported_types[rdata.rdtype]
for f in fields:
- val = rdata.__getattribute__(f)
+ val = rdata.__getattribute__(f)
if isinstance(val, dns.name.Name):
val = dns.name.Name.to_text(val)
@@ -89,11 +90,11 @@ def make_rdata_dict(rdata):
if rdata.rdtype == TLSA and f == 'cert':
val = dns.rdata._hexify(rdata.cert).replace(' ', '')
-
- rd[f] = val
+ rd[f] = val
return rd
+
# ==============================================================
# dig: Lookup DNS records
#
@@ -127,8 +128,8 @@ class LookupModule(LookupBase):
myres.use_edns(0, ednsflags=dns.flags.DO, payload=edns_size)
domain = None
- qtype = 'A'
- flat = True
+ qtype = 'A'
+ flat = True
rdclass = dns.rdataclass.from_text('IN')
for t in terms:
@@ -201,10 +202,10 @@ class LookupModule(LookupBase):
else:
try:
rd = make_rdata_dict(rdata)
- rd['owner'] = answers.canonical_name.to_text()
- rd['type'] = dns.rdatatype.to_text(rdata.rdtype)
- rd['ttl'] = answers.rrset.ttl
- rd['class'] = dns.rdataclass.to_text(rdata.rdclass)
+ rd['owner'] = answers.canonical_name.to_text()
+ rd['type'] = dns.rdatatype.to_text(rdata.rdtype)
+ rd['ttl'] = answers.rrset.ttl
+ rd['class'] = dns.rdataclass.to_text(rdata.rdclass)
ret.append(rd)
except Exception as e:
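A hedged usage sketch of make_rdata_dict() above; it needs dnspython and live DNS, and the exact values depend on the zone queried:

import dns.resolver
from ansible.plugins.lookup.dig import make_rdata_dict

for rdata in dns.resolver.query('example.com', 'MX'):
    # for an MX answer this yields the 'preference' and 'exchange' fields
    print(make_rdata_dict(rdata))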
diff --git a/lib/ansible/plugins/lookup/dnstxt.py b/lib/ansible/plugins/lookup/dnstxt.py
index be0068e8dd..fa1cf20667 100644
--- a/lib/ansible/plugins/lookup/dnstxt.py
+++ b/lib/ansible/plugins/lookup/dnstxt.py
@@ -17,17 +17,18 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-HAVE_DNS=False
+HAVE_DNS = False
try:
import dns.resolver
from dns.exception import DNSException
- HAVE_DNS=True
+ HAVE_DNS = True
except ImportError:
pass
from ansible.errors import AnsibleError
-from ansible.plugins.lookup import LookupBase
from ansible.module_utils._text import to_native
+from ansible.plugins.lookup import LookupBase
+
# ==============================================================
# DNSTXT: DNS TXT records
@@ -63,4 +64,3 @@ class LookupModule(LookupBase):
ret.append(''.join(string))
return ret
-
diff --git a/lib/ansible/plugins/lookup/env.py b/lib/ansible/plugins/lookup/env.py
index 0314863f6a..5b4cbe9c23 100644
--- a/lib/ansible/plugins/lookup/env.py
+++ b/lib/ansible/plugins/lookup/env.py
@@ -21,6 +21,7 @@ import os
from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
diff --git a/lib/ansible/plugins/lookup/etcd.py b/lib/ansible/plugins/lookup/etcd.py
index 792cda6afb..064e21be66 100644
--- a/lib/ansible/plugins/lookup/etcd.py
+++ b/lib/ansible/plugins/lookup/etcd.py
@@ -74,12 +74,13 @@ ANSIBLE_ETCD_VERSION = 'v1'
if os.getenv('ANSIBLE_ETCD_VERSION') is not None:
ANSIBLE_ETCD_VERSION = os.environ['ANSIBLE_ETCD_VERSION']
+
class Etcd:
def __init__(self, url=ANSIBLE_ETCD_URL, version=ANSIBLE_ETCD_VERSION,
validate_certs=True):
self.url = url
self.version = version
- self.baseurl = '%s/%s/keys' % (self.url,self.version)
+ self.baseurl = '%s/%s/keys' % (self.url, self.version)
self.validate_certs = validate_certs
def get(self, key):
@@ -111,6 +112,7 @@ class Etcd:
return value
+
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
diff --git a/lib/ansible/plugins/lookup/filetree.py b/lib/ansible/plugins/lookup/filetree.py
index 3cc6796f0c..762cf24d67 100644
--- a/lib/ansible/plugins/lookup/filetree.py
+++ b/lib/ansible/plugins/lookup/filetree.py
@@ -22,10 +22,10 @@ import pwd
import grp
import stat
-HAVE_SELINUX=False
+HAVE_SELINUX = False
try:
import selinux
- HAVE_SELINUX=True
+ HAVE_SELINUX = True
except ImportError:
pass
@@ -38,6 +38,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
# If selinux fails to find a default, return an array of None
def selinux_context(path):
context = [None, None, None, None]
@@ -119,7 +120,7 @@ class LookupModule(LookupBase):
relpath = os.path.relpath(os.path.join(root, entry), path)
# Skip if relpath was already processed (from another root)
- if relpath not in [ entry['path'] for entry in ret ]:
+ if relpath not in [entry['path'] for entry in ret]:
props = file_props(path, relpath)
if props is not None:
ret.append(props)
diff --git a/lib/ansible/plugins/lookup/first_found.py b/lib/ansible/plugins/lookup/first_found.py
index 569a0ad055..b41e714ca3 100644
--- a/lib/ansible/plugins/lookup/first_found.py
+++ b/lib/ansible/plugins/lookup/first_found.py
@@ -34,11 +34,11 @@ __metaclass__ = type
# first file found with os.path.exists() is returned
# no file matches raises ansibleerror
# EXAMPLES
-# - name: copy first existing file found to /some/file
-# action: copy src=$item dest=/some/file
-# with_first_found:
-# - files: foo ${inventory_hostname} bar
-# paths: /tmp/production /tmp/staging
+# - name: copy first existing file found to /some/file
+# action: copy src=$item dest=/some/file
+# with_first_found:
+# - files: foo ${inventory_hostname} bar
+# paths: /tmp/production /tmp/staging
# that will look for files in this order:
# /tmp/production/foo
@@ -48,10 +48,10 @@ __metaclass__ = type
# ${inventory_hostname}
# bar
-# - name: copy first existing file found to /some/file
-# action: copy src=$item dest=/some/file
-# with_first_found:
-# - files: /some/place/foo ${inventory_hostname} /some/place/else
+# - name: copy first existing file found to /some/file
+# action: copy src=$item dest=/some/file
+# with_first_found:
+# - files: /some/place/foo ${inventory_hostname} /some/place/else
# that will look for files in this order:
# /some/place/foo
@@ -59,47 +59,47 @@ __metaclass__ = type
# /some/place/else
# example - including tasks:
-# tasks:
-# - include: $item
-# with_first_found:
-# - files: generic
-# paths: tasks/staging tasks/production
+# tasks:
+# - include: $item
+# with_first_found:
+# - files: generic
+# paths: tasks/staging tasks/production
# this will include the tasks in the file generic where it is found first (staging or production)
# example simple file lists
-#tasks:
-#- name: first found file
-# action: copy src=$item dest=/etc/file.cfg
-# with_first_found:
-# - files: foo.${inventory_hostname} foo
+# tasks:
+# - name: first found file
+# action: copy src=$item dest=/etc/file.cfg
+# with_first_found:
+# - files: foo.${inventory_hostname} foo
# example skipping if no matched files
# first_found also offers the ability to control whether failing
# to find a file returns an error
#
-#- name: first found file - or skip
-# action: copy src=$item dest=/etc/file.cfg
-# with_first_found:
-# - files: foo.${inventory_hostname}
-# skip: true
+# - name: first found file - or skip
+# action: copy src=$item dest=/etc/file.cfg
+# with_first_found:
+# - files: foo.${inventory_hostname}
+# skip: true
# example a role with default configuration and configuration per host
# you can set multiple terms with their own files and paths to look through.
# consider a role that sets some configuration per host falling back on a default config.
#
-#- name: some configuration template
-# template: src={{ item }} dest=/etc/file.cfg mode=0444 owner=root group=root
-# with_first_found:
-# - files:
-# - ${inventory_hostname}/etc/file.cfg
-# paths:
-# - ../../../templates.overwrites
-# - ../../../templates
-# - files:
-# - etc/file.cfg
-# paths:
-# - templates
+# - name: some configuration template
+# template: src={{ item }} dest=/etc/file.cfg mode=0444 owner=root group=root
+# with_first_found:
+# - files:
+# - ${inventory_hostname}/etc/file.cfg
+# paths:
+# - ../../../templates.overwrites
+# - ../../../templates
+# - files:
+# - etc/file.cfg
+# paths:
+# - templates
# the above will return an empty list if the files cannot be found at all
# if skip is unspecified or if it is set to false then it will return a list
@@ -110,13 +110,13 @@ __metaclass__ = type
# first_available_file with with_first_found and leave the file listing in place
#
#
-# - name: with_first_found like first_available_file
-# action: copy src=$item dest=/tmp/faftest
-# with_first_found:
-# - ../files/foo
-# - ../files/bar
-# - ../files/baz
-# ignore_errors: true
+# - name: with_first_found like first_available_file
+# action: copy src=$item dest=/tmp/faftest
+# with_first_found:
+# - ../files/foo
+# - ../files/bar
+# - ../files/baz
+# ignore_errors: true
import os
@@ -145,7 +145,7 @@ class LookupModule(LookupBase):
if isinstance(term, dict):
files = term.get('files', [])
paths = term.get('paths', [])
- skip = boolean(term.get('skip', False))
+ skip = boolean(term.get('skip', False))
filelist = files
if isinstance(files, string_types):
@@ -191,4 +191,3 @@ class LookupModule(LookupBase):
else:
raise AnsibleLookupError("No file was found when using with_first_found. Use the 'skip: true' option to allow this task to be skipped if no "
"files are found")
-
diff --git a/lib/ansible/plugins/lookup/flattened.py b/lib/ansible/plugins/lookup/flattened.py
index 7c04b91bc0..cf859b6ef6 100644
--- a/lib/ansible/plugins/lookup/flattened.py
+++ b/lib/ansible/plugins/lookup/flattened.py
@@ -29,9 +29,9 @@ class LookupModule(LookupBase):
# make sure term is not a list of one (list of one..) item
# return the final non list item if so
- if isinstance(term,list) and len(term) == 1:
+ if isinstance(term, list) and len(term) == 1:
term = term[0]
- if isinstance(term,list):
+ if isinstance(term, list):
term = self._check_list_of_one_list(term)
return term
@@ -50,7 +50,7 @@ class LookupModule(LookupBase):
# convert a variable to a list
term2 = listify_lookup_plugin_terms(term, templar=self._templar, loader=self._loader)
# but avoid converting a plain string to a list of one string
- if term2 != [ term ]:
+ if term2 != [term]:
term = term2
if isinstance(term, list):
@@ -62,11 +62,9 @@ class LookupModule(LookupBase):
return ret
-
def run(self, terms, variables, **kwargs):
if not isinstance(terms, list):
raise AnsibleError("with_flattened expects a list")
return self._do_flatten(terms, variables)
-
diff --git a/lib/ansible/plugins/lookup/hashi_vault.py b/lib/ansible/plugins/lookup/hashi_vault.py
index c9a07fe058..f38d412a9c 100644
--- a/lib/ansible/plugins/lookup/hashi_vault.py
+++ b/lib/ansible/plugins/lookup/hashi_vault.py
@@ -30,7 +30,6 @@
# necessarily be an error if a bad endpoint is specified.
#
# Requires hvac library. Install with pip.
-#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -46,6 +45,7 @@ ANSIBLE_HASHI_VAULT_ADDR = 'http://127.0.0.1:8200'
if os.getenv('VAULT_ADDR') is not None:
ANSIBLE_HASHI_VAULT_ADDR = os.environ['VAULT_ADDR']
+
class HashiVault:
def __init__(self, **kwargs):
try:
@@ -62,7 +62,7 @@ class HashiVault:
s_f = s.split(':')
self.secret = s_f[0]
- if len(s_f)>=2:
+ if len(s_f) >= 2:
self.secret_field = s_f[1]
else:
self.secret_field = 'value'
@@ -108,7 +108,7 @@ class HashiVault:
if data is None:
raise AnsibleError("The secret %s doesn't seem to exist" % self.secret)
- if self.secret_field=='': # secret was specified with trailing ':'
+ if self.secret_field == '': # secret was specified with trailing ':'
return data['data']
if self.secret_field not in data['data']:
@@ -153,4 +153,3 @@ class LookupModule(LookupBase):
ret.append(value)
return ret
-
diff --git a/lib/ansible/plugins/lookup/indexed_items.py b/lib/ansible/plugins/lookup/indexed_items.py
index da06de97e3..80ef858a2a 100644
--- a/lib/ansible/plugins/lookup/indexed_items.py
+++ b/lib/ansible/plugins/lookup/indexed_items.py
@@ -20,6 +20,7 @@ __metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def __init__(self, basedir=None, **kwargs):
@@ -32,4 +33,3 @@ class LookupModule(LookupBase):
items = self._flatten(terms)
return list(zip(range(len(items)), items))
-
diff --git a/lib/ansible/plugins/lookup/ini.py b/lib/ansible/plugins/lookup/ini.py
index 040571cec1..14660bc5f5 100644
--- a/lib/ansible/plugins/lookup/ini.py
+++ b/lib/ansible/plugins/lookup/ini.py
@@ -37,7 +37,7 @@ def _parse_params(term):
params[k] = ''
thiskey = 'key'
- for idp,phrase in enumerate(term.split()):
+ for idp, phrase in enumerate(term.split()):
for k in keys:
if ('%s=' % k) in phrase:
thiskey = k
@@ -81,7 +81,7 @@ class LookupModule(LookupBase):
basedir = self.get_basedir(variables)
self.basedir = basedir
- self.cp = configparser.ConfigParser()
+ self.cp = configparser.ConfigParser()
ret = []
for term in terms:
@@ -89,11 +89,11 @@ class LookupModule(LookupBase):
key = params[0]
paramvals = {
- 'file' : 'ansible.ini',
- 're' : False,
- 'default' : None,
- 'section' : "global",
- 'type' : "ini",
+ 'file': 'ansible.ini',
+ 're': False,
+ 'default': None,
+ 'section': "global",
+ 'type': "ini",
}
# parameters specified?
diff --git a/lib/ansible/plugins/lookup/inventory_hostnames.py b/lib/ansible/plugins/lookup/inventory_hostnames.py
index 3de8682e01..05a04a6949 100644
--- a/lib/ansible/plugins/lookup/inventory_hostnames.py
+++ b/lib/ansible/plugins/lookup/inventory_hostnames.py
@@ -19,14 +19,15 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.plugins.lookup import LookupBase
from ansible.inventory.manager import split_host_pattern, order_patterns
+from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def get_hosts(self, variables, pattern):
hosts = []
- if pattern[0] in ('!','&'):
+ if pattern[0] in ('!', '&'):
obj = pattern[1:]
else:
obj = pattern
@@ -47,9 +48,9 @@ class LookupModule(LookupBase):
for p in patterns:
that = self.get_hosts(variables, p)
if p.startswith("!"):
- host_list = [ h for h in host_list if h not in that]
+ host_list = [h for h in host_list if h not in that]
elif p.startswith("&"):
- host_list = [ h for h in host_list if h in that ]
+ host_list = [h for h in host_list if h in that]
else:
host_list.extend(that)
diff --git a/lib/ansible/plugins/lookup/items.py b/lib/ansible/plugins/lookup/items.py
index 43bb77e144..063c0ef216 100644
--- a/lib/ansible/plugins/lookup/items.py
+++ b/lib/ansible/plugins/lookup/items.py
@@ -19,9 +19,9 @@ __metaclass__ = type
from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def run(self, terms, **kwargs):
return self._flatten(terms)
-
diff --git a/lib/ansible/plugins/lookup/keyring.py b/lib/ansible/plugins/lookup/keyring.py
index 0d4da4b466..c7df1ecbe0 100644
--- a/lib/ansible/plugins/lookup/keyring.py
+++ b/lib/ansible/plugins/lookup/keyring.py
@@ -15,8 +15,6 @@
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
-
-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
@@ -56,20 +54,20 @@ except ImportError:
from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def run(self, terms, **kwargs):
if not HAS_KEYRING:
raise AnsibleError(u"Can't LOOKUP(keyring): missing required python library 'keyring'")
- display.vvvv(u"keyring: %s" % keyring.get_keyring() )
+ display.vvvv(u"keyring: %s" % keyring.get_keyring())
ret = []
for term in terms:
(servicename, username) = (term.split()[0], term.split()[1])
- display.vvvv(u"username: %s, servicename: %s " %(username,servicename))
- password = keyring.get_password(servicename,username)
+ display.vvvv(u"username: %s, servicename: %s " % (username, servicename))
+ password = keyring.get_password(servicename, username)
if password is None:
raise AnsibleError(u"servicename: %s for user %s not found" % (servicename, username))
ret.append(password.rstrip())
return ret
-
diff --git a/lib/ansible/plugins/lookup/lines.py b/lib/ansible/plugins/lookup/lines.py
index 04e5ebd8cc..429c8c844c 100644
--- a/lib/ansible/plugins/lookup/lines.py
+++ b/lib/ansible/plugins/lookup/lines.py
@@ -23,6 +23,7 @@ from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.module_utils._text import to_text
+
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
diff --git a/lib/ansible/plugins/lookup/list.py b/lib/ansible/plugins/lookup/list.py
index 76b86dd167..25e9bdcfdf 100644
--- a/lib/ansible/plugins/lookup/list.py
+++ b/lib/ansible/plugins/lookup/list.py
@@ -21,6 +21,7 @@ __metaclass__ = type
from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def run(self, terms, **kwargs):
diff --git a/lib/ansible/plugins/lookup/mongodb.py b/lib/ansible/plugins/lookup/mongodb.py
index 41b14a76f4..725c762da2 100644
--- a/lib/ansible/plugins/lookup/mongodb.py
+++ b/lib/ansible/plugins/lookup/mongodb.py
@@ -134,7 +134,7 @@ class LookupModule(LookupBase):
item[1] = ASCENDING
elif sort_order == "DESCENDING":
item[1] = DESCENDING
- #else the user knows what s/he is doing and we won't predict. PyMongo will return an error if necessary
+ # else the user knows what s/he is doing and we won't predict. PyMongo will return an error if necessary
def convert_mongo_result_to_valid_json(self, result):
if result is None:
@@ -151,17 +151,16 @@ class LookupModule(LookupBase):
elif isinstance(result, dict):
new_dict = {}
for key in result.keys():
- value = result[key] # python2 and 3 compatible....
+ value = result[key] # python2 and 3 compatible....
new_dict[key] = self.convert_mongo_result_to_valid_json(value)
return new_dict
elif isinstance(result, datetime.datetime):
- #epoch
- return (result - datetime.datetime(1970,1,1)).total_seconds()
+ # epoch
+            return (result - datetime.datetime(1970, 1, 1)).total_seconds()
else:
- #failsafe
+ # failsafe
return "{}".format(result)
-
def run(self, terms, variables, **kwargs):
ret = []
@@ -226,7 +225,7 @@ class LookupModule(LookupBase):
try:
client = MongoClient(connection_string, **extra_connection_parameters)
- results = client[database][collection].find( **term )
+ results = client[database][collection].find(**term)
for result in results:
result = self.convert_mongo_result_to_valid_json(result)
@@ -235,6 +234,4 @@ class LookupModule(LookupBase):
except ConnectionFailure as e:
raise AnsibleError('unable to connect to database: %s' % str(e))
-
-
return ret
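A quick spot check of the datetime-to-epoch branch corrected above (the subtraction against the 1970-01-01 epoch yields seconds as a float):

import datetime

delta = datetime.datetime(1970, 1, 2) - datetime.datetime(1970, 1, 1)
assert delta.total_seconds() == 86400.0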
diff --git a/lib/ansible/plugins/lookup/nested.py b/lib/ansible/plugins/lookup/nested.py
index 322fcce1ff..08f7a9d94c 100644
--- a/lib/ansible/plugins/lookup/nested.py
+++ b/lib/ansible/plugins/lookup/nested.py
@@ -23,6 +23,7 @@ from ansible.errors import AnsibleError, AnsibleUndefinedVariable
from ansible.plugins.lookup import LookupBase
from ansible.utils.listify import listify_lookup_plugin_terms
+
class LookupModule(LookupBase):
def _lookup_variables(self, terms, variables):
@@ -47,10 +48,8 @@ class LookupModule(LookupBase):
result = my_list.pop()
while len(my_list) > 0:
result2 = self._combine(result, my_list.pop())
- result = result2
+ result = result2
new_result = []
for x in result:
new_result.append(self._flatten(x))
return new_result
-
-
diff --git a/lib/ansible/plugins/lookup/password.py b/lib/ansible/plugins/lookup/password.py
index 426efaeb7a..161e84b24a 100644
--- a/lib/ansible/plugins/lookup/password.py
+++ b/lib/ansible/plugins/lookup/password.py
@@ -131,7 +131,7 @@ def _gen_candidate_chars(characters):
# getattr from string expands things like "ascii_letters" and "digits"
# into a set of characters.
chars.append(to_text(getattr(string, to_native(chars_spec), chars_spec),
- errors='strict'))
+ errors='strict'))
chars = u''.join(chars).replace(u'"', u'').replace(u"'", u'')
return chars
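The getattr-from-string trick in the hunk above expands named character classes; a minimal standalone sketch of the same idea:

import string

assert getattr(string, 'digits', 'digits') == '0123456789'
# an unknown spec falls back to the literal string itself
assert getattr(string, ',', ',') == ','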
diff --git a/lib/ansible/plugins/lookup/passwordstore.py b/lib/ansible/plugins/lookup/passwordstore.py
index e02b85cac6..adeedeb88d 100644
--- a/lib/ansible/plugins/lookup/passwordstore.py
+++ b/lib/ansible/plugins/lookup/passwordstore.py
@@ -24,6 +24,7 @@ from distutils import util
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
+
# backhacked check_output with input for python 2.7
# http://stackoverflow.com/questions/10103551/passing-data-to-subprocess-check-output
def check_output2(*popenargs, **kwargs):
@@ -41,7 +42,7 @@ def check_output2(*popenargs, **kwargs):
inputdata = None
process = subprocess.Popen(*popenargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
try:
- out,err = process.communicate(inputdata)
+ out, err = process.communicate(inputdata)
except:
process.kill()
process.wait()
@@ -51,9 +52,10 @@ def check_output2(*popenargs, **kwargs):
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
- raise subprocess.CalledProcessError(retcode, cmd, out+err)
+ raise subprocess.CalledProcessError(retcode, cmd, out + err)
return out
+
class LookupModule(LookupBase):
def parse_params(self, term):
# I went with the "traditional" param followed with space separated KV pairs.
@@ -117,7 +119,7 @@ class LookupModule(LookupBase):
newpass = self.paramvals['userpass']
else:
try:
- newpass = check_output2(['pwgen','-cns',str(self.paramvals['length']), '1']).rstrip()
+ newpass = check_output2(['pwgen', '-cns', str(self.paramvals['length']), '1']).rstrip()
except (subprocess.CalledProcessError) as e:
raise AnsibleError(e)
return newpass
@@ -125,11 +127,11 @@ class LookupModule(LookupBase):
def update_password(self):
# generate new password, insert old lines from current result and return new password
newpass = self.get_newpass()
- datetime= time.strftime("%d/%m/%Y %H:%M:%S")
- msg = newpass +'\n' + '\n'.join(self.passoutput[1:])
- msg+="\nlookup_pass: old password was {} (Updated on {})\n".format(self.password, datetime)
+ datetime = time.strftime("%d/%m/%Y %H:%M:%S")
+ msg = newpass + '\n' + '\n'.join(self.passoutput[1:])
+ msg += "\nlookup_pass: old password was {} (Updated on {})\n".format(self.password, datetime)
try:
- generate = check_output2(['pass','insert','-f','-m',self.passname], input=msg)
+ generate = check_output2(['pass', 'insert', '-f', '-m', self.passname], input=msg)
except (subprocess.CalledProcessError) as e:
raise AnsibleError(e)
return newpass
@@ -141,7 +143,7 @@ class LookupModule(LookupBase):
datetime = time.strftime("%d/%m/%Y %H:%M:%S")
msg = newpass + '\n' + "lookup_pass: First generated by ansible on {}\n".format(datetime)
try:
- generate = check_output2(['pass','insert','-f','-m',self.passname], input=msg)
+ generate = check_output2(['pass', 'insert', '-f', '-m', self.passname], input=msg)
except (subprocess.CalledProcessError) as e:
raise AnsibleError(e)
return newpass
@@ -160,13 +162,14 @@ class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
result = []
self.paramvals = {
- 'subkey':'password',
- 'directory':variables.get('passwordstore'),
- 'create':False,
+ 'subkey': 'password',
+ 'directory': variables.get('passwordstore'),
+ 'create': False,
'returnall': False,
- 'overwrite':False,
- 'userpass':'',
- 'length': 16}
+ 'overwrite': False,
+ 'userpass': '',
+ 'length': 16,
+ }
for term in terms:
self.parse_params(term) # parse the input into paramvals
diff --git a/lib/ansible/plugins/lookup/pipe.py b/lib/ansible/plugins/lookup/pipe.py
index 306e3afb7a..11749fe395 100644
--- a/lib/ansible/plugins/lookup/pipe.py
+++ b/lib/ansible/plugins/lookup/pipe.py
@@ -22,6 +22,7 @@ import subprocess
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
+
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
diff --git a/lib/ansible/plugins/lookup/random_choice.py b/lib/ansible/plugins/lookup/random_choice.py
index fccc2f4e09..2b016c78d9 100644
--- a/lib/ansible/plugins/lookup/random_choice.py
+++ b/lib/ansible/plugins/lookup/random_choice.py
@@ -31,9 +31,9 @@ from ansible.plugins.lookup import LookupBase
# - two
# - three
+
class LookupModule(LookupBase):
def run(self, terms, inject=None, **kwargs):
- return [ random.choice(terms) ]
-
+ return [random.choice(terms)]
diff --git a/lib/ansible/plugins/lookup/redis_kv.py b/lib/ansible/plugins/lookup/redis_kv.py
index 982cceebef..bce62e0264 100644
--- a/lib/ansible/plugins/lookup/redis_kv.py
+++ b/lib/ansible/plugins/lookup/redis_kv.py
@@ -20,22 +20,22 @@ __metaclass__ = type
import os
import re
-HAVE_REDIS=False
+HAVE_REDIS = False
try:
import redis # https://github.com/andymccurdy/redis-py/
- HAVE_REDIS=True
+ HAVE_REDIS = True
except ImportError:
pass
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
+
# ==============================================================
# REDISGET: Obtain value from a GET on a Redis key. Terms
# expected: 0 = URL, 1 = Key
# URL may be empty, in which case redis://localhost:6379 assumed
# --------------------------------------------------------------
-
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
@@ -45,7 +45,7 @@ class LookupModule(LookupBase):
ret = []
for term in terms:
- (url,key) = term.split(',')
+ (url, key) = term.split(',')
if url == "":
url = 'redis://localhost:6379'
diff --git a/lib/ansible/plugins/lookup/sequence.py b/lib/ansible/plugins/lookup/sequence.py
index f5665b9629..3cdd1ef7e7 100644
--- a/lib/ansible/plugins/lookup/sequence.py
+++ b/lib/ansible/plugins/lookup/sequence.py
@@ -146,9 +146,9 @@ class LookupModule(LookupBase):
def sanity_check(self):
if self.count is None and self.end is None:
- raise AnsibleError( "must specify count or end in with_sequence")
+ raise AnsibleError("must specify count or end in with_sequence")
elif self.count is not None and self.end is not None:
- raise AnsibleError( "can't specify both count and end in with_sequence")
+ raise AnsibleError("can't specify both count and end in with_sequence")
elif self.count is not None:
# convert count to end
if self.count != 0:
diff --git a/lib/ansible/plugins/lookup/shelvefile.py b/lib/ansible/plugins/lookup/shelvefile.py
index e0ae4f9ad6..a210d1e3fe 100644
--- a/lib/ansible/plugins/lookup/shelvefile.py
+++ b/lib/ansible/plugins/lookup/shelvefile.py
@@ -38,7 +38,7 @@ class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
if not isinstance(terms, list):
- terms = [ terms ]
+ terms = [terms]
ret = []
diff --git a/lib/ansible/plugins/lookup/subelements.py b/lib/ansible/plugins/lookup/subelements.py
index 6ed44c0006..9e3a840124 100644
--- a/lib/ansible/plugins/lookup/subelements.py
+++ b/lib/ansible/plugins/lookup/subelements.py
@@ -33,8 +33,7 @@ class LookupModule(LookupBase):
def _raise_terms_error(msg=""):
raise AnsibleError(
- "subelements lookup expects a list of two or three items, "
- + msg)
+ "subelements lookup expects a list of two or three items, " + msg)
terms[0] = listify_lookup_plugin_terms(terms[0], templar=self._templar, loader=self._loader)
@@ -80,7 +79,7 @@ class LookupModule(LookupBase):
for subkey in subelements:
if subkey == subelements[-1]:
lastsubkey = True
- if not subkey in subvalue:
+ if subkey not in subvalue:
if skip_missing:
continue
else:
@@ -102,4 +101,3 @@ class LookupModule(LookupBase):
ret.append((item0, item1))
return ret
-
diff --git a/lib/ansible/plugins/lookup/template.py b/lib/ansible/plugins/lookup/template.py
index 3b51b37700..09fd355e3d 100644
--- a/lib/ansible/plugins/lookup/template.py
+++ b/lib/ansible/plugins/lookup/template.py
@@ -68,7 +68,7 @@ class LookupModule(LookupBase):
self._templar.set_available_variables(temp_vars)
# do the templating
- res = self._templar.template(template_data, preserve_trailing_newlines=True,convert_data=convert_data_p)
+ res = self._templar.template(template_data, preserve_trailing_newlines=True, convert_data=convert_data_p)
ret.append(res)
else:
raise AnsibleError("the template file %s could not be found for the lookup" % term)
diff --git a/lib/ansible/plugins/lookup/together.py b/lib/ansible/plugins/lookup/together.py
index 64bca90684..c8fe26c123 100644
--- a/lib/ansible/plugins/lookup/together.py
+++ b/lib/ansible/plugins/lookup/together.py
@@ -47,4 +47,3 @@ class LookupModule(LookupBase):
raise AnsibleError("with_together requires at least one element in each list")
return [self._flatten(x) for x in zip_longest(*my_list, fillvalue=None)]
-
diff --git a/lib/ansible/plugins/shell/__init__.py b/lib/ansible/plugins/shell/__init__.py
index 95f40cc52b..bf16fa41ab 100644
--- a/lib/ansible/plugins/shell/__init__.py
+++ b/lib/ansible/plugins/shell/__init__.py
@@ -36,16 +36,16 @@ class ShellBase(object):
if C.DEFAULT_MODULE_SET_LOCALE:
self.env.update(
dict(
- LANG = C.DEFAULT_MODULE_LANG,
- LC_ALL = C.DEFAULT_MODULE_LANG,
- LC_MESSAGES = C.DEFAULT_MODULE_LANG,
+ LANG=C.DEFAULT_MODULE_LANG,
+ LC_ALL=C.DEFAULT_MODULE_LANG,
+ LC_MESSAGES=C.DEFAULT_MODULE_LANG,
)
)
def env_prefix(self, **kwargs):
env = self.env.copy()
env.update(kwargs)
- return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k,v in env.items()])
+ return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
def join_path(self, *args):
return os.path.join(*args)
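A standalone sketch of the prefix env_prefix() builds from self.env plus keyword overrides, using Python 3's shlex.quote in place of the tree's six.moves shlex_quote/text_type:

from shlex import quote as shlex_quote

env = {'LANG': 'C', 'FOO': 'a b'}
prefix = ' '.join(['%s=%s' % (k, shlex_quote(str(v))) for k, v in env.items()])
# e.g. "LANG=C FOO='a b'"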
diff --git a/lib/ansible/plugins/shell/csh.py b/lib/ansible/plugins/shell/csh.py
index c4d6319dc5..8ed3decf49 100644
--- a/lib/ansible/plugins/shell/csh.py
+++ b/lib/ansible/plugins/shell/csh.py
@@ -19,6 +19,7 @@ __metaclass__ = type
from ansible.plugins.shell import ShellBase
+
class ShellModule(ShellBase):
# Common shell filenames that this plugin handles
diff --git a/lib/ansible/plugins/shell/fish.py b/lib/ansible/plugins/shell/fish.py
index 0e3a516d42..e70a8c206f 100644
--- a/lib/ansible/plugins/shell/fish.py
+++ b/lib/ansible/plugins/shell/fish.py
@@ -41,7 +41,7 @@ class ShellModule(ShModule):
def env_prefix(self, **kwargs):
env = self.env.copy()
env.update(kwargs)
- return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k,v in env.items()])
+ return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
# don't quote the cmd if it's an empty string, because this will break pipelining mode
diff --git a/lib/ansible/plugins/shell/powershell.py b/lib/ansible/plugins/shell/powershell.py
index d6153f01d5..9db9995c4e 100644
--- a/lib/ansible/plugins/shell/powershell.py
+++ b/lib/ansible/plugins/shell/powershell.py
@@ -95,7 +95,7 @@ end {
Write-Output $output
}
-''' # end exec_wrapper
+''' # end exec_wrapper
leaf_exec = br'''
Function Run($payload) {
@@ -144,7 +144,7 @@ Function Run($payload) {
$host.SetShouldExit($exit_code)
}
}
-''' # end leaf_exec
+''' # end leaf_exec
become_wrapper = br'''
@@ -434,7 +434,7 @@ Function Run($payload) {
}
-''' # end become_wrapper
+''' # end become_wrapper
async_wrapper = br'''
@@ -836,7 +836,7 @@ Function Run($payload) {
return $result_json
}
-''' # end async_wrapper
+''' # end async_wrapper
async_watchdog = br'''
Set-StrictMode -Version 2
@@ -964,7 +964,8 @@ Function Run($payload) {
#$rs.Close() | Out-Null
}
-''' # end async_watchdog
+''' # end async_watchdog
+
class ShellModule(object):
@@ -1184,7 +1185,10 @@ class ShellModule(object):
subs.append(('$', '`$'))
pattern = '|'.join('(%s)' % re.escape(p) for p, s in subs)
substs = [s for p, s in subs]
- replace = lambda m: substs[m.lastindex - 1]
+
+ def replace(m):
+ return substs[m.lastindex - 1]
+
return re.sub(pattern, replace, value)
def _encode_script(self, script, as_list=False, strict_mode=True, preserve_rc=True):
diff --git a/lib/ansible/plugins/shell/sh.py b/lib/ansible/plugins/shell/sh.py
index fe7e6345fa..b7acb66495 100644
--- a/lib/ansible/plugins/shell/sh.py
+++ b/lib/ansible/plugins/shell/sh.py
@@ -41,7 +41,6 @@ class ShellModule(ShellBase):
_SHELL_GROUP_LEFT = '('
_SHELL_GROUP_RIGHT = ')'
-
def checksum(self, path, python_interp):
# The following test needs to be SH-compliant. BASH-isms will
# not work if /bin/sh points to a non-BASH shell.
@@ -81,4 +80,3 @@ class ShellModule(ShellBase):
cmd = (" %s " % self._SHELL_OR).join(csums)
cmd = "%s; %s %s (echo \'0 \'%s)" % (test, cmd, self._SHELL_OR, shell_escaped_path)
return cmd
-
diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py
index 70fd6fb092..a26074b5b9 100644
--- a/lib/ansible/plugins/strategy/__init__.py
+++ b/lib/ansible/plugins/strategy/__init__.py
@@ -54,9 +54,11 @@ except ImportError:
__all__ = ['StrategyBase']
+
class StrategySentinel:
pass
+
# TODO: this should probably be in the plugins/__init__.py, with
# a smarter mechanism to set all of the attributes based on
# the loaders created there
@@ -69,12 +71,13 @@ class SharedPluginLoaderObj:
self.action_loader = action_loader
self.connection_loader = connection_loader
self.filter_loader = filter_loader
- self.test_loader = test_loader
+ self.test_loader = test_loader
self.lookup_loader = lookup_loader
self.module_loader = module_loader
-
_sentinel = StrategySentinel()
+
+
def results_thread_main(strategy):
while True:
try:
@@ -90,6 +93,7 @@ def results_thread_main(strategy):
except Queue.Empty:
pass
+
class StrategyBase:
'''
@@ -98,27 +102,27 @@ class StrategyBase:
'''
def __init__(self, tqm):
- self._tqm = tqm
- self._inventory = tqm.get_inventory()
- self._workers = tqm.get_workers()
+ self._tqm = tqm
+ self._inventory = tqm.get_inventory()
+ self._workers = tqm.get_workers()
self._notified_handlers = tqm._notified_handlers
self._listening_handlers = tqm._listening_handlers
- self._variable_manager = tqm.get_variable_manager()
- self._loader = tqm.get_loader()
- self._final_q = tqm._final_q
- self._step = getattr(tqm._options, 'step', False)
- self._diff = getattr(tqm._options, 'diff', False)
+ self._variable_manager = tqm.get_variable_manager()
+ self._loader = tqm.get_loader()
+ self._final_q = tqm._final_q
+ self._step = getattr(tqm._options, 'step', False)
+ self._diff = getattr(tqm._options, 'diff', False)
# Backwards compat: self._display isn't really needed, just import the global display and use that.
- self._display = display
+ self._display = display
# internal counters
- self._pending_results = 0
- self._cur_worker = 0
+ self._pending_results = 0
+ self._cur_worker = 0
# this dictionary is used to keep track of hosts that have
# outstanding tasks still in queue
- self._blocked_hosts = dict()
+ self._blocked_hosts = dict()
self._results = deque()
self._results_lock = threading.Condition(threading.Lock())
@@ -141,7 +145,7 @@ class StrategyBase:
# save the failed/unreachable hosts, as the run_handlers()
# method will clear that information during its execution
- failed_hosts = iterator.get_failed_hosts()
+ failed_hosts = iterator.get_failed_hosts()
unreachable_hosts = self._tqm._unreachable_hosts.keys()
display.debug("running handlers")
@@ -153,7 +157,7 @@ class StrategyBase:
# now update with the hosts (if any) that failed or were
# unreachable during the handler execution phase
- failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts())
+ failed_hosts = set(failed_hosts).union(iterator.get_failed_hosts())
unreachable_hosts = set(unreachable_hosts).union(self._tqm._unreachable_hosts.keys())
# return the appropriate code, depending on the status hosts after the run
@@ -216,7 +220,7 @@ class StrategyBase:
worker_prc = WorkerProcess(self._final_q, task_vars, host, task, play_context, self._loader, self._variable_manager, shared_loader_obj)
self._workers[self._cur_worker][0] = worker_prc
worker_prc.start()
- display.debug("worker is %d (out of %d available)" % (self._cur_worker+1, len(self._workers)))
+ display.debug("worker is %d (out of %d available)" % (self._cur_worker + 1, len(self._workers)))
queued = True
self._cur_worker += 1
if self._cur_worker >= len(self._workers):
@@ -260,7 +264,7 @@ class StrategyBase:
ret_results = []
def get_original_host(host_name):
- #FIXME: this should not need x2 _inventory
+ # FIXME: this should not need x2 _inventory
host_name = to_text(host_name)
if host_name in self._inventory.hosts:
return self._inventory.hosts[host_name]
@@ -293,7 +297,6 @@ class StrategyBase:
continue
return None
-
def search_handler_blocks_by_uuid(handler_uuid, handler_blocks):
for handler_block in handler_blocks:
for handler_task in handler_block.block:
@@ -429,7 +432,7 @@ class StrategyBase:
# loop over all of them instead of a single result
result_items = task_result._result.get('results', [])
else:
- result_items = [ task_result._result ]
+ result_items = [task_result._result]
for result_item in result_items:
if '_ansible_notify' in result_item:
@@ -546,7 +549,7 @@ class StrategyBase:
# If this is a role task, mark the parent role as being run (if
# the task was ok or failed, but not skipped or unreachable)
- if original_task._role is not None and role_ran: #TODO: and original_task.action != 'include_role':?
+ if original_task._role is not None and role_ran: # TODO: and original_task.action != 'include_role':?
# lookup the role in the ROLE_CACHE to make sure we're dealing
# with the correct object and mark it as executed
for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[original_task._role._role_name]):
@@ -555,7 +558,7 @@ class StrategyBase:
ret_results.append(task_result)
- if one_pass or max_passes is not None and (cur_pass+1) >= max_passes:
+ if one_pass or max_passes is not None and (cur_pass + 1) >= max_passes:
break
cur_pass += 1
@@ -594,12 +597,12 @@ class StrategyBase:
host_name = host_info.get('host_name')
# Check if host in inventory, add if not
- if not host_name in self._inventory.hosts:
+ if host_name not in self._inventory.hosts:
self._inventory.add_host(host_name, 'all')
new_host = self._inventory.hosts.get(host_name)
# Set/update the vars for this host
- new_host.vars = combine_vars(new_host.get_vars(), host_info.get('host_vars', dict()))
+ new_host.vars = combine_vars(new_host.get_vars(), host_info.get('host_vars', dict()))
new_groups = host_info.get('groups', [])
for group_name in new_groups:
@@ -733,10 +736,10 @@ class StrategyBase:
def _do_handler_run(self, handler, handler_name, iterator, play_context, notified_hosts=None):
# FIXME: need to use iterator.get_failed_hosts() instead?
- #if not len(self.get_hosts_remaining(iterator._play)):
- # self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
- # result = False
- # break
+ # if not len(self.get_hosts_remaining(iterator._play)):
+ # self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
+ # result = False
+ # break
saved_name = handler.name
handler.name = handler_name
self._tqm.send_callback('v2_playbook_on_handler_task_start', handler)
@@ -813,14 +816,14 @@ class StrategyBase:
def _take_step(self, task, host=None):
- ret=False
- msg=u'Perform task: %s ' % task
+ ret = False
+ msg = u'Perform task: %s ' % task
if host:
msg += u'on %s ' % host
msg += u'(N)o/(y)es/(c)ontinue: '
resp = display.prompt(msg)
- if resp.lower() in ['y','yes']:
+ if resp.lower() in ['y', 'yes']:
display.debug("User ran task")
ret = True
elif resp.lower() in ['c', 'continue']:
@@ -853,7 +856,7 @@ class StrategyBase:
msg = ''
if meta_action == 'noop':
# FIXME: issue a callback for the noop here?
- msg="noop"
+ msg = "noop"
elif meta_action == 'flush_handlers':
self.run_handlers(iterator, play_context)
msg = "ran handlers"
@@ -873,26 +876,26 @@ class StrategyBase:
self._tqm._failed_hosts.pop(host.name, False)
self._tqm._unreachable_hosts.pop(host.name, False)
iterator._host_states[host.name].fail_state = iterator.FAILED_NONE
- msg="cleared host errors"
+ msg = "cleared host errors"
else:
skipped = True
elif meta_action == 'end_play':
if _evaluate_conditional(target_host):
for host in self._inventory.get_hosts(iterator._play.hosts):
- if not host.name in self._tqm._unreachable_hosts:
+ if host.name not in self._tqm._unreachable_hosts:
iterator._host_states[host.name].run_state = iterator.ITERATING_COMPLETE
- msg="ending play"
+ msg = "ending play"
elif meta_action == 'reset_connection':
connection = connection_loader.get(play_context.connection, play_context, os.devnull)
if connection:
connection.reset()
- msg= 'reset connection'
+ msg = 'reset connection'
else:
- msg= 'no connection, nothing to reset'
+ msg = 'no connection, nothing to reset'
else:
raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
- result = { 'msg': msg }
+ result = {'msg': msg}
if skipped:
result['skipped'] = True
else:
diff --git a/lib/ansible/plugins/strategy/debug.py b/lib/ansible/plugins/strategy/debug.py
index 177655df6e..1a9f906740 100644
--- a/lib/ansible/plugins/strategy/debug.py
+++ b/lib/ansible/plugins/strategy/debug.py
@@ -78,9 +78,9 @@ class StrategyModule(LinearStrategyModule):
# rollback host state
self.curr_tqm.clear_failed_hosts()
iterator._host_states[self.curr_host.name] = prev_host_state
- if reduce(lambda total, res : res.is_failed() or total, results, False):
+ if reduce(lambda total, res: res.is_failed() or total, results, False):
self._tqm._stats.failures[self.curr_host.name] -= 1
- elif reduce(lambda total, res : res.is_unreachable() or total, results, False):
+ elif reduce(lambda total, res: res.is_unreachable() or total, results, False):
self._tqm._stats.dark[self.curr_host.name] -= 1
# redo
@@ -94,7 +94,7 @@ class StrategyModule(LinearStrategyModule):
return results
def _need_debug(self, results):
- return reduce(lambda total, res : res.is_failed() or res.is_unreachable() or total, results, False)
+ return reduce(lambda total, res: res.is_failed() or res.is_unreachable() or total, results, False)
class Debugger(cmd.Cmd):
diff --git a/lib/ansible/plugins/strategy/free.py b/lib/ansible/plugins/strategy/free.py
index fd53f48b03..9b009bc8f5 100644
--- a/lib/ansible/plugins/strategy/free.py
+++ b/lib/ansible/plugins/strategy/free.py
@@ -79,7 +79,7 @@ class StrategyModule(StrategyBase):
break
work_to_do = False # assume we have no more work to do
- starting_host = last_host # save current position so we know when we've looped back around and need to break
+ starting_host = last_host # save current position so we know when we've looped back around and need to break
# try and find an unblocked host with a task to run
host_results = []
@@ -155,8 +155,8 @@ class StrategyModule(StrategyBase):
# handle step if needed, skip meta actions as they are used internally
if not self._step or self._take_step(task, host_name):
if task.any_errors_fatal:
- display.warning("Using any_errors_fatal with the free strategy is not supported,"
- " as tasks are executed independently on each host")
+ display.warning("Using any_errors_fatal with the free strategy is not supported, "
+ "as tasks are executed independently on each host")
self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
self._queue_task(host, task, task_vars, play_context)
del task_vars
diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py
index 4b361562a7..c2d3609ebe 100644
--- a/lib/ansible/plugins/strategy/linear.py
+++ b/lib/ansible/plugins/strategy/linear.py
@@ -71,7 +71,7 @@ class StrategyModule(StrategyBase):
display.debug("done building task lists")
num_setups = 0
- num_tasks = 0
+ num_tasks = 0
num_rescue = 0
num_always = 0
@@ -84,7 +84,7 @@ class StrategyModule(StrategyBase):
try:
lowest_cur_block = min(
(s.cur_block for h, (s, t) in host_tasks_to_run
- if s.run_state != PlayIterator.ITERATING_COMPLETE))
+ if s.run_state != PlayIterator.ITERATING_COMPLETE))
except ValueError:
lowest_cur_block = None
else:
@@ -192,7 +192,7 @@ class StrategyModule(StrategyBase):
host_tasks = self._get_next_task_lockstep(hosts_left, iterator)
# skip control
- skip_rest = False
+ skip_rest = False
choose_step = True
# flag set if task is set to any_errors_fatal
@@ -303,7 +303,7 @@ class StrategyModule(StrategyBase):
loop_var = hr._task.loop_control.loop_var or 'item'
include_results = hr._result.get('results', [])
else:
- include_results = [ hr._result ]
+ include_results = [hr._result]
for include_result in include_results:
if 'skipped' in include_result and include_result['skipped'] or 'failed' in include_result and include_result['failed']:
@@ -363,7 +363,7 @@ class StrategyModule(StrategyBase):
display.debug("done filtering new block on tags")
noop_block = Block(parent_block=task._parent)
- noop_block.block = [noop_task for t in new_block.block]
+ noop_block.block = [noop_task for t in new_block.block]
noop_block.always = [noop_task for t in new_block.always]
noop_block.rescue = [noop_task for t in new_block.rescue]
diff --git a/lib/ansible/plugins/test/core.py b/lib/ansible/plugins/test/core.py
index 59244dc8c1..7440b2197f 100644
--- a/lib/ansible/plugins/test/core.py
+++ b/lib/ansible/plugins/test/core.py
@@ -26,38 +26,44 @@ from distutils.version import LooseVersion, StrictVersion
from ansible import errors
+
def failed(*a, **kw):
''' Test if task result yields failed '''
item = a[0]
if not isinstance(item, MutableMapping):
raise errors.AnsibleFilterError("|failed expects a dictionary")
- rc = item.get('rc',0)
- failed = item.get('failed',False)
+ rc = item.get('rc', 0)
+ failed = item.get('failed', False)
if rc != 0 or failed:
return True
else:
return False
+
def success(*a, **kw):
''' Test if task result yields success '''
return not failed(*a, **kw)
+
def changed(*a, **kw):
''' Test if task result yields changed '''
item = a[0]
if not isinstance(item, MutableMapping):
raise errors.AnsibleFilterError("|changed expects a dictionary")
- if not 'changed' in item:
+ if 'changed' not in item:
changed = False
- if ('results' in item # some modules return a 'results' key
- and isinstance(item['results'], MutableSequence)
- and isinstance(item['results'][0], MutableMapping)):
+ if (
+ 'results' in item and # some modules return a 'results' key
+ isinstance(item['results'], MutableSequence) and
+ isinstance(item['results'][0], MutableMapping)
+ ):
for result in item['results']:
changed = changed or result.get('changed', False)
else:
changed = item.get('changed', False)
return changed
+
def skipped(*a, **kw):
''' Test if task result yields skipped '''
item = a[0]
@@ -66,6 +72,7 @@ def skipped(*a, **kw):
skipped = item.get('skipped', False)
return skipped
+
def regex(value='', pattern='', ignorecase=False, multiline=False, match_type='search'):
''' Expose `re` as a boolean filter using the `search` method by default.
This is likely only useful for `search` and `match` which already
@@ -80,21 +87,24 @@ def regex(value='', pattern='', ignorecase=False, multiline=False, match_type='s
_bool = __builtins__.get('bool')
return _bool(getattr(_re, match_type, 'search')(value))
+
def match(value, pattern='', ignorecase=False, multiline=False):
''' Perform a `re.match` returning a boolean '''
return regex(value, pattern, ignorecase, multiline, 'match')
+
def search(value, pattern='', ignorecase=False, multiline=False):
''' Perform a `re.search` returning a boolean '''
return regex(value, pattern, ignorecase, multiline, 'search')
+
def version_compare(value, version, operator='eq', strict=False):
''' Perform a version comparison on a value '''
op_map = {
- '==': 'eq', '=': 'eq', 'eq': 'eq',
- '<': 'lt', 'lt': 'lt',
+ '==': 'eq', '=': 'eq', 'eq': 'eq',
+ '<': 'lt', 'lt': 'lt',
'<=': 'le', 'le': 'le',
- '>': 'gt', 'gt': 'gt',
+ '>': 'gt', 'gt': 'gt',
'>=': 'ge', 'ge': 'ge',
'!=': 'ne', '<>': 'ne', 'ne': 'ne'
}
@@ -115,20 +125,21 @@ def version_compare(value, version, operator='eq', strict=False):
except Exception as e:
raise errors.AnsibleFilterError('Version comparison: %s' % e)
+
class TestModule(object):
''' Ansible core jinja2 tests '''
def tests(self):
return {
# failure testing
- 'failed' : failed,
- 'succeeded' : success,
+ 'failed': failed,
+ 'succeeded': success,
# changed testing
- 'changed' : changed,
+ 'changed': changed,
# skip testing
- 'skipped' : skipped,
+ 'skipped': skipped,
# regex
'match': match,
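
For context on the `changed` test that this core.py hunk reformats: it folds a per-item `results` list (returned by modules run in a loop) into a single boolean. A simplified, standalone version of that logic (an illustration, not the plugin itself):

    def result_changed(result):
        """Simplified mirror of the |changed test: honour a per-item
        'results' list when present, else fall back to the top-level flag."""
        if 'changed' not in result:
            items = result.get('results')
            if isinstance(items, list) and items and isinstance(items[0], dict):
                return any(item.get('changed', False) for item in items)
            return result.get('changed', False)
        return result['changed']

    print(result_changed({'results': [{'changed': False}, {'changed': True}]}))  # True
    print(result_changed({'changed': False}))                                    # False
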
diff --git a/lib/ansible/plugins/test/files.py b/lib/ansible/plugins/test/files.py
index 5ab5788e01..9d6695fc5e 100644
--- a/lib/ansible/plugins/test/files.py
+++ b/lib/ansible/plugins/test/files.py
@@ -22,20 +22,21 @@ __metaclass__ = type
from os.path import isdir, isfile, isabs, exists, lexists, islink, samefile, ismount
from ansible import errors
+
class TestModule(object):
''' Ansible file jinja2 tests '''
def tests(self):
return {
# file testing
- 'is_dir' : isdir,
- 'is_file' : isfile,
- 'is_link' : islink,
- 'exists' : exists,
- 'link_exists' : lexists,
+ 'is_dir': isdir,
+ 'is_file': isfile,
+ 'is_link': islink,
+ 'exists': exists,
+ 'link_exists': lexists,
# path testing
- 'is_abs' : isabs,
- 'is_same_file' : samefile,
- 'is_mount' : ismount,
+ 'is_abs': isabs,
+ 'is_same_file': samefile,
+ 'is_mount': ismount,
}
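
The files.py change is purely cosmetic (spacing around the dict colons). As a hedged illustration of how a `tests()` mapping of this shape plugs into Jinja2 outside of Ansible (a plain Jinja2 Environment, assuming the jinja2 package is available):

    from os.path import isdir, isfile
    from jinja2 import Environment

    env = Environment()
    # Register plain os.path predicates as Jinja2 tests -- the same shape of
    # mapping that the TestModule.tests() methods return.
    env.tests.update({'is_dir': isdir, 'is_file': isfile})

    template = env.from_string("{{ '/tmp' is is_dir }} {{ '/tmp' is is_file }}")
    print(template.render())  # typically "True False"
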
diff --git a/lib/ansible/plugins/test/mathstuff.py b/lib/ansible/plugins/test/mathstuff.py
index c736bbe6b6..0a64f52674 100644
--- a/lib/ansible/plugins/test/mathstuff.py
+++ b/lib/ansible/plugins/test/mathstuff.py
@@ -20,18 +20,22 @@ __metaclass__ = type
import math
+
def issubset(a, b):
return set(a) <= set(b)
+
def issuperset(a, b):
return set(a) >= set(b)
+
def isnotanumber(x):
try:
return math.isnan(x)
except TypeError:
return False
+
class TestModule:
''' Ansible math jinja2 tests '''
diff --git a/lib/ansible/plugins/vars/__init__.py b/lib/ansible/plugins/vars/__init__.py
index 4585058fa1..6540c47fc0 100644
--- a/lib/ansible/plugins/vars/__init__.py
+++ b/lib/ansible/plugins/vars/__init__.py
@@ -26,6 +26,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class BaseVarsPlugin(object):
"""
@@ -39,4 +40,3 @@ class BaseVarsPlugin(object):
def get_vars(self, loader, path, entities):
""" Gets variables. """
self._basedir = basedir(path)
-
diff --git a/lib/ansible/release.py b/lib/ansible/release.py
index bcbb8d785c..e38da10d35 100644
--- a/lib/ansible/release.py
+++ b/lib/ansible/release.py
@@ -20,4 +20,4 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.4.0'
-__author__ = 'Ansible, Inc.'
+__author__ = 'Ansible, Inc.'
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 23300c1f4c..c9129a31d5 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -36,10 +36,10 @@ except ImportError:
from sha import sha as sha1
from jinja2 import Environment
-from jinja2.loaders import FileSystemLoader
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
-from jinja2.utils import concat as j2_concat
+from jinja2.loaders import FileSystemLoader
from jinja2.runtime import Context, StrictUndefined
+from jinja2.utils import concat as j2_concat
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
@@ -64,7 +64,7 @@ __all__ = ['Templar', 'generate_ansible_template_vars']
# expand is just a single variable name.
# Primitive Types which we don't want Jinja to convert to strings.
-NON_TEMPLATED_TYPES = ( bool, Number )
+NON_TEMPLATED_TYPES = (bool, Number)
JINJA2_OVERRIDE = '#jinja2:'
@@ -78,20 +78,20 @@ def generate_ansible_template_vars(path):
template_uid = os.stat(b_path).st_uid
temp_vars = {}
- temp_vars['template_host'] = os.uname()[1]
- temp_vars['template_path'] = b_path
- temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(b_path))
- temp_vars['template_uid'] = template_uid
+ temp_vars['template_host'] = os.uname()[1]
+ temp_vars['template_path'] = b_path
+ temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(b_path))
+ temp_vars['template_uid'] = template_uid
temp_vars['template_fullpath'] = os.path.abspath(path)
temp_vars['template_run_date'] = datetime.datetime.now()
managed_default = C.DEFAULT_MANAGED_STR
managed_str = managed_default.format(
- host = temp_vars['template_host'],
- uid = temp_vars['template_uid'],
- file = temp_vars['template_path'],
+ host=temp_vars['template_host'],
+ uid=temp_vars['template_uid'],
+ file=temp_vars['template_path'],
)
- temp_vars['ansible_managed'] = time.strftime( managed_str, time.localtime(os.path.getmtime(b_path)))
+ temp_vars['ansible_managed'] = time.strftime(managed_str, time.localtime(os.path.getmtime(b_path)))
return temp_vars
@@ -130,7 +130,7 @@ def _escape_backslashes(data, jinja_env):
new_data.append(token[2])
elif in_var and token[1] == 'string':
# Double backslashes only if we're inside of a jinja2 variable
- new_data.append(token[2].replace('\\','\\\\'))
+ new_data.append(token[2].replace('\\', '\\\\'))
else:
new_data.append(token[2])
@@ -148,7 +148,7 @@ def _count_newlines_from_end(in_str):
try:
i = len(in_str)
- j = i -1
+ j = i - 1
while in_str[j] == '\n':
j -= 1
return i - 1 - j
@@ -156,6 +156,7 @@ def _count_newlines_from_end(in_str):
# Uncommon cases: zero length string and string containing only newlines
return i
+
class AnsibleContext(Context):
'''
A custom context, which intercepts resolve() calls and sets a flag
@@ -204,6 +205,7 @@ class AnsibleContext(Context):
self._update_unsafe(val)
return val
+
class AnsibleEnvironment(Environment):
'''
Our custom environment, which simply allows us to override the class-level
@@ -212,17 +214,18 @@ class AnsibleEnvironment(Environment):
context_class = AnsibleContext
template_class = AnsibleJ2Template
+
class Templar:
'''
The main class for templating, with the main entry-point of template().
'''
def __init__(self, loader, shared_loader_obj=None, variables=dict()):
- self._loader = loader
- self._filters = None
- self._tests = None
+ self._loader = loader
+ self._filters = None
+ self._tests = None
self._available_variables = variables
- self._cached_result = {}
+ self._cached_result = {}
if loader:
self._basedir = loader.get_basedir()
@@ -231,17 +234,17 @@ class Templar:
if shared_loader_obj:
self._filter_loader = getattr(shared_loader_obj, 'filter_loader')
- self._test_loader = getattr(shared_loader_obj, 'test_loader')
+ self._test_loader = getattr(shared_loader_obj, 'test_loader')
self._lookup_loader = getattr(shared_loader_obj, 'lookup_loader')
else:
self._filter_loader = filter_loader
- self._test_loader = test_loader
+ self._test_loader = test_loader
self._lookup_loader = lookup_loader
# flags to determine whether certain failures during templating
# should result in fatal errors being raised
- self._fail_on_lookup_errors = True
- self._fail_on_filter_errors = True
+ self._fail_on_lookup_errors = True
+ self._fail_on_filter_errors = True
self._fail_on_undefined_errors = C.DEFAULT_UNDEFINED_VAR_BEHAVIOR
self.environment = AnsibleEnvironment(
@@ -257,7 +260,7 @@ class Templar:
self.SINGLE_VAR = re.compile(r"^%s\s*(\w*)\s*%s$" % (self.environment.variable_start_string, self.environment.variable_end_string))
- self._clean_regex = re.compile(r'(?:%s|%s|%s|%s)' % (
+ self._clean_regex = re.compile(r'(?:%s|%s|%s|%s)' % (
self.environment.variable_start_string,
self.environment.block_start_string,
self.environment.block_end_string,
@@ -329,7 +332,7 @@ class Templar:
elif isinstance(orig_data, dict):
clean_dict = {}
for k in orig_data:
- clean_dict[self._clean_data(k)] = self._clean_data(orig_data[k])
+ clean_dict[self._clean_data(k)] = self._clean_data(orig_data[k])
ret = clean_dict
elif isinstance(orig_data, string_types):
@@ -384,7 +387,7 @@ class Templar:
assert isinstance(variables, dict)
self._available_variables = variables
- self._cached_result = {}
+ self._cached_result = {}
def template(self, variable, convert_bare=False, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None,
convert_data=True, static_vars=[''], cache=True, bare_deprecated=True, disable_lookups=False):
@@ -476,7 +479,7 @@ class Templar:
fail_on_undefined=fail_on_undefined,
overrides=overrides,
disable_lookups=disable_lookups,
- ) for v in variable]
+ ) for v in variable]
elif isinstance(variable, dict):
d = {}
# we don't use iteritems() here to avoid problems if the underlying dict
@@ -489,7 +492,7 @@ class Templar:
fail_on_undefined=fail_on_undefined,
overrides=overrides,
disable_lookups=disable_lookups,
- )
+ )
else:
d[k] = variable[k]
return d
@@ -555,8 +558,8 @@ class Templar:
if (contains_filters or first_part in self._available_variables) and self.environment.variable_start_string not in variable:
if bare_deprecated:
display.deprecated("Using bare variables is deprecated."
- " Update your playbooks so that the environment value uses the full variable syntax ('%s%s%s')" %
- (self.environment.variable_start_string, variable, self.environment.variable_end_string), version='2.7')
+ " Update your playbooks so that the environment value uses the full variable syntax ('%s%s%s')" %
+ (self.environment.variable_start_string, variable, self.environment.variable_end_string), version='2.7')
return "%s%s%s" % (self.environment.variable_start_string, variable, self.environment.variable_end_string)
# the variable didn't meet the conditions to be converted,
@@ -629,9 +632,9 @@ class Templar:
if data.startswith(JINJA2_OVERRIDE):
eol = data.find('\n')
line = data[len(JINJA2_OVERRIDE):eol]
- data = data[eol+1:]
+ data = data[eol + 1:]
for pair in line.split(','):
- (key,val) = pair.split(':')
+ (key, val) = pair.split(':')
key = key.strip()
setattr(myenv, key, ast.literal_eval(val.strip()))
@@ -671,12 +674,12 @@ class Templar:
res = wrap_var(res)
except TypeError as te:
if 'StrictUndefined' in to_native(te):
- errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_native(data)
+ errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_native(data)
errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_native(te)
raise AnsibleUndefinedVariable(errmsg)
else:
display.debug("failing because of a type error, template data is: %s" % to_native(data))
- raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data),to_native(te)))
+ raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data), to_native(te)))
if preserve_trailing_newlines:
# The low level calls above do not preserve the newline
@@ -699,7 +702,7 @@ class Templar:
if fail_on_undefined:
raise AnsibleUndefinedVariable(e)
else:
- #TODO: return warning about undefined var
+ # TODO: return warning about undefined var
return data
# for backwards compatibility in case anyone is using old private method directly
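
Most of the template/__init__.py hunks above are whitespace fixes, but the one around the `#jinja2:` override header is worth a standalone look. A simplified sketch of parsing that header line into per-template Jinja2 overrides (the helper name and the maxsplit on ':' are liberties taken here, not the code in the patch):

    import ast

    JINJA2_OVERRIDE = '#jinja2:'

    def parse_override_header(data):
        """Split a leading '#jinja2: key:value, ...' line into an overrides
        dict plus the remaining template body (simplified illustration)."""
        overrides = {}
        if data.startswith(JINJA2_OVERRIDE):
            eol = data.find('\n')
            line = data[len(JINJA2_OVERRIDE):eol]
            data = data[eol + 1:]
            for pair in line.split(','):
                key, val = pair.split(':', 1)
                overrides[key.strip()] = ast.literal_eval(val.strip())
        return overrides, data

    header = "#jinja2: variable_start_string:'[%', variable_end_string:'%]'\nHello [% name %]\n"
    print(parse_override_header(header)[0])
    # -> {'variable_start_string': '[%', 'variable_end_string': '%]'}
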
diff --git a/lib/ansible/template/safe_eval.py b/lib/ansible/template/safe_eval.py
index efe5600f5a..2f819b40b1 100644
--- a/lib/ansible/template/safe_eval.py
+++ b/lib/ansible/template/safe_eval.py
@@ -14,6 +14,7 @@
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
@@ -54,7 +55,7 @@ def safe_eval(expr, locals={}, include_exceptions=False):
(
ast.Add,
ast.BinOp,
- #ast.Call,
+ # ast.Call,
ast.Compare,
ast.Dict,
ast.Div,
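
safe_eval.py works by whitelisting AST node types before evaluating an expression. As a toy sketch of that whitelist technique in isolation (far more restrictive and far simpler than Ansible's actual safe_eval):

    import ast

    # Toy whitelist: literals and basic arithmetic only.
    SAFE_NODES = (
        ast.Expression, ast.BinOp, ast.UnaryOp, ast.Add, ast.Sub, ast.Mult,
        ast.Div, ast.USub, ast.Constant, ast.List, ast.Tuple, ast.Dict, ast.Load,
    )

    def tiny_safe_eval(expr):
        """Parse the expression, refuse any AST node outside the whitelist,
        then evaluate it with builtins stripped out."""
        tree = ast.parse(expr, mode='eval')
        for node in ast.walk(tree):
            if not isinstance(node, SAFE_NODES):
                raise ValueError("unsafe construct: %s" % type(node).__name__)
        return eval(compile(tree, '<expr>', 'eval'), {'__builtins__': {}}, {})

    print(tiny_safe_eval("[1, 2] + [3 * 4]"))  # -> [1, 2, 12]
    # tiny_safe_eval("__import__('os')")       # -> ValueError: unsafe construct: Call
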
diff --git a/lib/ansible/template/template.py b/lib/ansible/template/template.py
index 55936f42f7..50bf2a2dfa 100644
--- a/lib/ansible/template/template.py
+++ b/lib/ansible/template/template.py
@@ -34,4 +34,3 @@ class AnsibleJ2Template(jinja2.environment.Template):
def new_context(self, vars=None, shared=False, locals=None):
return self.environment.context_class(self.environment, vars.add_locals(locals), self.name, self.blocks)
-
diff --git a/lib/ansible/template/vars.py b/lib/ansible/template/vars.py
index bc36046a10..854af5be95 100644
--- a/lib/ansible/template/vars.py
+++ b/lib/ansible/template/vars.py
@@ -47,8 +47,8 @@ class AnsibleJ2Vars:
self._templar = templar
self._globals = globals
- self._extras = extras
- self._locals = dict()
+ self._extras = extras
+ self._locals = dict()
if isinstance(locals, dict):
for key, val in iteritems(locals):
if val is not missing:
diff --git a/lib/ansible/vars/manager.py b/lib/ansible/vars/manager.py
index 963797d163..c9a1faf5c4 100644
--- a/lib/ansible/vars/manager.py
+++ b/lib/ansible/vars/manager.py
@@ -61,7 +61,7 @@ def preprocess_vars(a):
if a is None:
return None
elif not isinstance(a, list):
- data = [ a ]
+ data = [a]
else:
data = a
@@ -111,15 +111,15 @@ class VariableManager:
def __getstate__(self):
data = dict(
- fact_cache = self._fact_cache,
- np_fact_cache = self._nonpersistent_fact_cache,
- vars_cache = self._vars_cache,
- extra_vars = self._extra_vars,
- host_vars_files = self._host_vars_files,
- group_vars_files = self._group_vars_files,
- omit_token = self._omit_token,
- options_vars = self._options_vars,
- inventory = self._inventory,
+ fact_cache=self._fact_cache,
+ np_fact_cache=self._nonpersistent_fact_cache,
+ vars_cache=self._vars_cache,
+ extra_vars=self._extra_vars,
+ host_vars_files=self._host_vars_files,
+ group_vars_files=self._group_vars_files,
+ omit_token=self._omit_token,
+ options_vars=self._options_vars,
+ inventory=self._inventory,
)
return data
@@ -169,7 +169,7 @@ class VariableManager:
if a is None:
return None
elif not isinstance(a, list):
- data = [ a ]
+ data = [a]
else:
data = a
@@ -223,20 +223,20 @@ class VariableManager:
all_vars = combine_vars(all_vars, task._role.get_default_vars(dep_chain=task.get_dep_chain()))
if host:
- ### INIT WORK (use unsafe as we are going to copy/merge vars, no need to x2 copy)
+ # INIT WORK (use unsafe as we are going to copy/merge vars, no need to x2 copy)
# basedir, THE 'all' group and the rest of groups for a host, used below
basedir = self._loader.get_basedir()
all_group = self._inventory.groups.get('all')
host_groups = sort_groups([g for g in host.get_groups() if g.name not in ['all']])
- ### internal fuctions that actually do the work ###
+ # internal functions that actually do the work
def _plugins_inventory(entities):
''' merges all entities by inventory source '''
data = {}
for inventory_dir in self._inventory._sources:
- if ',' in inventory_dir: # skip host lists
+ if ',' in inventory_dir: # skip host lists
continue
- elif not os.path.isdir(inventory_dir): # always pass 'inventory directory'
+ elif not os.path.isdir(inventory_dir): # always pass 'inventory directory'
inventory_dir = os.path.dirname(inventory_dir)
for plugin in vars_loader.all():
@@ -251,7 +251,7 @@ class VariableManager:
data = combine_vars(data, plugin.get_vars(self._loader, basedir, entities))
return data
- ### configurable functions that are sortable via config ###
+ # configurable functions that are sortable via config
def all_inventory():
return all_group.get_vars()
@@ -424,7 +424,7 @@ class VariableManager:
if host:
# host already provides some magic vars via host.get_vars()
if self._inventory:
- variables['groups'] = self._inventory.get_groups_dict()
+ variables['groups'] = self._inventory.get_groups_dict()
if play:
variables['role_names'] = [r._role_name for r in play.roles]