-rwxr-xr-x  bin/ansible | 12
-rw-r--r--  docsite/rst/developing_modules_python3.rst | 22
-rwxr-xr-x  hacking/module_formatter.py | 5
-rw-r--r--  lib/ansible/cli/__init__.py | 4
-rw-r--r--  lib/ansible/cli/adhoc.py | 6
-rw-r--r--  lib/ansible/cli/console.py | 25
-rw-r--r--  lib/ansible/cli/galaxy.py | 19
-rw-r--r--  lib/ansible/cli/vault.py | 4
-rw-r--r--  lib/ansible/errors/__init__.py | 13
-rw-r--r--  lib/ansible/executor/module_common.py | 42
-rw-r--r--  lib/ansible/executor/playbook_executor.py | 18
-rw-r--r--  lib/ansible/executor/process/worker.py | 19
-rw-r--r--  lib/ansible/executor/task_executor.py | 37
-rw-r--r--  lib/ansible/executor/task_queue_manager.py | 22
-rw-r--r--  lib/ansible/galaxy/api.py | 10
-rw-r--r--  lib/ansible/inventory/__init__.py | 45
-rw-r--r--  lib/ansible/inventory/ini.py | 10
-rw-r--r--  lib/ansible/inventory/script.py | 12
-rw-r--r--  lib/ansible/module_utils/_text.py | 24
-rw-r--r--  lib/ansible/module_utils/basic.py | 14
-rw-r--r--  lib/ansible/parsing/dataloader.py | 61
-rw-r--r--  lib/ansible/parsing/splitter.py | 19
-rw-r--r--  lib/ansible/parsing/vault/__init__.py | 34
-rw-r--r--  lib/ansible/parsing/yaml/constructor.py | 8
-rw-r--r--  lib/ansible/parsing/yaml/objects.py | 3
-rw-r--r--  lib/ansible/playbook/base.py | 24
-rw-r--r--  lib/ansible/playbook/task.py | 26
-rw-r--r--  lib/ansible/plugins/__init__.py | 29
-rw-r--r--  lib/ansible/plugins/action/__init__.py | 62
-rw-r--r--  lib/ansible/plugins/action/assemble.py | 6
-rw-r--r--  lib/ansible/plugins/action/async.py | 9
-rw-r--r--  lib/ansible/plugins/action/copy.py | 20
-rw-r--r--  lib/ansible/plugins/action/debug.py | 4
-rw-r--r--  lib/ansible/plugins/action/fetch.py | 4
-rw-r--r--  lib/ansible/plugins/action/include_vars.py | 4
-rw-r--r--  lib/ansible/plugins/action/net_config.py | 9
-rw-r--r--  lib/ansible/plugins/action/net_template.py | 10
-rw-r--r--  lib/ansible/plugins/action/patch.py | 6
-rw-r--r--  lib/ansible/plugins/action/script.py | 7
-rw-r--r--  lib/ansible/plugins/action/template.py | 13
-rw-r--r--  lib/ansible/plugins/action/unarchive.py | 7
-rw-r--r--  lib/ansible/plugins/action/win_reboot.py | 17
-rw-r--r--  lib/ansible/plugins/cache/jsonfile.py | 2
-rw-r--r--  lib/ansible/plugins/callback/__init__.py | 20
-rw-r--r--  lib/ansible/plugins/callback/junit.py | 5
-rw-r--r--  lib/ansible/plugins/callback/log_plays.py | 3
-rw-r--r--  lib/ansible/plugins/callback/mail.py | 5
-rw-r--r--  lib/ansible/plugins/callback/tree.py | 5
-rw-r--r--  lib/ansible/plugins/connection/__init__.py | 8
-rw-r--r--  lib/ansible/plugins/connection/accelerate.py | 7
-rw-r--r--  lib/ansible/plugins/connection/chroot.py | 11
-rw-r--r--  lib/ansible/plugins/connection/docker.py | 13
-rw-r--r--  lib/ansible/plugins/connection/jail.py | 9
-rw-r--r--  lib/ansible/plugins/connection/libvirt_lxc.py | 9
-rw-r--r--  lib/ansible/plugins/connection/local.py | 13
-rw-r--r--  lib/ansible/plugins/connection/lxc.py | 22
-rw-r--r--  lib/ansible/plugins/connection/lxd.py | 16
-rw-r--r--  lib/ansible/plugins/connection/paramiko_ssh.py | 11
-rw-r--r--  lib/ansible/plugins/connection/ssh.py | 24
-rw-r--r--  lib/ansible/plugins/connection/winrm.py | 64
-rw-r--r--  lib/ansible/plugins/connection/zone.py | 3
-rw-r--r--  lib/ansible/plugins/filter/core.py | 26
-rw-r--r--  lib/ansible/plugins/lookup/__init__.py | 4
-rw-r--r--  lib/ansible/plugins/lookup/csvfile.py | 7
-rw-r--r--  lib/ansible/plugins/lookup/fileglob.py | 7
-rw-r--r--  lib/ansible/plugins/lookup/filetree.py | 17
-rw-r--r--  lib/ansible/plugins/lookup/ini.py | 8
-rw-r--r--  lib/ansible/plugins/lookup/shelvefile.py | 6
-rw-r--r--  lib/ansible/plugins/lookup/template.py | 6
-rw-r--r--  lib/ansible/plugins/lookup/url.py | 8
-rw-r--r--  lib/ansible/plugins/shell/powershell.py | 13
-rw-r--r--  lib/ansible/plugins/strategy/__init__.py | 23
-rw-r--r--  lib/ansible/plugins/strategy/free.py | 15
-rw-r--r--  lib/ansible/plugins/strategy/linear.py | 7
-rw-r--r--  lib/ansible/template/__init__.py | 40
-rw-r--r--  lib/ansible/template/vars.py | 6
-rw-r--r--  lib/ansible/utils/display.py | 14
-rw-r--r--  lib/ansible/utils/hashing.py | 8
-rw-r--r--  lib/ansible/utils/path.py | 14
-rw-r--r--  lib/ansible/utils/shlex.py | 5
-rw-r--r--  lib/ansible/utils/unicode.py | 276
-rw-r--r--  lib/ansible/utils/vars.py | 12
-rw-r--r--  lib/ansible/vars/unsafe_proxy.py | 13
-rw-r--r--  test/units/mock/procenv.py | 5
-rw-r--r--  test/units/parsing/vault/test_vault.py | 3
-rw-r--r--  test/units/parsing/vault/test_vault_editor.py | 28
-rw-r--r--  test/units/parsing/yaml/test_loader.py | 4
-rw-r--r--  test/units/plugins/action/test_action.py | 18
-rw-r--r--  test/units/plugins/connections/test_connection_ssh.py | 41
89 files changed, 747 insertions, 882 deletions
diff --git a/bin/ansible b/bin/ansible
index be4edc2308..c7abff0180 100755
--- a/bin/ansible
+++ b/bin/ansible
@@ -43,7 +43,7 @@ from multiprocessing import Lock
import ansible.constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.utils.display import Display
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
########################################
@@ -97,10 +97,10 @@ if __name__ == '__main__':
except AnsibleOptionsError as e:
cli.parser.print_help()
- display.error(to_unicode(e), wrap_text=False)
+ display.error(to_text(e), wrap_text=False)
exit_code = 5
except AnsibleParserError as e:
- display.error(to_unicode(e), wrap_text=False)
+ display.error(to_text(e), wrap_text=False)
exit_code = 4
# TQM takes care of these, but leaving comment to reserve the exit codes
# except AnsibleHostUnreachable as e:
@@ -110,16 +110,16 @@ if __name__ == '__main__':
# display.error(str(e))
# exit_code = 2
except AnsibleError as e:
- display.error(to_unicode(e), wrap_text=False)
+ display.error(to_text(e), wrap_text=False)
exit_code = 1
except KeyboardInterrupt:
display.error("User interrupted execution")
exit_code = 99
except Exception as e:
have_cli_options = cli is not None and cli.options is not None
- display.error("Unexpected Exception: %s" % to_unicode(e), wrap_text=False)
+ display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False)
if not have_cli_options or have_cli_options and cli.options.verbosity > 2:
- display.display(u"the full traceback was:\n\n%s" % to_unicode(traceback.format_exc()))
+ display.display(u"the full traceback was:\n\n%s" % to_text(traceback.format_exc()))
else:
display.display("to see the full traceback, use -vvv")
exit_code = 250
diff --git a/docsite/rst/developing_modules_python3.rst b/docsite/rst/developing_modules_python3.rst
index 26fdc9ad7f..901dfaed17 100644
--- a/docsite/rst/developing_modules_python3.rst
+++ b/docsite/rst/developing_modules_python3.rst
@@ -215,6 +215,28 @@ Python3. We'll need to gather experience to see if this is going to work out
well for modules as well or if we should give the module_utils API explicit
switches so that modules can choose to operate with text type all of the time.
+Helpers
+~~~~~~~
+
+For converting between bytes, text, and native strings we have three helper
+functions. These are :func:`ansible.module_utils._text.to_bytes`,
+:func:`ansible.module_utils._text.to_native`, and
+:func:`ansible.module_utils._text.to_text`. These are similar to using
+``bytes.decode()`` and ``unicode.encode()`` with a few differences.
+
+* By default they try very hard not to traceback.
+* The default encoding is "utf-8".
+* There are two error strategies that don't correspond one-to-one with
+ a python codec error handler. These are ``surrogate_or_strict`` and
+ ``surrogate_or_replace``. ``surrogate_or_strict`` will use the ``surrogateescape``
+ error handler if available (mostly on python3) or strict if not. It is most
+ appropriate to use when dealing with something that needs to round trip its
+ value like file paths, database keys, etc. Without ``surrogateescape`` the best
+ thing these values can do is generate a traceback that our code can catch
+ and decide how to show an error message. ``surrogate_or_replace`` is for
+ when a value is going to be displayed to the user. If the
+ ``surrogateescape`` error handler is not present, it will replace
+ undecodable byte sequences with a replacement character.
================================
Porting Core Ansible to Python 3
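
A minimal usage sketch of the helpers documented above (illustrative only, not part of this commit; the literal values and variable names are made up):

    from ansible.module_utils._text import to_bytes, to_native, to_text

    # Round-trippable values such as file paths: fail loudly if they cannot be preserved.
    path = to_bytes(u'/tmp/caf\xe9', errors='surrogate_or_strict')

    # Values destined for display: replace undecodable bytes instead of raising.
    msg = to_text(b'warning: caf\xc3\xa9 \xff', errors='surrogate_or_replace')

    # Native str on both python2 and python3, e.g. for exception messages.
    err = to_native(Exception('boom'))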
diff --git a/hacking/module_formatter.py b/hacking/module_formatter.py
index 7a4005c044..6e84cada28 100755
--- a/hacking/module_formatter.py
+++ b/hacking/module_formatter.py
@@ -19,6 +19,7 @@
#
from __future__ import print_function
+__metaclass__ = type
import os
import glob
@@ -34,10 +35,10 @@ from collections import defaultdict
from jinja2 import Environment, FileSystemLoader
from six import iteritems
+from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes
from ansible.utils import module_docs
from ansible.utils.vars import merge_hash
-from ansible.utils.unicode import to_bytes
-from ansible.errors import AnsibleError
#####################################################################################
# constants and paths
diff --git a/lib/ansible/cli/__init__.py b/lib/ansible/cli/__init__.py
index b12fc0dbc6..3176d9c956 100644
--- a/lib/ansible/cli/__init__.py
+++ b/lib/ansible/cli/__init__.py
@@ -33,7 +33,7 @@ import subprocess
from ansible.release import __version__
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
try:
from __main__ import display
@@ -109,7 +109,7 @@ class CLI(object):
if self.options.verbosity > 0:
if C.CONFIG_FILE:
- display.display(u"Using %s as config file" % to_unicode(C.CONFIG_FILE))
+ display.display(u"Using %s as config file" % to_text(C.CONFIG_FILE))
else:
display.display(u"No config file found; using defaults")
diff --git a/lib/ansible/cli/adhoc.py b/lib/ansible/cli/adhoc.py
index cfaeca955b..95ab640ded 100644
--- a/lib/ansible/cli/adhoc.py
+++ b/lib/ansible/cli/adhoc.py
@@ -27,13 +27,13 @@ from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.inventory import Inventory
+from ansible.module_utils._text import to_text
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
from ansible.plugins import get_all_plugin_loaders
from ansible.utils.vars import load_extra_vars
from ansible.utils.vars import load_options_vars
-from ansible.utils.unicode import to_unicode
from ansible.vars import VariableManager
try:
@@ -99,7 +99,7 @@ class AdHocCLI(CLI):
super(AdHocCLI, self).run()
# only thing left should be host pattern
- pattern = to_unicode(self.args[0], errors='strict')
+ pattern = to_text(self.args[0], errors='surrogate_or_strict')
# ignore connection password cause we are local
if self.options.connection == "local":
@@ -169,7 +169,7 @@ class AdHocCLI(CLI):
play_ds = self._play_ds(pattern, self.options.seconds, self.options.poll_interval)
play = Play().load(play_ds, variable_manager=variable_manager, loader=loader)
- if self.callback:
+ if self.callback:
cb = self.callback
elif self.options.one_line:
cb = 'oneline'
diff --git a/lib/ansible/cli/console.py b/lib/ansible/cli/console.py
index 5b0432036f..d2a1d1252f 100644
--- a/lib/ansible/cli/console.py
+++ b/lib/ansible/cli/console.py
@@ -39,18 +39,16 @@ import sys
from ansible import constants as C
from ansible.cli import CLI
from ansible.errors import AnsibleError
-
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.inventory import Inventory
+from ansible.module_utils._text import to_native, to_text
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.splitter import parse_kv
from ansible.playbook.play import Play
-from ansible.vars import VariableManager
+from ansible.plugins import module_loader
from ansible.utils import module_docs
from ansible.utils.color import stringc
-from ansible.utils.unicode import to_unicode, to_str
-from ansible.plugins import module_loader
-
+from ansible.vars import VariableManager
try:
from __main__ import display
@@ -152,11 +150,11 @@ class ConsoleCLI(CLI, cmd.Cmd):
continue
elif module.startswith('_'):
fullpath = '/'.join([path,module])
- if os.path.islink(fullpath): # avoids aliases
+ if os.path.islink(fullpath): # avoids aliases
continue
module = module.replace('_', '', 1)
- module = os.path.splitext(module)[0] # removes the extension
+ module = os.path.splitext(module)[0] # removes the extension
yield module
def default(self, arg, forceshell=False):
@@ -192,11 +190,11 @@ class ConsoleCLI(CLI, cmd.Cmd):
)
play = Play().load(play_ds, variable_manager=self.variable_manager, loader=self.loader)
except Exception as e:
- display.error(u"Unable to build command: %s" % to_unicode(e))
+ display.error(u"Unable to build command: %s" % to_text(e))
return False
try:
- cb = 'minimal' #FIXME: make callbacks configurable
+ cb = 'minimal' # FIXME: make callbacks configurable
# now create a task queue manager to execute the play
self._tqm = None
try:
@@ -225,8 +223,8 @@ class ConsoleCLI(CLI, cmd.Cmd):
display.error('User interrupted execution')
return False
except Exception as e:
- display.error(to_unicode(e))
- #FIXME: add traceback in very very verbose mode
+ display.error(to_text(e))
+ # FIXME: add traceback in very very verbose mode
return False
def emptyline(self):
@@ -379,7 +377,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
else:
completions = [x.name for x in self.inventory.list_hosts(self.options.cwd)]
- return [to_str(s)[offs:] for s in completions if to_str(s).startswith(to_str(mline))]
+ return [to_native(s)[offs:] for s in completions if to_native(s).startswith(to_native(mline))]
def completedefault(self, text, line, begidx, endidx):
if line.split()[0] in self.modules:
@@ -394,7 +392,6 @@ class ConsoleCLI(CLI, cmd.Cmd):
oc, a, _ = module_docs.get_docstring(in_path)
return oc['options'].keys()
-
def run(self):
super(ConsoleCLI, self).run()
@@ -410,7 +407,6 @@ class ConsoleCLI(CLI, cmd.Cmd):
self.pattern = self.args[0]
self.options.cwd = self.pattern
-
# dynamically add modules as commands
self.modules = self.list_modules()
for module in self.modules:
@@ -465,4 +461,3 @@ class ConsoleCLI(CLI, cmd.Cmd):
atexit.register(readline.write_history_file, histfile)
self.set_prompt()
self.cmdloop()
-
diff --git a/lib/ansible/cli/galaxy.py b/lib/ansible/cli/galaxy.py
index 231b6b909a..457e15d5c1 100644
--- a/lib/ansible/cli/galaxy.py
+++ b/lib/ansible/cli/galaxy.py
@@ -39,7 +39,7 @@ from ansible.galaxy.role import GalaxyRole
from ansible.galaxy.login import GalaxyLogin
from ansible.galaxy.token import GalaxyToken
from ansible.playbook.role.requirement import RoleRequirement
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
try:
from __main__ import display
@@ -47,6 +47,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class GalaxyCLI(CLI):
SKIP_INFO_KEYS = ("name", "description", "readme_html", "related", "summary_fields", "average_aw_composite", "average_aw_score", "url" )
@@ -65,7 +66,6 @@ class GalaxyCLI(CLI):
epilog = "\nSee '%s <command> --help' for more information on a specific command.\n\n" % os.path.basename(sys.argv[0])
)
-
self.set_action()
# common
@@ -111,7 +111,7 @@ class GalaxyCLI(CLI):
if self.action in ['init', 'info']:
self.parser.add_option( '--offline', dest='offline', default=False, action='store_true', help="Don't query the galaxy API when creating roles")
- if not self.action in ("delete","import","init","login","setup"):
+ if self.action not in ("delete","import","init","login","setup"):
# NOTE: while the option type=str, the default is a list, and the
# callback will set the value to a list.
self.parser.add_option('-p', '--roles-path', dest='roles_path', action="callback", callback=CLI.expand_paths, type=str, default=C.DEFAULT_ROLES_PATH,
@@ -142,7 +142,7 @@ class GalaxyCLI(CLI):
def _display_role_info(self, role_info):
- text = [u"", u"Role: %s" % to_unicode(role_info['name'])]
+ text = [u"", u"Role: %s" % to_text(role_info['name'])]
text.append(u"\tdescription: %s" % role_info.get('description', ''))
for k in sorted(role_info.keys()):
@@ -340,7 +340,7 @@ class GalaxyCLI(CLI):
f = open(role_file, 'r')
if role_file.endswith('.yaml') or role_file.endswith('.yml'):
try:
- required_roles = yaml.safe_load(f.read())
+ required_roles = yaml.safe_load(f.read())
except Exception as e:
raise AnsibleError("Unable to load data from the requirements file: %s" % role_file)
@@ -502,7 +502,7 @@ class GalaxyCLI(CLI):
if len(self.args):
terms = []
for i in range(len(self.args)):
- terms.append(self.args.pop())
+ terms.append(self.args.pop())
search = '+'.join(terms[::-1])
if not search and not self.options.platforms and not self.options.tags and not self.options.author:
@@ -578,8 +578,8 @@ class GalaxyCLI(CLI):
if len(self.args) < 2:
raise AnsibleError("Expected a github_username and github_repository. Use --help.")
- github_repo = self.args.pop()
- github_user = self.args.pop()
+ github_repo = to_text(self.args.pop(), errors='surrogate_or_strict')
+ github_user = to_text(self.args.pop(), errors='surrogate_or_strict')
if self.options.check_status:
task = self.api.get_import_task(github_user=github_user, github_repo=github_repo)
@@ -594,7 +594,8 @@ class GalaxyCLI(CLI):
display.display("The following Galaxy roles are being updated:" + u'\n', color=C.COLOR_CHANGED)
for t in task:
display.display('%s.%s' % (t['summary_fields']['role']['namespace'],t['summary_fields']['role']['name']), color=C.COLOR_CHANGED)
- display.display(u'\n' + "To properly namespace this role, remove each of the above and re-import %s/%s from scratch" % (github_user,github_repo), color=C.COLOR_CHANGED)
+ display.display(u'\nTo properly namespace this role, remove each of the above and re-import %s/%s from scratch' % (github_user, github_repo),
+ color=C.COLOR_CHANGED)
return 0
# found a single role as expected
display.display("Successfully submitted import request %d" % task[0]['id'])
diff --git a/lib/ansible/cli/vault.py b/lib/ansible/cli/vault.py
index 85bdac6993..65657ca70e 100644
--- a/lib/ansible/cli/vault.py
+++ b/lib/ansible/cli/vault.py
@@ -26,7 +26,7 @@ from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.parsing.dataloader import DataLoader
from ansible.parsing.vault import VaultEditor
from ansible.cli import CLI
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
try:
from __main__ import display
@@ -163,7 +163,7 @@ class VaultCLI(CLI):
# unicode here because we are displaying it and therefore can make
# the decision that the display doesn't have to be precisely what
# the input was (leave that to decrypt instead)
- self.pager(to_unicode(self.editor.plaintext(f)))
+ self.pager(to_text(self.editor.plaintext(f)))
def execute_rekey(self):
for f in self.args:
diff --git a/lib/ansible/errors/__init__.py b/lib/ansible/errors/__init__.py
index 7f02a44bc0..2186e6bc74 100644
--- a/lib/ansible/errors/__init__.py
+++ b/lib/ansible/errors/__init__.py
@@ -25,8 +25,7 @@ from ansible.errors.yaml_strings import ( YAML_POSITION_DETAILS,
YAML_COMMON_UNQUOTED_COLON_ERROR,
YAML_COMMON_PARTIALLY_QUOTED_LINE_ERROR,
YAML_COMMON_UNBALANCED_QUOTES_ERROR )
-
-from ansible.utils.unicode import to_unicode, to_str
+from ansible.module_utils._text import to_native, to_text
class AnsibleError(Exception):
@@ -54,11 +53,11 @@ class AnsibleError(Exception):
if obj and isinstance(obj, AnsibleBaseYAMLObject):
extended_error = self._get_extended_error()
if extended_error and not suppress_extended_error:
- self.message = '%s\n\n%s' % (to_str(message), to_str(extended_error))
+ self.message = '%s\n\n%s' % (to_native(message), to_native(extended_error))
else:
- self.message = '%s' % to_str(message)
+ self.message = '%s' % to_native(message)
else:
- self.message = '%s' % to_str(message)
+ self.message = '%s' % to_native(message)
def __str__(self):
return self.message
@@ -104,8 +103,8 @@ class AnsibleError(Exception):
error_message += YAML_POSITION_DETAILS % (src_file, line_number, col_number)
if src_file not in ('<string>', '<unicode>') and self._show_content:
(target_line, prev_line) = self._get_error_lines_from_file(src_file, line_number - 1)
- target_line = to_unicode(target_line)
- prev_line = to_unicode(prev_line)
+ target_line = to_text(target_line)
+ prev_line = to_text(prev_line)
if target_line:
stripped_line = target_line.replace(" ","")
arrow_line = (" " * (col_number-1)) + "^ here"
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index 64a6d88283..86549ea0b7 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -29,11 +29,10 @@ import shlex
import zipfile
from io import BytesIO
-# from Ansible
from ansible.release import __version__, __author__
from ansible import constants as C
from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a
# variable to the object and then it never gets updated.
@@ -45,6 +44,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
REPLACER = b"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
REPLACER_VERSION = b"\"<<ANSIBLE_VERSION>>\""
REPLACER_COMPLEX = b"\"<<INCLUDE_ANSIBLE_MODULE_COMPLEX_ARGS>>\""
@@ -239,7 +239,9 @@ def debug(command, zipped_mod, json_params):
else:
os.environ['PYTHONPATH'] = basedir
- p = subprocess.Popen([%(interpreter)s, script_path, args_path], env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
+ p = subprocess.Popen([%(interpreter)s, script_path, args_path],
+ env=os.environ, shell=False, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if not isinstance(stderr, (bytes, unicode)):
@@ -328,6 +330,7 @@ if __name__ == '__main__':
sys.exit(exitcode)
'''
+
def _strip_comments(source):
# Strip comments and blank lines from the wrapper
buf = []
@@ -338,6 +341,7 @@ def _strip_comments(source):
buf.append(line)
return u'\n'.join(buf)
+
if C.DEFAULT_KEEP_REMOTE_FILES:
# Keep comments when KEEP_REMOTE_FILES is set. That way users will see
# the comments with some nice usage instructions
@@ -346,6 +350,7 @@ else:
# ANSIBALLZ_TEMPLATE stripped of comments for smaller over the wire size
ACTIVE_ANSIBALLZ_TEMPLATE = _strip_comments(ANSIBALLZ_TEMPLATE)
+
class ModuleDepFinder(ast.NodeVisitor):
# Caveats:
# This code currently does not handle:
@@ -404,6 +409,7 @@ def _slurp(path):
fd.close()
return data
+
def _get_shebang(interpreter, task_vars, args=tuple()):
"""
Note not stellar API:
@@ -425,6 +431,7 @@ def _get_shebang(interpreter, task_vars, args=tuple()):
return (shebang, interpreter)
+
def recursive_finder(name, data, py_module_names, py_module_cache, zf):
"""
Using ModuleDepFinder, make sure we have all of the module_utils files that
@@ -529,11 +536,13 @@ def recursive_finder(name, data, py_module_names, py_module_cache, zf):
# Save memory; the file won't have to be read again for this ansible module.
del py_module_cache[py_module_file]
+
def _is_binary(module_data):
textchars = bytearray(set([7, 8, 9, 10, 12, 13, 27]) | set(range(0x20, 0x100)) - set([0x7f]))
start = module_data[:1024]
return bool(start.translate(None, textchars))
+
def _find_snippet_imports(module_name, module_data, module_path, module_args, task_vars, module_compression):
"""
Given the source of the module, convert it to a Jinja2 template to insert
@@ -617,9 +626,12 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
# Create the module zip data
zipoutput = BytesIO()
zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
- ### Note: If we need to import from release.py first,
- ### remember to catch all exceptions: https://github.com/ansible/ansible/issues/16523
- zf.writestr('ansible/__init__.py', b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n__version__="' + to_bytes(__version__) + b'"\n__author__="' + to_bytes(__author__) + b'"\n')
+ # Note: If we need to import from release.py first,
+ # remember to catch all exceptions: https://github.com/ansible/ansible/issues/16523
+ zf.writestr('ansible/__init__.py',
+ b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n__version__="' +
+ to_bytes(__version__) + b'"\n__author__="' +
+ to_bytes(__author__) + b'"\n')
zf.writestr('ansible/module_utils/__init__.py', b'from pkgutil import extend_path\n__path__=extend_path(__path__,__name__)\n')
zf.writestr('ansible_module_%s.py' % module_name, module_data)
@@ -655,8 +667,9 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
try:
zipdata = open(cached_module_filename, 'rb').read()
except IOError:
- raise AnsibleError('A different worker process failed to create module file. Look at traceback for that process for debugging information.')
- zipdata = to_unicode(zipdata, errors='strict')
+ raise AnsibleError('A different worker process failed to create module file.'
+ ' Look at traceback for that process for debugging information.')
+ zipdata = to_text(zipdata, errors='surrogate_or_strict')
shebang, interpreter = _get_shebang(u'/usr/bin/python', task_vars)
if shebang is None:
@@ -674,7 +687,7 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
shebang=shebang,
interpreter=interpreter,
coding=ENCODING_STRING,
- )))
+ )))
module_data = output.getvalue()
elif module_substyle == 'powershell':
@@ -721,12 +734,11 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
# The main event -- substitute the JSON args string into the module
module_data = module_data.replace(REPLACER_JSONARGS, module_args_json)
- facility = b'syslog.' + to_bytes(task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY), errors='strict')
+ facility = b'syslog.' + to_bytes(task_vars.get('ansible_syslog_facility', C.DEFAULT_SYSLOG_FACILITY), errors='surrogate_or_strict')
module_data = module_data.replace(b'syslog.LOG_USER', facility)
return (module_data, module_style, shebang)
-# ******************************************************************************
def modify_module(module_name, module_path, module_args, task_vars=dict(), module_compression='ZIP_STORED'):
"""
@@ -760,7 +772,7 @@ def modify_module(module_name, module_path, module_args, task_vars=dict(), modul
(module_data, module_style, shebang) = _find_snippet_imports(module_name, module_data, module_path, module_args, task_vars, module_compression)
if module_style == 'binary':
- return (module_data, module_style, to_unicode(shebang, nonstring='passthru'))
+ return (module_data, module_style, to_text(shebang, nonstring='passthru'))
elif shebang is None:
lines = module_data.split(b"\n", 1)
if lines[0].startswith(b"#!"):
@@ -769,7 +781,7 @@ def modify_module(module_name, module_path, module_args, task_vars=dict(), modul
interpreter = args[0]
interpreter = to_bytes(interpreter)
- new_shebang = to_bytes(_get_shebang(interpreter, task_vars, args[1:])[0], errors='strict', nonstring='passthru')
+ new_shebang = to_bytes(_get_shebang(interpreter, task_vars, args[1:])[0], errors='surrogate_or_strict', nonstring='passthru')
if new_shebang:
lines[0] = shebang = new_shebang
@@ -781,6 +793,6 @@ def modify_module(module_name, module_path, module_args, task_vars=dict(), modul
module_data = b"\n".join(lines)
else:
- shebang = to_bytes(shebang, errors='strict')
+ shebang = to_bytes(shebang, errors='surrogate_or_strict')
- return (module_data, module_style, to_unicode(shebang, nonstring='passthru'))
+ return (module_data, module_style, to_text(shebang, nonstring='passthru'))
diff --git a/lib/ansible/executor/playbook_executor.py b/lib/ansible/executor/playbook_executor.py
index 3d6d54cbe2..058230d844 100644
--- a/lib/ansible/executor/playbook_executor.py
+++ b/lib/ansible/executor/playbook_executor.py
@@ -21,15 +21,13 @@ __metaclass__ = type
import os
-from ansible.compat.six import string_types
-
from ansible import constants as C
from ansible.executor.task_queue_manager import TaskQueueManager
+from ansible.module_utils._text import to_native, to_text
from ansible.playbook import Playbook
from ansible.template import Templar
from ansible.utils.helpers import pct_to_int
from ansible.utils.path import makedirs_safe
-from ansible.utils.unicode import to_unicode, to_str
try:
from __main__ import display
@@ -74,7 +72,7 @@ class PlaybookExecutor:
pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader)
self._inventory.set_playbook_basedir(os.path.realpath(os.path.dirname(playbook_path)))
- if self._tqm is None: # we are doing a listing
+ if self._tqm is None: # we are doing a listing
entry = {'playbook': playbook_path}
entry['plays'] = []
else:
@@ -84,7 +82,7 @@ class PlaybookExecutor:
i = 1
plays = pb.get_plays()
- display.vv(u'%d plays in %s' % (len(plays), to_unicode(playbook_path)))
+ display.vv(u'%d plays in %s' % (len(plays), to_text(playbook_path)))
for play in plays:
if play._included_path is not None:
@@ -110,7 +108,7 @@ class PlaybookExecutor:
if self._tqm:
self._tqm.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt, default)
play.vars[vname] = display.do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default)
- else: # we are either in --list-<option> or syntax check
+ else: # we are either in --list-<option> or syntax check
play.vars[vname] = default
# Create a temporary copy of the play here, so we can run post_validate
@@ -156,7 +154,7 @@ class PlaybookExecutor:
# conditions are met, we break out, otherwise we only break out if the entire
# batch failed
failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts) - \
- (previously_failed + previously_unreachable)
+ (previously_failed + previously_unreachable)
if len(batch) == failed_hosts_count:
break_play = True
@@ -173,10 +171,10 @@ class PlaybookExecutor:
if break_play:
break
- i = i + 1 # per play
+ i = i + 1 # per play
if entry:
- entrylist.append(entry) # per playbook
+ entrylist.append(entry) # per playbook
# send the stats callback for this playbook
if self._tqm is not None:
@@ -276,7 +274,7 @@ class PlaybookExecutor:
for x in replay_hosts:
fd.write("%s\n" % x)
except Exception as e:
- display.warning("Could not create retry file '%s'.\n\t%s" % (retry_path, to_str(e)))
+ display.warning("Could not create retry file '%s'.\n\t%s" % (retry_path, to_native(e)))
return False
return True
diff --git a/lib/ansible/executor/process/worker.py b/lib/ansible/executor/process/worker.py
index d579d1fa3c..d93de24ab3 100644
--- a/lib/ansible/executor/process/worker.py
+++ b/lib/ansible/executor/process/worker.py
@@ -19,16 +19,10 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.compat.six.moves import queue
-
-import json
import multiprocessing
import os
-import signal
import sys
-import time
import traceback
-import zlib
from jinja2.exceptions import TemplateNotFound
@@ -40,13 +34,10 @@ try:
except ImportError:
HAS_ATFORK=False
-from ansible.errors import AnsibleError, AnsibleConnectionFailure
+from ansible.errors import AnsibleConnectionFailure
from ansible.executor.task_executor import TaskExecutor
from ansible.executor.task_result import TaskResult
-from ansible.playbook.handler import Handler
-from ansible.playbook.task import Task
-from ansible.vars.unsafe_proxy import AnsibleJSONUnsafeDecoder
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
try:
from __main__ import display
@@ -144,11 +135,11 @@ class WorkerProcess(multiprocessing.Process):
try:
self._host.vars = dict()
self._host.groups = []
- task_result = TaskResult(self._host.name, self._task._uuid, dict(failed=True, exception=to_unicode(traceback.format_exc()), stdout=''))
+ task_result = TaskResult(self._host.name, self._task._uuid, dict(failed=True, exception=to_text(traceback.format_exc()), stdout=''))
self._rslt_q.put(task_result, block=False)
except:
- display.debug(u"WORKER EXCEPTION: %s" % to_unicode(e))
- display.debug(u"WORKER TRACEBACK: %s" % to_unicode(traceback.format_exc()))
+ display.debug(u"WORKER EXCEPTION: %s" % to_text(e))
+ display.debug(u"WORKER TRACEBACK: %s" % to_text(traceback.format_exc()))
display.debug("WORKER PROCESS EXITING")
diff --git a/lib/ansible/executor/task_executor.py b/lib/ansible/executor/task_executor.py
index a5bb076761..91da5d3b04 100644
--- a/lib/ansible/executor/task_executor.py
+++ b/lib/ansible/executor/task_executor.py
@@ -20,7 +20,6 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
-import json
import subprocess
import sys
import time
@@ -31,12 +30,12 @@ from ansible.compat.six import iteritems, string_types, binary_type
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure
from ansible.executor.task_result import TaskResult
+from ansible.module_utils._text import to_bytes, to_text
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.template import Templar
from ansible.utils.encrypt import key_for_hostname
from ansible.utils.listify import listify_lookup_plugin_terms
-from ansible.utils.unicode import to_unicode, to_bytes
from ansible.vars.unsafe_proxy import UnsafeProxy, wrap_var
try:
@@ -130,7 +129,7 @@ class TaskExecutor:
if isinstance(res, UnsafeProxy):
return res._obj
elif isinstance(res, binary_type):
- return to_unicode(res, errors='strict')
+ return to_text(res, errors='surrogate_or_strict')
elif isinstance(res, dict):
for k in res:
res[k] = _clean_res(res[k])
@@ -144,16 +143,16 @@ class TaskExecutor:
display.debug("done dumping result, returning")
return res
except AnsibleError as e:
- return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
+ return dict(failed=True, msg=to_text(e, nonstring='simplerepr'))
except Exception as e:
- return dict(failed=True, msg='Unexpected failure during module execution.', exception=to_unicode(traceback.format_exc()), stdout='')
+ return dict(failed=True, msg='Unexpected failure during module execution.', exception=to_text(traceback.format_exc()), stdout='')
finally:
try:
self._connection.close()
except AttributeError:
pass
except Exception as e:
- display.debug(u"error closing connection: %s" % to_unicode(e))
+ display.debug(u"error closing connection: %s" % to_text(e))
def _get_loop_items(self):
'''
@@ -177,16 +176,18 @@ class TaskExecutor:
items = None
if self._task.loop:
if self._task.loop in self._shared_loader_obj.lookup_loader:
- #TODO: remove convert_bare true and deprecate this in with_
+ # TODO: remove convert_bare true and deprecate this in with_
if self._task.loop == 'first_found':
# first_found loops are special. If the item is undefined
# then we want to fall through to the next value rather
# than failing.
- loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=False, convert_bare=True)
+ loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar,
+ loader=self._loader, fail_on_undefined=False, convert_bare=True)
loop_terms = [t for t in loop_terms if not templar._contains_vars(t)]
else:
try:
- loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True, convert_bare=True)
+ loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar,
+ loader=self._loader, fail_on_undefined=True, convert_bare=True)
except AnsibleUndefinedVariable as e:
display.deprecated("Skipping task due to undefined Error, in the future this will be a fatal error.: %s" % to_bytes(e))
return None
@@ -195,7 +196,7 @@ class TaskExecutor:
mylookup = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar)
# give lookup task 'context' for subdir (mostly needed for first_found)
- for subdir in ['template', 'var', 'file']: #TODO: move this to constants?
+ for subdir in ['template', 'var', 'file']: # TODO: move this to constants?
if subdir in self._task.action:
break
setattr(mylookup,'_subdir', subdir + 's')
@@ -239,13 +240,15 @@ class TaskExecutor:
label = None
loop_pause = 0
if self._task.loop_control:
- # the value may be 'None', so we still need to default it back to 'item'
+ # the value may be 'None', so we still need to default it back to 'item'
loop_var = self._task.loop_control.loop_var or 'item'
label = self._task.loop_control.label or ('{{' + loop_var + '}}')
loop_pause = self._task.loop_control.pause or 0
if loop_var in task_vars:
- display.warning("The loop variable '%s' is already in use. You should set the `loop_var` value in the `loop_control` option for the task to something else to avoid variable collisions and unexpected behavior." % loop_var)
+ display.warning(u"The loop variable '%s' is already in use."
+ u"You should set the `loop_var` value in the `loop_control` option for the task"
+ u" to something else to avoid variable collisions and unexpected behavior." % loop_var)
ran_once = False
items = self._squash_items(items, loop_var, task_vars)
@@ -263,7 +266,7 @@ class TaskExecutor:
tmp_task._parent = self._task._parent
tmp_play_context = self._play_context.copy()
except AnsibleParserError as e:
- results.append(dict(failed=True, msg=to_unicode(e)))
+ results.append(dict(failed=True, msg=to_text(e)))
continue
# now we swap the internal task and play context with their copies,
@@ -279,7 +282,7 @@ class TaskExecutor:
res[loop_var] = item
res['_ansible_item_result'] = True
- if not label is None:
+ if label is not None:
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
res['_ansible_item_label'] = templar.template(label, fail_on_undefined=False)
@@ -421,7 +424,7 @@ class TaskExecutor:
include_file = templar.template(include_file)
return dict(include=include_file, include_variables=include_variables)
- #TODO: not needed?
+ # TODO: not needed?
# if this task is a IncludeRole, we just return now with a success code so the main thread can expand the task list for the given host
elif self._task.action == 'include_role':
include_variables = self._task.args.copy()
@@ -482,7 +485,7 @@ class TaskExecutor:
try:
result = self._handler.run(task_vars=variables)
except AnsibleConnectionFailure as e:
- return dict(unreachable=True, msg=to_unicode(e))
+ return dict(unreachable=True, msg=to_text(e))
display.debug("handler run complete")
# preserve no log
@@ -666,7 +669,7 @@ class TaskExecutor:
try:
cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
- err = to_unicode(err)
+ err = to_text(err)
if u"Bad configuration option" in err or u"Usage:" in err:
conn_type = "paramiko"
except OSError:
diff --git a/lib/ansible/executor/task_queue_manager.py b/lib/ansible/executor/task_queue_manager.py
index b1a205d2e5..6744367363 100644
--- a/lib/ansible/executor/task_queue_manager.py
+++ b/lib/ansible/executor/task_queue_manager.py
@@ -28,21 +28,20 @@ import time
from collections import deque
from ansible import constants as C
+from ansible.compat.six import string_types
from ansible.errors import AnsibleError
from ansible.executor import action_write_locks
from ansible.executor.play_iterator import PlayIterator
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.stats import AggregateStats
-from ansible.module_utils.facts import Facts
+from ansible.module_utils._text import to_text
from ansible.playbook.block import Block
from ansible.playbook.play_context import PlayContext
from ansible.plugins import action_loader, callback_loader, connection_loader, filter_loader, lookup_loader, module_loader, strategy_loader, test_loader
-from ansible.template import Templar
-from ansible.vars.hostvars import HostVars
from ansible.plugins.callback import CallbackBase
+from ansible.template import Templar
from ansible.utils.helpers import pct_to_int
-from ansible.utils.unicode import to_unicode
-from ansible.compat.six import string_types
+from ansible.vars.hostvars import HostVars
try:
from __main__ import display
@@ -288,7 +287,8 @@ class TaskQueueManager:
stdout_callback_loaded = True
elif callback_name == 'tree' and self._run_tree:
pass
- elif not self._run_additional_callbacks or (callback_needs_whitelist and (C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST)):
+ elif not self._run_additional_callbacks or (callback_needs_whitelist and (
+ C.DEFAULT_CALLBACK_WHITELIST is None or callback_name not in C.DEFAULT_CALLBACK_WHITELIST)):
continue
self._callback_plugins.append(callback_plugin())
@@ -336,8 +336,8 @@ class TaskQueueManager:
serial_items = [serial_items]
max_serial = max([pct_to_int(x, num_hosts) for x in serial_items])
- contenders = [self._options.forks, max_serial, num_hosts]
- contenders = [v for v in contenders if v is not None and v > 0]
+ contenders = [self._options.forks, max_serial, num_hosts]
+ contenders = [v for v in contenders if v is not None and v > 0]
self._initialize_processes(min(contenders))
play_context = PlayContext(new_play, self._options, self.passwords, self._connection_lockfile.fileno())
@@ -446,7 +446,7 @@ class TaskQueueManager:
# try to find v2 method, fallback to v1 method, ignore callback if no method found
methods = []
for possible in [method_name, 'v2_on_any']:
- gotit = getattr(callback_plugin, possible, None)
+ gotit = getattr(callback_plugin, possible, None)
if gotit is None:
gotit = getattr(callback_plugin, possible.replace('v2_',''), None)
if gotit is not None:
@@ -468,8 +468,8 @@ class TaskQueueManager:
else:
method(*args, **kwargs)
except Exception as e:
- #TODO: add config toggle to make this fatal or not?
- display.warning(u"Failure using method (%s) in callback plugin (%s): %s" % (to_unicode(method_name), to_unicode(callback_plugin), to_unicode(e)))
+ # TODO: add config toggle to make this fatal or not?
+ display.warning(u"Failure using method (%s) in callback plugin (%s): %s" % (to_text(method_name), to_text(callback_plugin), to_text(e)))
from traceback import format_tb
from sys import exc_info
display.debug('Callback Exception: \n' + ' '.join(format_tb(exc_info()[2])))
diff --git a/lib/ansible/galaxy/api.py b/lib/ansible/galaxy/api.py
index 5dd9d75736..471c8eb4e9 100644
--- a/lib/ansible/galaxy/api.py
+++ b/lib/ansible/galaxy/api.py
@@ -28,12 +28,12 @@ import json
import ansible.constants as C
from ansible.compat.six import string_types
-from ansible.compat.six.moves.urllib.parse import quote as urlquote, urlencode
from ansible.compat.six.moves.urllib.error import HTTPError
+from ansible.compat.six.moves.urllib.parse import quote as urlquote, urlencode
from ansible.errors import AnsibleError
-from ansible.module_utils.urls import open_url
from ansible.galaxy.token import GalaxyToken
-from ansible.utils.unicode import to_str
+from ansible.module_utils._text import to_native
+from ansible.module_utils.urls import open_url
try:
from __main__ import display
@@ -115,12 +115,12 @@ class GalaxyAPI(object):
try:
return_data = open_url(url, validate_certs=self._validate_certs)
except Exception as e:
- raise AnsibleError("Failed to get data from the API server (%s): %s " % (url, to_str(e)))
+ raise AnsibleError("Failed to get data from the API server (%s): %s " % (url, to_native(e)))
try:
data = json.load(return_data)
except Exception as e:
- raise AnsibleError("Could not process data from the API server (%s): %s " % (url, to_str(e)))
+ raise AnsibleError("Could not process data from the API server (%s): %s " % (url, to_native(e)))
if 'current_version' not in data:
raise AnsibleError("missing required 'current_version' from server response (%s)" % url)
diff --git a/lib/ansible/inventory/__init__.py b/lib/ansible/inventory/__init__.py
index f8a98929f4..eb498bc238 100644
--- a/lib/ansible/inventory/__init__.py
+++ b/lib/ansible/inventory/__init__.py
@@ -33,12 +33,10 @@ from ansible.errors import AnsibleError
from ansible.inventory.dir import InventoryDirectory, get_file_parser
from ansible.inventory.group import Group
from ansible.inventory.host import Host
+from ansible.module_utils._text import to_bytes, to_text
+from ansible.parsing.utils.addresses import parse_address
from ansible.plugins import vars_loader
-from ansible.utils.unicode import to_unicode, to_bytes
from ansible.utils.vars import combine_vars
-from ansible.parsing.utils.addresses import parse_address
-
-HOSTS_PATTERNS_CACHE = {}
try:
from __main__ import display
@@ -46,6 +44,10 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
+HOSTS_PATTERNS_CACHE = {}
+
+
class Inventory(object):
"""
Host inventory for ansible.
@@ -125,7 +127,7 @@ class Inventory(object):
try:
(host, port) = parse_address(h, allow_ranges=False)
except AnsibleError as e:
- display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_unicode(e))
+ display.vvv("Unable to parse address from hostname, leaving unchanged: %s" % to_text(e))
host = h
port = None
@@ -138,7 +140,7 @@ class Inventory(object):
self.localhost = new_host
all.add_host(new_host)
elif self._loader.path_exists(host_list):
- #TODO: switch this to a plugin loader and a 'condition' per plugin on which it should be tried, restoring 'inventory pllugins'
+ # TODO: switch this to a plugin loader and a 'condition' per plugin on which it should be tried, restoring 'inventory plugins'
if self.is_directory(host_list):
# Ensure basedir is inside the directory
host_list = os.path.join(self.host_list, "")
@@ -151,7 +153,7 @@ class Inventory(object):
# should never happen, but JIC
raise AnsibleError("Unable to parse %s as an inventory source" % host_list)
else:
- display.warning("Host file not found: %s" % to_unicode(host_list))
+ display.warning("Host file not found: %s" % to_text(host_list))
self._vars_plugins = [ x for x in vars_loader.all(self) ]
@@ -191,7 +193,7 @@ class Inventory(object):
return results
def get_hosts(self, pattern="all", ignore_limits_and_restrictions=False):
- """
+ """
Takes a pattern or list of patterns and returns a list of matching
inventory host names, taking into account any active restrictions
or applied subsets
@@ -205,9 +207,9 @@ class Inventory(object):
if not ignore_limits_and_restrictions:
if self._subset:
- pattern_hash += u":%s" % to_unicode(self._subset)
+ pattern_hash += u":%s" % to_text(self._subset)
if self._restriction:
- pattern_hash += u":%s" % to_unicode(self._restriction)
+ pattern_hash += u":%s" % to_text(self._restriction)
if pattern_hash not in HOSTS_PATTERNS_CACHE:
@@ -326,7 +328,7 @@ class Inventory(object):
return hosts
def _match_one_pattern(self, pattern):
- """
+ """
Takes a single pattern and returns a list of matching host names.
Ignores intersection (&) and exclusion (!) specifiers.
@@ -426,7 +428,7 @@ class Inventory(object):
"""
Takes a list of hosts and a (start,end) tuple and returns the subset of
hosts based on the subscript (which may be None to return all hosts).
- """
+ """
if not hosts or not subscript:
return hosts
@@ -491,7 +493,8 @@ class Inventory(object):
py_interp = sys.executable
if not py_interp:
# sys.executable is not set in some cornercases. #13585
- display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default. You can correct this by setting ansible_python_interpreter for localhost')
+ display.warning('Unable to determine python interpreter from sys.executable. Using /usr/bin/python default.'
+ ' You can correct this by setting ansible_python_interpreter for localhost')
py_interp = '/usr/bin/python'
new_host.set_variable("ansible_python_interpreter", py_interp)
self.get_group("ungrouped").add_host(new_host)
@@ -648,7 +651,7 @@ class Inventory(object):
return sorted(self.groups.keys(), key=lambda x: x)
def restrict_to_hosts(self, restriction):
- """
+ """
Restrict list operations to the hosts given in restriction. This is used
to batch serial operations in main playbook code, don't use this for other
reasons.
@@ -660,12 +663,12 @@ class Inventory(object):
self._restriction = [ h.name for h in restriction ]
def subset(self, subset_pattern):
- """
+ """
Limits inventory results to a subset of inventory that matches a given
pattern, such as to select a given geographic of numeric slice amongst
- a previous 'hosts' selection that only select roles, or vice versa.
+ a previous 'hosts' selection that only select roles, or vice versa.
Corresponds to --limit parameter to ansible-playbook
- """
+ """
if subset_pattern is None:
self._subset = None
else:
@@ -781,7 +784,7 @@ class Inventory(object):
path = os.path.realpath(os.path.join(basedir, 'group_vars'))
found_vars = set()
if os.path.exists(path):
- found_vars = set(os.listdir(to_unicode(path)))
+ found_vars = set(os.listdir(to_text(path)))
return found_vars
def _find_host_vars_files(self, basedir):
@@ -791,7 +794,7 @@ class Inventory(object):
path = os.path.realpath(os.path.join(basedir, 'host_vars'))
found_vars = set()
if os.path.exists(path):
- found_vars = set(os.listdir(to_unicode(path)))
+ found_vars = set(os.listdir(to_text(path)))
return found_vars
def _get_hostgroup_vars(self, host=None, group=None, new_pb_basedir=False, return_results=False):
@@ -832,13 +835,13 @@ class Inventory(object):
# Before trying to load vars from file, check that the directory contains relvant file names
if host is None and any(map(lambda ext: group.name + ext in self._group_vars_files, C.YAML_FILENAME_EXTENSIONS)):
# load vars in dir/group_vars/name_of_group
- base_path = to_unicode(os.path.abspath(os.path.join(to_bytes(basedir), b"group_vars/" + to_bytes(group.name))), errors='strict')
+ base_path = to_text(os.path.abspath(os.path.join(to_bytes(basedir), b"group_vars/" + to_bytes(group.name))), errors='surrogate_or_strict')
host_results = self._variable_manager.add_group_vars_file(base_path, self._loader)
if return_results:
results = combine_vars(results, host_results)
elif group is None and any(map(lambda ext: host.name + ext in self._host_vars_files, C.YAML_FILENAME_EXTENSIONS)):
# same for hostvars in dir/host_vars/name_of_host
- base_path = to_unicode(os.path.abspath(os.path.join(to_bytes(basedir), b"host_vars/" + to_bytes(host.name))), errors='strict')
+ base_path = to_text(os.path.abspath(os.path.join(to_bytes(basedir), b"host_vars/" + to_bytes(host.name))), errors='surrogate_or_strict')
group_results = self._variable_manager.add_host_vars_file(base_path, self._loader)
if return_results:
results = combine_vars(results, group_results)
diff --git a/lib/ansible/inventory/ini.py b/lib/ansible/inventory/ini.py
index 172f8d0351..81c3ec90e4 100644
--- a/lib/ansible/inventory/ini.py
+++ b/lib/ansible/inventory/ini.py
@@ -28,9 +28,10 @@ from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.inventory.expand_hosts import detect_range
from ansible.inventory.expand_hosts import expand_hostname_range
+from ansible.module_utils._text import to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.utils.shlex import shlex_split
-from ansible.utils.unicode import to_unicode
+
class InventoryParser(object):
"""
@@ -56,7 +57,7 @@ class InventoryParser(object):
(data, private) = loader._get_file_contents(filename)
else:
with open(filename) as fh:
- data = to_unicode(fh.read())
+ data = to_text(fh.read())
data = data.split('\n')
self._parse(data)
@@ -125,7 +126,7 @@ class InventoryParser(object):
continue
elif line.startswith('[') and line.endswith(']'):
- self._raise_error("Invalid section entry: '%s'. Please make sure that there are no spaces" % line + \
+ self._raise_error("Invalid section entry: '%s'. Please make sure that there are no spaces " % line +
"in the section entry, and that there are no other invalid characters")
# It's not a section, so the current state tells us what kind of
@@ -188,7 +189,6 @@ class InventoryParser(object):
if group.depth == 0 and group.name not in ('all', 'ungrouped'):
self.groups['all'].add_child_group(group)
-
def _parse_group_name(self, line):
'''
Takes a single line and tries to parse it as a group name. Returns the
@@ -323,7 +323,7 @@ class InventoryParser(object):
except SyntaxError:
# Is this a hash with an equals at the end?
pass
- return to_unicode(v, nonstring='passthru', errors='strict')
+ return to_text(v, nonstring='passthru', errors='surrogate_or_strict')
def get_host_variables(self, host):
return {}
diff --git a/lib/ansible/inventory/script.py b/lib/ansible/inventory/script.py
index 95e48eff58..6288b79445 100644
--- a/lib/ansible/inventory/script.py
+++ b/lib/ansible/inventory/script.py
@@ -31,7 +31,7 @@ from ansible.errors import AnsibleError
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.module_utils.basic import json_dict_bytes_to_unicode
-from ansible.utils.unicode import to_str, to_unicode
+from ansible.module_utils._text import to_native, to_text
class InventoryScript:
@@ -61,9 +61,9 @@ class InventoryScript:
# make sure script output is unicode so that json loader will output
# unicode strings itself
try:
- self.data = to_unicode(stdout, errors="strict")
+ self.data = to_text(stdout, errors="strict")
except Exception as e:
- raise AnsibleError("inventory data from {0} contained characters that cannot be interpreted as UTF-8: {1}".format(to_str(self.filename), to_str(e)))
+ raise AnsibleError("inventory data from {0} contained characters that cannot be interpreted as UTF-8: {1}".format(to_native(self.filename), to_native(e)))
# see comment about _meta below
self.host_vars_from_top = None
@@ -78,11 +78,11 @@ class InventoryScript:
self.raw = self._loader.load(self.data)
except Exception as e:
sys.stderr.write(err + "\n")
- raise AnsibleError("failed to parse executable inventory script results from {0}: {1}".format(to_str(self.filename), to_str(e)))
+ raise AnsibleError("failed to parse executable inventory script results from {0}: {1}".format(to_native(self.filename), to_native(e)))
if not isinstance(self.raw, Mapping):
sys.stderr.write(err + "\n")
- raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(to_str(self.filename)))
+ raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(to_native(self.filename)))
group = None
for (group_name, data) in self.raw.items():
@@ -152,7 +152,7 @@ class InventoryScript:
try:
got = self.host_vars_from_top.get(host.name, {})
except AttributeError as e:
- raise AnsibleError("Improperly formated host information for %s: %s" % (host.name,to_str(e)))
+ raise AnsibleError("Improperly formated host information for %s: %s" % (host.name,to_native(e)))
return got
cmd = [self.filename, "--host", host.name]
diff --git a/lib/ansible/module_utils/_text.py b/lib/ansible/module_utils/_text.py
index fe5acf7f27..cf075e3bb4 100644
--- a/lib/ansible/module_utils/_text.py
+++ b/lib/ansible/module_utils/_text.py
@@ -32,16 +32,18 @@
making backwards compatibility guarantees. The API may change between
releases. Do not use this unless you are willing to port your module code.
"""
+import codecs
from ansible.module_utils.six import PY3, text_type, binary_type
-import codecs
+
try:
codecs.lookup_error('surrogateescape')
HAS_SURROGATEESCAPE = True
except LookupError:
HAS_SURROGATEESCAPE = False
+
def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a byte string
@@ -109,7 +111,14 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
# Note: We do these last even though we have to call to_bytes again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
- value = str(obj)
+ try:
+ value = str(obj)
+ except UnicodeError:
+ try:
+ value = repr(obj)
+ except UnicodeError:
+ # Giving up
+ return to_bytes('')
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
@@ -122,6 +131,7 @@ def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
return to_bytes(value, encoding, errors)
+
def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a text string
@@ -175,7 +185,14 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
# Note: We do these last even though we have to call to_text again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
- value = str(obj)
+ try:
+ value = str(obj)
+ except UnicodeError:
+ try:
+ value = repr(obj)
+ except UnicodeError:
+ # Giving up
+ return u''
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
@@ -187,6 +204,7 @@ def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
return to_text(value, encoding, errors)
+
#: :py:func:`to_native`
#: Transform a variable into the native str type for the python version
#:
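The new simplerepr fallback above tries str(obj) first, then repr(obj), before giving up entirely. A standalone sketch of that pattern (not the module's actual code) with a contrived object whose __str__ fails the way str() on non-ASCII unicode can on Python 2:

def simplerepr(obj):
    # mirrors the fallback chain added in the hunk above
    try:
        return str(obj)
    except UnicodeError:
        try:
            return repr(obj)
        except UnicodeError:
            return ''  # giving up

class Tricky(object):
    """Object whose __str__ raises, standing in for a py2 unicode failure."""
    def __str__(self):
        raise UnicodeEncodeError('ascii', u'caf\xe9', 3, 4, 'ordinal not in range(128)')
    def __repr__(self):
        return 'Tricky()'

print(simplerepr(Tricky()))   # -> Tricky()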
diff --git a/lib/ansible/module_utils/basic.py b/lib/ansible/module_utils/basic.py
index 12add4eb38..61f68a3965 100644
--- a/lib/ansible/module_utils/basic.py
+++ b/lib/ansible/module_utils/basic.py
@@ -805,7 +805,7 @@ class AnsibleModule(object):
if not HAVE_SELINUX or not self.selinux_enabled():
return context
try:
- ret = selinux.matchpathcon(to_native(path, errors='strict'), mode)
+ ret = selinux.matchpathcon(to_native(path, errors='surrogate_or_strict'), mode)
except OSError:
return context
if ret[0] == -1:
@@ -820,7 +820,7 @@ class AnsibleModule(object):
if not HAVE_SELINUX or not self.selinux_enabled():
return context
try:
- ret = selinux.lgetfilecon_raw(to_native(path, errors='strict'))
+ ret = selinux.lgetfilecon_raw(to_native(path, errors='surrogate_or_strict'))
except OSError:
e = get_exception()
if e.errno == errno.ENOENT:
@@ -2121,10 +2121,10 @@ class AnsibleModule(object):
to_clean_args = args
if PY2:
if isinstance(args, text_type):
- to_clean_args = args.encode('utf-8')
+ to_clean_args = to_bytes(args)
else:
if isinstance(args, binary_type):
- to_clean_args = args.decode('utf-8', errors='replace')
+ to_clean_args = to_text(args)
if isinstance(args, (text_type, binary_type)):
to_clean_args = shlex.split(to_clean_args)
@@ -2193,11 +2193,7 @@ class AnsibleModule(object):
if not binary_data:
data += '\n'
if isinstance(data, text_type):
- if PY3:
- errors = 'surrogateescape'
- else:
- errors = 'strict'
- data = data.encode('utf-8', errors=errors)
+ data = to_bytes(data)
cmd.stdin.write(data)
cmd.stdin.close()
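The run_command changes above replace hand-rolled encode/decode calls with to_bytes/to_text, whose default error handling is surrogateescape-based where available. A standalone Python 3 sketch of why that matters: undecodable bytes survive a round trip through text instead of raising or being replaced.

raw = b'caf\xe9 --flag'                        # latin-1 bytes, not valid UTF-8
text = raw.decode('utf-8', errors='surrogateescape')
back = text.encode('utf-8', errors='surrogateescape')
assert back == raw                             # nothing lost, nothing mangled
print(repr(text))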
diff --git a/lib/ansible/parsing/dataloader.py b/lib/ansible/parsing/dataloader.py
index 9cac94d83c..f27e31ae2b 100644
--- a/lib/ansible/parsing/dataloader.py
+++ b/lib/ansible/parsing/dataloader.py
@@ -27,16 +27,15 @@ import tempfile
from yaml import YAMLError
from ansible.compat.six import text_type, string_types
-
from ansible.errors import AnsibleFileNotFound, AnsibleParserError, AnsibleError
from ansible.errors.yaml_strings import YAML_SYNTAX_ERROR
+from ansible.module_utils.basic import is_executable
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.vault import VaultLib
from ansible.parsing.quoting import unquote
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
-from ansible.module_utils.basic import is_executable
from ansible.utils.path import unfrackpath
-from ansible.utils.unicode import to_unicode, to_bytes, to_str
try:
from __main__ import display
@@ -44,6 +43,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
class DataLoader():
'''
@@ -127,15 +127,15 @@ class DataLoader():
def path_exists(self, path):
path = self.path_dwim(path)
- return os.path.exists(to_bytes(path, errors='strict'))
+ return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))
def is_file(self, path):
path = self.path_dwim(path)
- return os.path.isfile(to_bytes(path, errors='strict')) or path == os.devnull
+ return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull
def is_directory(self, path):
path = self.path_dwim(path)
- return os.path.isdir(to_bytes(path, errors='strict'))
+ return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))
def list_directory(self, path):
path = self.path_dwim(path)
@@ -156,7 +156,7 @@ class DataLoader():
try:
loader.dispose()
except AttributeError:
- pass # older versions of yaml don't have dispose function, ignore
+ pass # older versions of yaml don't have dispose function, ignore
def _get_file_contents(self, file_name):
'''
@@ -178,7 +178,7 @@ class DataLoader():
data = self._vault.decrypt(data, filename=b_file_name)
show_content = False
- data = to_unicode(data, errors='strict')
+ data = to_text(data, errors='surrogate_or_strict')
return (data, show_content)
except (IOError, OSError) as e:
@@ -208,7 +208,7 @@ class DataLoader():
''' sets the base directory, used to find files when a relative path is given '''
if basedir is not None:
- self._basedir = to_unicode(basedir)
+ self._basedir = to_text(basedir)
def path_dwim(self, given):
'''
@@ -216,14 +216,14 @@ class DataLoader():
'''
given = unquote(given)
- given = to_unicode(given, errors='strict')
+ given = to_text(given, errors='surrogate_or_strict')
if given.startswith(u"/"):
return os.path.abspath(given)
elif given.startswith(u"~"):
return os.path.abspath(os.path.expanduser(given))
else:
- basedir = to_unicode(self._basedir, errors='strict')
+ basedir = to_text(self._basedir, errors='surrogate_or_strict')
return os.path.abspath(os.path.join(basedir, given))
def path_dwim_relative(self, path, dirname, source):
@@ -247,8 +247,8 @@ class DataLoader():
basedir = unfrackpath(path)
# is it a role and if so make sure you get correct base path
- if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='strict')) \
- or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='strict')):
+ if path.endswith('tasks') and os.path.exists(to_bytes(os.path.join(path,'main.yml'), errors='surrogate_or_strict')) \
+ or os.path.exists(to_bytes(os.path.join(path,'tasks/main.yml'), errors='surrogate_or_strict')):
isrole = True
if path.endswith('tasks'):
basedir = unfrackpath(os.path.dirname(path))
@@ -271,7 +271,7 @@ class DataLoader():
search.append(self.path_dwim(source))
for candidate in search:
- if os.path.exists(to_bytes(candidate, errors='strict')):
+ if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
break
return candidate
@@ -296,19 +296,19 @@ class DataLoader():
elif source.startswith('~') or source.startswith(os.path.sep):
# path is absolute, no relative needed, check existence and return source
test_path = unfrackpath(b_source)
- if os.path.exists(to_bytes(test_path, errors='strict')):
+ if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
result = test_path
else:
search = []
for path in paths:
upath = unfrackpath(path)
- b_upath = to_bytes(upath, errors='strict')
+ b_upath = to_bytes(upath, errors='surrogate_or_strict')
b_mydir = os.path.dirname(b_upath)
# if path is in role and 'tasks' not there already, add it into the search
if b_upath.endswith(b'tasks') and os.path.exists(os.path.join(b_upath, b'main.yml')) \
- or os.path.exists(os.path.join(b_upath, b'tasks/main.yml')) \
- or os.path.exists(os.path.join(b_mydir, b'tasks/main.yml')):
+ or os.path.exists(os.path.join(b_upath, b'tasks/main.yml')) \
+ or os.path.exists(os.path.join(b_mydir, b'tasks/main.yml')):
if b_mydir.endswith(b'tasks'):
search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
search.append(os.path.join(b_mydir, b_source))
@@ -324,11 +324,11 @@ class DataLoader():
search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
search.append(os.path.join(to_bytes(self.get_basedir()), b_source))
- display.debug(u'search_path:\n\t%s' % to_unicode(b'\n\t'.join(search), errors='replace'))
+ display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
for b_candidate in search:
- display.vvvvv(u'looking for "%s" at "%s"' % (source, to_unicode(b_candidate)))
+ display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
if os.path.exists(b_candidate):
- result = to_unicode(b_candidate)
+ result = to_text(b_candidate)
break
return result
@@ -339,8 +339,8 @@ class DataLoader():
retrieve password from STDOUT
"""
- this_path = os.path.realpath(to_bytes(os.path.expanduser(vault_password_file), errors='strict'))
- if not os.path.exists(to_bytes(this_path, errors='strict')):
+ this_path = os.path.realpath(to_bytes(os.path.expanduser(vault_password_file), errors='surrogate_or_strict'))
+ if not os.path.exists(to_bytes(this_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("The vault password file %s was not found" % this_path)
if self.is_executable(this_path):
@@ -348,7 +348,8 @@ class DataLoader():
# STDERR not captured to make it easier for users to prompt for input in their scripts
p = subprocess.Popen(this_path, stdout=subprocess.PIPE)
except OSError as e:
- raise AnsibleError("Problem running vault password script %s (%s). If this is not a script, remove the executable bit from the file." % (' '.join(this_path), e))
+ raise AnsibleError("Problem running vault password script %s (%s)."
+ " If this is not a script, remove the executable bit from the file." % (' '.join(this_path), to_native(e)))
stdout, stderr = p.communicate()
self.set_vault_password(stdout.strip('\r\n'))
else:
@@ -381,11 +382,11 @@ class DataLoader():
"""
if not file_path or not isinstance(file_path, string_types):
- raise AnsibleParserError("Invalid filename: '%s'" % to_str(file_path))
+ raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))
- b_file_path = to_bytes(file_path, errors='strict')
+ b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
- raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_str(file_path))
+ raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_native(file_path))
if not self._vault:
self._vault = VaultLib(password="")
@@ -410,7 +411,7 @@ class DataLoader():
return real_path
except (IOError, OSError) as e:
- raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_str(real_path), to_str(e)))
+ raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)))
def cleanup_tmp_file(self, file_path):
"""
@@ -420,11 +421,11 @@ class DataLoader():
"""
if file_path in self._tempfiles:
os.unlink(file_path)
- self._tempfiles.remove(file_path);
+ self._tempfiles.remove(file_path)
def cleanup_all_tmp_files(self):
for f in self._tempfiles:
try:
self.cleanup_tmp_file(f)
except:
- pass #TODO: this should at least warn
+ pass # TODO: this should at least warn
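Throughout the DataLoader the errors value for path conversions moves from 'strict' to 'surrogate_or_strict', so paths that arrived via surrogateescape still convert to bytes cleanly before touching the filesystem. A minimal sketch of that pattern, assuming ansible is importable; the path is illustrative:

import os
from ansible.module_utils._text import to_bytes

def path_exists(path):
    # convert to bytes before the filesystem call; 'surrogate_or_strict'
    # keeps surrogate-escaped characters from breaking the conversion
    return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))

print(path_exists(u'/tmp/some-playbook-dir'))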
diff --git a/lib/ansible/parsing/splitter.py b/lib/ansible/parsing/splitter.py
index 4849d08265..e6cb452035 100644
--- a/lib/ansible/parsing/splitter.py
+++ b/lib/ansible/parsing/splitter.py
@@ -23,8 +23,10 @@ import re
import codecs
from ansible.errors import AnsibleParserError
+from ansible.module_utils._text import to_text
from ansible.parsing.quoting import unquote
+
# Decode escapes adapted from rspeer's answer here:
# http://stackoverflow.com/questions/4020539/process-escape-sequences-in-a-string-in-python
_HEXCHAR = '[a-fA-F0-9]'
@@ -36,12 +38,14 @@ _ESCAPE_SEQUENCE_RE = re.compile(r'''
| \\[\\'"abfnrtv] # Single-character escapes
)'''.format(_HEXCHAR*8, _HEXCHAR*4, _HEXCHAR*2), re.UNICODE | re.VERBOSE)
+
def _decode_escapes(s):
def decode_match(match):
return codecs.decode(match.group(0), 'unicode-escape')
return _ESCAPE_SEQUENCE_RE.sub(decode_match, s)
+
def parse_kv(args, check_raw=False):
'''
Convert a string of key/value items to a dict. If any free-form params
@@ -50,9 +54,7 @@ def parse_kv(args, check_raw=False):
they will simply be ignored.
'''
- ### FIXME: args should already be a unicode string
- from ansible.utils.unicode import to_unicode
- args = to_unicode(args, nonstring='passthru')
+ args = to_text(args, nonstring='passthru')
options = {}
if args is not None:
@@ -60,7 +62,7 @@ def parse_kv(args, check_raw=False):
vargs = split_args(args)
except ValueError as ve:
if 'no closing quotation' in str(ve).lower():
- raise AnsibleParsingError("error parsing argument string, try quoting the entire line.")
+ raise AnsibleParserError("error parsing argument string, try quoting the entire line.")
else:
raise
@@ -99,6 +101,7 @@ def parse_kv(args, check_raw=False):
return options
+
def _get_quote_state(token, quote_char):
'''
the goal of this block is to determine if the quoted string
@@ -118,6 +121,7 @@ def _get_quote_state(token, quote_char):
quote_char = cur_char
return quote_char
+
def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
'''
this function counts the number of opening/closing blocks for a
@@ -132,6 +136,7 @@ def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
cur_depth = 0
return cur_depth
+
def split_args(args):
'''
Splits args on whitespace, but intelligently reassembles
@@ -166,9 +171,9 @@ def split_args(args):
quote_char = None
inside_quotes = False
- print_depth = 0 # used to count nested jinja2 {{ }} blocks
- block_depth = 0 # used to count nested jinja2 {% %} blocks
- comment_depth = 0 # used to count nested jinja2 {# #} blocks
+ print_depth = 0 # used to count nested jinja2 {{ }} blocks
+ block_depth = 0 # used to count nested jinja2 {% %} blocks
+ comment_depth = 0 # used to count nested jinja2 {# #} blocks
# now we loop over each split chunk, coalescing tokens if the white space
# split occurred within quotes or a jinja2 block of some kind
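parse_kv now normalizes its input with to_text(..., nonstring='passthru') and raises the correctly spelled AnsibleParserError. A quick usage sketch of the two public helpers, assuming ansible is on the import path:

from ansible.parsing.splitter import parse_kv, split_args

# key=value parsing; quoted values keep embedded whitespace
print(parse_kv(u'src=/tmp/a.conf dest="/etc/app dir/a.conf" mode=0644'))

# split_args keeps quoted strings and jinja2 {{ }} blocks together
print(split_args(u'echo "hello world" msg={{ greeting | default("hi") }}'))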
diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py
index 42cebeb551..1845210d3d 100644
--- a/lib/ansible/parsing/vault/__init__.py
+++ b/lib/ansible/parsing/vault/__init__.py
@@ -29,16 +29,10 @@ from ansible.errors import AnsibleError
from hashlib import sha256
from binascii import hexlify
from binascii import unhexlify
-
-try:
- from __main__ import display
-except ImportError:
- from ansible.utils.display import Display
- display = Display()
+from hashlib import md5
# Note: Only used for loading obsolete VaultAES files. All files are written
# using the newer VaultAES256 which does not require md5
-from hashlib import md5
try:
from Crypto.Hash import SHA256, HMAC
@@ -67,6 +61,15 @@ try:
except ImportError:
HAS_AES = False
+from ansible.compat.six import PY3
+from ansible.module_utils._text import to_bytes, to_text
+
+try:
+ from __main__ import display
+except ImportError:
+ from ansible.utils.display import Display
+ display = Display()
+
# OpenSSL pbkdf2_hmac
HAS_PBKDF2HMAC = False
try:
@@ -81,12 +84,11 @@ except Exception as e:
import traceback
display.debug("Traceback from import of cryptography was {0}".format(traceback.format_exc()))
-from ansible.compat.six import PY3
-from ansible.utils.unicode import to_unicode, to_bytes
HAS_ANY_PBKDF2HMAC = HAS_PBKDF2 or HAS_PBKDF2HMAC
-CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
+CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform." \
+ " You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
b_HEADER = b'$ANSIBLE_VAULT'
HEADER = '$ANSIBLE_VAULT'
@@ -105,6 +107,7 @@ def check_prereqs():
class AnsibleVaultError(AnsibleError):
pass
+
def is_encrypted(b_data):
""" Test if this is vault encrypted data blob
@@ -116,6 +119,7 @@ def is_encrypted(b_data):
return True
return False
+
def is_encrypted_file(file_obj):
"""Test if the contents of a file obj are a vault encrypted data blob.
@@ -252,7 +256,7 @@ class VaultLib:
b_header = HEADER.encode('utf-8')
header = b';'.join([b_header, self.b_version,
- to_bytes(self.cipher_name,'utf-8',errors='strict')])
+ to_bytes(self.cipher_name,'utf-8', errors='strict')])
tmpdata = [header]
tmpdata += [b_data[i:i + 80] for i in range(0, len(b_data), 80)]
tmpdata += [b'']
@@ -278,7 +282,7 @@ class VaultLib:
tmpheader = tmpdata[0].strip().split(b';')
self.b_version = tmpheader[1].strip()
- self.cipher_name = to_unicode(tmpheader[2].strip())
+ self.cipher_name = to_text(tmpheader[2].strip())
clean_data = b''.join(tmpdata[1:])
return clean_data
@@ -306,7 +310,7 @@ class VaultEditor:
file_len = os.path.getsize(tmp_path)
- if file_len > 0: # avoid work when file was empty
+ if file_len > 0: # avoid work when file was empty
max_chunk_len = min(1024*1024*2, file_len)
passes = 3
@@ -321,7 +325,7 @@ class VaultEditor:
fh.write(data)
fh.write(data[:file_len % chunk_len])
- assert(fh.tell() == file_len) # FIXME remove this assert once we have unittests to check its accuracy
+ assert(fh.tell() == file_len) # FIXME remove this assert once we have unittests to check its accuracy
os.fsync(fh)
def _shred_file(self, tmp_path):
@@ -528,6 +532,7 @@ class VaultEditor:
return editor
+
class VaultFile(object):
def __init__(self, password, filename):
@@ -568,6 +573,7 @@ class VaultFile(object):
else:
return self.filename
+
########################################
# CIPHERS #
########################################
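The vault hunks reorder imports and move the header handling onto to_bytes/to_text. A standalone sketch of the envelope format the code above assembles and parses ($ANSIBLE_VAULT;version;cipher on the first line, then the hex payload wrapped at 80 columns); the payload here is a stand-in:

b_version = b'1.1'
cipher_name = u'AES256'
b_hex = b'61626364' * 60                      # stand-in for the real hex ciphertext

header = b';'.join([b'$ANSIBLE_VAULT', b_version, cipher_name.encode('utf-8')])
lines = [header] + [b_hex[i:i + 80] for i in range(0, len(b_hex), 80)] + [b'']
envelope = b'\n'.join(lines)

# parsing side: peel the header back off and recover the cipher name as text
first, rest = envelope.split(b'\n', 1)
_, version, cipher = first.strip().split(b';')
print(version, cipher.decode('utf-8'))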
diff --git a/lib/ansible/parsing/yaml/constructor.py b/lib/ansible/parsing/yaml/constructor.py
index a67481a781..0a53fb1c7f 100644
--- a/lib/ansible/parsing/yaml/constructor.py
+++ b/lib/ansible/parsing/yaml/constructor.py
@@ -22,12 +22,11 @@ __metaclass__ = type
from yaml.constructor import Constructor, ConstructorError
from yaml.nodes import MappingNode
+from ansible.module_utils._text import to_bytes
+from ansible.parsing.vault import VaultLib
from ansible.parsing.yaml.objects import AnsibleMapping, AnsibleSequence, AnsibleUnicode
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
-
from ansible.vars.unsafe_proxy import wrap_var
-from ansible.parsing.vault import VaultLib
-from ansible.utils.unicode import to_bytes
try:
from __main__ import display
@@ -74,7 +73,8 @@ class AnsibleConstructor(Constructor):
"found unacceptable key (%s)" % exc, key_node.start_mark)
if key in mapping:
- display.warning(u'While constructing a mapping from {1}, line {2}, column {3}, found a duplicate dict key ({0}). Using last defined value only.'.format(key, *mapping.ansible_pos))
+ display.warning(u'While constructing a mapping from {1}, line {2}, column {3}, found a duplicate dict key ({0}).'
+ u' Using last defined value only.'.format(key, *mapping.ansible_pos))
value = self.construct_object(value_node, deep=deep)
mapping[key] = value
diff --git a/lib/ansible/parsing/yaml/objects.py b/lib/ansible/parsing/yaml/objects.py
index 60553d5cb4..ea0b274c3d 100644
--- a/lib/ansible/parsing/yaml/objects.py
+++ b/lib/ansible/parsing/yaml/objects.py
@@ -22,8 +22,7 @@ __metaclass__ = type
import yaml
from ansible.compat.six import text_type
-from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
class AnsibleBaseYAMLObject(object):
diff --git a/lib/ansible/playbook/base.py b/lib/ansible/playbook/base.py
index 60cda79070..77fd6890b5 100644
--- a/lib/ansible/playbook/base.py
+++ b/lib/ansible/playbook/base.py
@@ -19,25 +19,22 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import collections
import itertools
import operator
import uuid
-from copy import copy as shallowcopy, deepcopy
+from copy import copy as shallowcopy
from functools import partial
-from inspect import getmembers
-
-from ansible.compat.six import iteritems, string_types, with_metaclass
from jinja2.exceptions import UndefinedError
+from ansible.compat.six import iteritems, string_types, with_metaclass
from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable
-from ansible.parsing.dataloader import DataLoader
+from ansible.module_utils._text import to_text
from ansible.playbook.attribute import Attribute, FieldAttribute
+from ansible.parsing.dataloader import DataLoader
from ansible.utils.boolean import boolean
from ansible.utils.vars import combine_vars, isidentifier
-from ansible.utils.unicode import to_unicode
try:
from __main__ import display
@@ -52,6 +49,7 @@ def _generic_g(prop_name, self):
except KeyError:
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
+
def _generic_g_method(prop_name, self):
try:
if self._squashed:
@@ -61,6 +59,7 @@ def _generic_g_method(prop_name, self):
except KeyError:
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, prop_name))
+
def _generic_g_parent(prop_name, self):
try:
value = self._attributes[prop_name]
@@ -74,12 +73,15 @@ def _generic_g_parent(prop_name, self):
return value
+
def _generic_s(prop_name, self, value):
self._attributes[prop_name] = value
+
def _generic_d(prop_name, self):
del self._attributes[prop_name]
+
class BaseMeta(type):
"""
@@ -142,6 +144,7 @@ class BaseMeta(type):
return super(BaseMeta, cls).__new__(cls, name, parents, dct)
+
class Base(with_metaclass(BaseMeta, object)):
# connection/transport
@@ -376,7 +379,7 @@ class Base(with_metaclass(BaseMeta, object)):
# and make sure the attribute is of the type it should be
if value is not None:
if attribute.isa == 'string':
- value = to_unicode(value)
+ value = to_text(value)
elif attribute.isa == 'int':
value = int(value)
elif attribute.isa == 'float':
@@ -395,8 +398,8 @@ class Base(with_metaclass(BaseMeta, object)):
elif not isinstance(value, list):
if isinstance(value, string_types) and attribute.isa == 'barelist':
display.deprecated(
- "Using comma separated values for a list has been deprecated. " \
- "You should instead use the correct YAML syntax for lists. " \
+ "Using comma separated values for a list has been deprecated. "
+ "You should instead use the correct YAML syntax for lists. "
)
value = value.split(',')
else:
@@ -532,4 +535,3 @@ class Base(with_metaclass(BaseMeta, object)):
setattr(self, '_uuid', data.get('uuid'))
self._finalized = data.get('finalized', False)
self._squashed = data.get('squashed', False)
-
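The attribute hunk above switches the 'string' coercion to to_text. A simplified, standalone sketch of that isa-driven coercion (not the class's actual implementation), assuming ansible is importable:

from ansible.module_utils._text import to_text

def coerce(value, isa):
    # simplified mirror of the isa handling in Base.post_validate-style code
    if value is None:
        return None
    if isa == 'string':
        return to_text(value)
    if isa == 'int':
        return int(value)
    if isa == 'list' and not isinstance(value, list):
        return [value]
    return value

print(coerce(42, 'string'), coerce('3', 'int'), coerce('x', 'list'))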
diff --git a/lib/ansible/playbook/task.py b/lib/ansible/playbook/task.py
index f5bd653314..e987045438 100644
--- a/lib/ansible/playbook/task.py
+++ b/lib/ansible/playbook/task.py
@@ -22,12 +22,10 @@ __metaclass__ = type
import os
from ansible.compat.six import iteritems, string_types
-
from ansible.errors import AnsibleError, AnsibleParserError
-
+from ansible.module_utils._text import to_native
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping, AnsibleUnicode
-
from ansible.plugins import lookup_loader
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
@@ -38,7 +36,6 @@ from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
-from ansible.utils.unicode import to_str
try:
from __main__ import display
@@ -182,7 +179,7 @@ class Task(Base, Conditional, Taggable, Become):
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
- raise AnsibleParserError(to_str(e), obj=ds)
+ raise AnsibleParserError(to_native(e), obj=ds)
# the command/shell/script modules used to support the `cmd` arg,
# which corresponds to what we now call _raw_params, so move that
@@ -232,11 +229,11 @@ class Task(Base, Conditional, Taggable, Become):
def _load_loop_control(self, attr, ds):
if not isinstance(ds, dict):
- raise AnsibleParserError(
- "the `loop_control` value must be specified as a dictionary and cannot " \
- "be a variable itself (though it can contain variables)",
- obj=ds,
- )
+ raise AnsibleParserError(
+ "the `loop_control` value must be specified as a dictionary and cannot "
+ "be a variable itself (though it can contain variables)",
+ obj=ds,
+ )
return LoopControl.load(data=ds, variable_manager=self._variable_manager, loader=self._loader)
@@ -267,18 +264,18 @@ class Task(Base, Conditional, Taggable, Become):
return dict()
elif isinstance(value, list):
- if len(value) == 1:
+ if len(value) == 1:
return templar.template(value[0], convert_bare=True)
else:
env = []
for env_item in value:
if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
- env[env_item] = templar.template(env_item, convert_bare=True)
+ env[env_item] = templar.template(env_item, convert_bare=True)
elif isinstance(value, dict):
env = dict()
for env_item in value:
if isinstance(env_item, (string_types, AnsibleUnicode)) and env_item in templar._available_variables.keys():
- env[env_item] = templar.template(value[env_item], convert_bare=True)
+ env[env_item] = templar.template(value[env_item], convert_bare=True)
# at this point it should be a simple string
return templar.template(value, convert_bare=True)
@@ -436,7 +433,7 @@ class Task(Base, Conditional, Taggable, Become):
'''
path_stack = []
- dep_chain = self.get_dep_chain()
+ dep_chain = self.get_dep_chain()
# inside role: add the dependency chain from current to dependant
if dep_chain:
path_stack.extend(reversed([x._role_path for x in dep_chain]))
@@ -452,4 +449,3 @@ class Task(Base, Conditional, Taggable, Become):
if self._parent:
return self._parent.all_parents_static()
return True
-
diff --git a/lib/ansible/plugins/__init__.py b/lib/ansible/plugins/__init__.py
index 0a2a0b1474..f5a6f112a8 100644
--- a/lib/ansible/plugins/__init__.py
+++ b/lib/ansible/plugins/__init__.py
@@ -31,7 +31,8 @@ import warnings
from collections import defaultdict
from ansible import constants as C
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
+
try:
from __main__ import display
@@ -44,9 +45,11 @@ MODULE_CACHE = {}
PATH_CACHE = {}
PLUGIN_PATH_CACHE = {}
+
def get_all_plugin_loaders():
return [(name, obj) for (name, obj) in inspect.getmembers(sys.modules[__name__]) if isinstance(obj, PluginLoader)]
+
class PluginLoader:
'''
@@ -72,11 +75,11 @@ class PluginLoader:
self.config = config
- if not class_name in MODULE_CACHE:
+ if class_name not in MODULE_CACHE:
MODULE_CACHE[class_name] = {}
- if not class_name in PATH_CACHE:
+ if class_name not in PATH_CACHE:
PATH_CACHE[class_name] = None
- if not class_name in PLUGIN_PATH_CACHE:
+ if class_name not in PLUGIN_PATH_CACHE:
PLUGIN_PATH_CACHE[class_name] = defaultdict(dict)
self._module_cache = MODULE_CACHE[class_name]
@@ -140,9 +143,9 @@ class PluginLoader:
results = []
results.append(dir)
for root, subdirs, files in os.walk(dir, followlinks=True):
- if '__init__.py' in files:
- for x in subdirs:
- results.append(os.path.join(root,x))
+ if '__init__.py' in files:
+ for x in subdirs:
+ results.append(os.path.join(root,x))
return results
def _get_package_paths(self):
@@ -250,7 +253,7 @@ class PluginLoader:
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
except OSError as e:
- display.warning("Error accessing plugin paths: %s" % to_unicode(e))
+ display.warning("Error accessing plugin paths: %s" % to_text(e))
for full_path in (f for f in full_paths if os.path.isfile(f) and not f.endswith('__init__.py')):
full_name = os.path.basename(full_path)
@@ -358,7 +361,7 @@ class PluginLoader:
def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
msg = 'Loading %s \'%s\' from %s' % (class_name, os.path.basename(name), path)
-
+
if len(searched_paths) > 1:
msg = '%s (searched paths: %s)' % (msg, self.format_paths(searched_paths))
@@ -389,7 +392,7 @@ class PluginLoader:
try:
obj = getattr(self._module_cache[path], self.class_name)
except AttributeError as e:
- display.warning("Skipping plugin (%s) as it seems to be invalid: %s" % (path, to_unicode(e)))
+ display.warning("Skipping plugin (%s) as it seems to be invalid: %s" % (path, to_text(e)))
continue
if self.base_class:
@@ -398,11 +401,11 @@ class PluginLoader:
module = __import__(self.package, fromlist=[self.base_class])
# Check whether this obj has the required base class.
try:
- plugin_class = getattr(module, self.base_class)
+ plugin_class = getattr(module, self.base_class)
except AttributeError:
- continue
+ continue
if not issubclass(obj, plugin_class):
- continue
+ continue
self._display_plugin_load(self.class_name, name, self._searched_paths, path,
found_in_cache=found_in_cache, class_only=class_only)
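Besides the to_text swap, the PluginLoader hunks tidy the membership tests (class_name not in MODULE_CACHE) and re-indent the package walk. A standalone sketch of that walk: keep the starting directory plus the subdirectories of every level that is itself a Python package (carries an __init__.py); the starting path is just an example:

import os

def all_directories(top):
    results = [top]
    for root, subdirs, files in os.walk(top, followlinks=True):
        if '__init__.py' in files:
            results.extend(os.path.join(root, d) for d in subdirs)
    return results

print(len(all_directories(os.path.dirname(os.__file__))))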
diff --git a/lib/ansible/plugins/action/__init__.py b/lib/ansible/plugins/action/__init__.py
index ef9920033d..efe0e869cb 100644
--- a/lib/ansible/plugins/action/__init__.py
+++ b/lib/ansible/plugins/action/__init__.py
@@ -35,9 +35,10 @@ from ansible.compat.six import binary_type, text_type, iteritems, with_metaclass
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.executor.module_common import modify_module
-from ansible.release import __version__
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.utils.jsonify import jsonify
-from ansible.utils.unicode import to_bytes, to_str, to_unicode
+from ansible.release import __version__
+
try:
from __main__ import display
@@ -86,7 +87,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
* Module parameters. These are stored in self._task.args
"""
# store the module invocation details into the results
- results = {}
+ results = {}
if self._task.async == 0:
results['invocation'] = dict(
module_name = self._task.action,
@@ -146,7 +147,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
"run 'git submodule update --init --recursive' to correct this problem." % (module_name))
# insert shared code and arguments into the module
- (module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args, task_vars=task_vars, module_compression=self._play_context.module_compression)
+ (module_data, module_style, module_shebang) = modify_module(module_name, module_path, module_args,
+ task_vars=task_vars, module_compression=self._play_context.module_compression)
return (module_style, module_shebang, module_data, module_path)
@@ -283,10 +285,10 @@ class ActionBase(with_metaclass(ABCMeta, object)):
afd, afile = tempfile.mkstemp()
afo = os.fdopen(afd, 'wb')
try:
- data = to_bytes(data, errors='strict')
+ data = to_bytes(data, errors='surrogate_or_strict')
afo.write(data)
except Exception as e:
- raise AnsibleError("failure writing module data to temporary file for transfer: %s" % str(e))
+ raise AnsibleError("failure writing module data to temporary file for transfer: %s" % to_native(e))
afo.flush()
afo.close()
@@ -372,17 +374,22 @@ class ActionBase(with_metaclass(ABCMeta, object)):
res = self._remote_chown(remote_paths, self._play_context.become_user)
if res['rc'] != 0 and remote_user == 'root':
# chown failed even if remove_user is root
- raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root. Unprivileged become user would be unable to read the file.')
+ raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root.'
+ ' Unprivileged become user would be unable to read the file.')
elif res['rc'] != 0:
if C.ALLOW_WORLD_READABLE_TMPFILES:
# chown and fs acls failed -- do things this insecure
# way only if the user opted in in the config file
- display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user which may be insecure. For information on securing this, see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
+ display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user.'
+ ' This may be insecure. For information on securing this, see'
+ ' https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
res = self._remote_chmod(remote_paths, 'a+%s' % mode)
if res['rc'] != 0:
raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))
else:
- raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user (rc: {0}, err: {1}). For information on working around this, see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], res['stderr']))
+ raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user'
+ ' (rc: {0}, err: {1}). For information on working around this,'
+ ' see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user'.format(res['rc'], res['stderr']))
elif execute:
# Can't depend on the file being transferred with execute
# permissions. Only need user perms because no become was
@@ -438,7 +445,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
mystat['stat']['checksum'] = '1'
# happens sometimes when it is a dir and not on bsd
- if not 'checksum' in mystat['stat']:
+ if 'checksum' not in mystat['stat']:
mystat['stat']['checksum'] = ''
return mystat['stat']
@@ -453,26 +460,25 @@ class ActionBase(with_metaclass(ABCMeta, object)):
3 = its a directory, not a file
4 = stat module failed, likely due to not finding python
'''
- x = "0" # unknown error has occured
+ x = "0" # unknown error has occured
try:
remote_stat = self._execute_remote_stat(path, all_vars, follow=follow)
if remote_stat['exists'] and remote_stat['isdir']:
- x = "3" # its a directory not a file
+ x = "3" # its a directory not a file
else:
- x = remote_stat['checksum'] # if 1, file is missing
+ x = remote_stat['checksum'] # if 1, file is missing
except AnsibleError as e:
- errormsg = to_unicode(e)
- if errormsg.endswith('Permission denied'):
- x = "2" # cannot read file
- elif errormsg.endswith('MODULE FAILURE'):
- x = "4" # python not found or module uncaught exception
+ errormsg = to_text(e)
+ if errormsg.endswith(u'Permission denied'):
+ x = "2" # cannot read file
+ elif errormsg.endswith(u'MODULE FAILURE'):
+ x = "4" # python not found or module uncaught exception
finally:
return x
-
def _remote_expand_user(self, path):
''' takes a remote path and performs tilde expansion on the remote host '''
- if not path.startswith('~'): # FIXME: Windows paths may start with "~ instead of just ~
+ if not path.startswith('~'): # FIXME: Windows paths may start with "~ instead of just ~
return path
# FIXME: Can't use os.path.sep for Windows paths.
@@ -681,7 +687,8 @@ class ActionBase(with_metaclass(ABCMeta, object)):
tmp_rm_res = self._low_level_execute_command(tmp_rm_cmd, sudoable=False)
tmp_rm_data = self._parse_returned_data(tmp_rm_res)
if tmp_rm_data.get('rc', 0) != 0:
- display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'), tmp_rm_res.get('stderr', 'No error string available.')))
+ display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(tmp_rm_res.get('rc'),
+ tmp_rm_res.get('stderr', 'No error string available.')))
# parse the main result
data = self._parse_returned_data(res)
@@ -709,7 +716,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
data['exception'] = res['stderr']
return data
- def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='replace'):
+ def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='surrogate_or_replace'):
'''
This is the function which executes the low level shell command, which
may be commands to create/remove directories for temporary files, or to
@@ -758,16 +765,16 @@ class ActionBase(with_metaclass(ABCMeta, object)):
# stdout and stderr may be either a file-like or a bytes object.
# Convert either one to a text type
if isinstance(stdout, binary_type):
- out = to_unicode(stdout, errors=encoding_errors)
+ out = to_text(stdout, errors=encoding_errors)
elif not isinstance(stdout, text_type):
- out = to_unicode(b''.join(stdout.readlines()), errors=encoding_errors)
+ out = to_text(b''.join(stdout.readlines()), errors=encoding_errors)
else:
out = stdout
if isinstance(stderr, binary_type):
- err = to_unicode(stderr, errors=encoding_errors)
+ err = to_text(stderr, errors=encoding_errors)
elif not isinstance(stderr, text_type):
- err = to_unicode(b''.join(stderr.readlines()), errors=encoding_errors)
+ err = to_text(b''.join(stderr.readlines()), errors=encoding_errors)
else:
err = stderr
@@ -871,7 +878,6 @@ class ActionBase(with_metaclass(ABCMeta, object)):
result = self._loader.path_dwim_relative_stack(path_stack, dirname, needle)
if result is None:
- raise AnsibleError("Unable to find '%s' in expected paths." % to_str(needle))
+ raise AnsibleError("Unable to find '%s' in expected paths." % to_native(needle))
return result
-
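_low_level_execute_command now normalizes stdout/stderr with to_text and a default of errors='surrogate_or_replace'. A standalone sketch of that normalization for either a bytes object or a file-like object; the stdlib 'replace' handler stands in for ansible's composite handler and the names are illustrative:

import io

def normalize(stream_or_bytes, errors='replace'):
    # bytes -> decode; file-like -> read everything, then decode; text -> as-is
    if isinstance(stream_or_bytes, bytes):
        return stream_or_bytes.decode('utf-8', errors=errors)
    if not isinstance(stream_or_bytes, str):
        return b''.join(stream_or_bytes.readlines()).decode('utf-8', errors=errors)
    return stream_or_bytes

print(normalize(b'ok\n'))
print(normalize(io.BytesIO(b'line1\nline2\n')))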
diff --git a/lib/ansible/plugins/action/assemble.py b/lib/ansible/plugins/action/assemble.py
index 4d1f7a6cc0..f02b9bfa25 100644
--- a/lib/ansible/plugins/action/assemble.py
+++ b/lib/ansible/plugins/action/assemble.py
@@ -24,10 +24,10 @@ import tempfile
import re
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_native, to_text
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
from ansible.utils.hashing import checksum_s
-from ansible.utils.unicode import to_str, to_unicode
class ActionModule(ActionBase):
@@ -42,7 +42,7 @@ class ActionModule(ActionBase):
delimit_me = False
add_newline = False
- for f in (to_unicode(p, errors='strict') for p in sorted(os.listdir(src_path))):
+ for f in (to_text(p, errors='surrogate_or_strict') for p in sorted(os.listdir(src_path))):
if compiled_regexp and not compiled_regexp.search(f):
continue
fragment = u"%s/%s" % (src_path, f)
@@ -114,7 +114,7 @@ class ActionModule(ActionBase):
src = self._find_needle('files', src)
except AnsibleError as e:
result['failed'] = True
- result['msg'] = to_str(e)
+ result['msg'] = to_native(e)
return result
if not os.path.isdir(src):
diff --git a/lib/ansible/plugins/action/async.py b/lib/ansible/plugins/action/async.py
index 5e1bc467d2..6c348c7d9f 100644
--- a/lib/ansible/plugins/action/async.py
+++ b/lib/ansible/plugins/action/async.py
@@ -22,9 +22,10 @@ import pipes
import random
from ansible import constants as C
-from ansible.plugins.action import ActionBase
from ansible.compat.six import iteritems
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
+from ansible.plugins.action import ActionBase
+
class ActionModule(ActionBase):
@@ -76,7 +77,7 @@ class ActionModule(ActionBase):
elif module_style == 'old':
args_data = ""
for k, v in iteritems(module_args):
- args_data += '%s="%s" ' % (k, pipes.quote(to_unicode(v)))
+ args_data += '%s="%s" ' % (k, pipes.quote(to_text(v)))
argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), args_data)
remote_paths = tmp, remote_module_path, remote_async_module_path
@@ -100,7 +101,7 @@ class ActionModule(ActionBase):
if not self._should_remove_tmp_path(tmp):
async_cmd.append("-preserve_tmp")
- async_cmd = " ".join([to_unicode(x) for x in async_cmd])
+ async_cmd = " ".join(to_text(x) for x in async_cmd)
result.update(self._low_level_execute_command(cmd=async_cmd))
result['changed'] = True
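For old-style modules the async action still builds a k=v argument string, now converting each value to text before quoting it. A standalone sketch of the same idea using shlex.quote in place of the deprecated pipes.quote used above; the argument dict is made up:

import shlex

module_args = {u'name': u'httpd', u'state': u'started', u'note': u'needs "quotes"'}
args_data = ' '.join('%s=%s' % (k, shlex.quote(str(v)))
                     for k, v in sorted(module_args.items()))
print(args_data)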
diff --git a/lib/ansible/plugins/action/copy.py b/lib/ansible/plugins/action/copy.py
index 140c714c27..f71ecadc77 100644
--- a/lib/ansible/plugins/action/copy.py
+++ b/lib/ansible/plugins/action/copy.py
@@ -24,10 +24,10 @@ import os
import tempfile
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
from ansible.utils.hashing import checksum
-from ansible.utils.unicode import to_bytes, to_str, to_unicode
class ActionModule(ActionBase):
@@ -81,7 +81,7 @@ class ActionModule(ActionBase):
source = content_tempfile
except Exception as err:
result['failed'] = True
- result['msg'] = "could not write content temp file: %s" % to_str(err)
+ result['msg'] = "could not write content temp file: %s" % to_native(err)
return result
# if we have first_available_file in our vars
@@ -91,19 +91,19 @@ class ActionModule(ActionBase):
elif remote_src:
result.update(self._execute_module(module_name='copy', module_args=self._task.args, task_vars=task_vars, delete_remote_tmp=False))
return result
- else: # find in expected paths
+ else: # find in expected paths
try:
source = self._find_needle('files', source)
except AnsibleError as e:
result['failed'] = True
- result['msg'] = to_unicode(e)
+ result['msg'] = to_text(e)
return result
# A list of source file tuples (full_path, relative_path) which will try to copy to the destination
source_files = []
# If source is a directory populate our list else source is a file and translate it to a tuple.
- if os.path.isdir(to_bytes(source, errors='strict')):
+ if os.path.isdir(to_bytes(source, errors='surrogate_or_strict')):
# Get the amount of spaces to remove to get the relative path.
if source_trailing_slash:
sz = len(source)
@@ -113,7 +113,7 @@ class ActionModule(ActionBase):
# Walk the directory and append the file tuples to source_files.
for base_path, sub_folders, files in os.walk(to_bytes(source)):
for file in files:
- full_path = to_unicode(os.path.join(base_path, file), errors='strict')
+ full_path = to_text(os.path.join(base_path, file), errors='surrogate_or_strict')
rel_path = full_path[sz:]
if rel_path.startswith('/'):
rel_path = rel_path[1:]
@@ -247,7 +247,9 @@ class ActionModule(ActionBase):
if 'content' in new_module_args:
del new_module_args['content']
- module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
+ module_return = self._execute_module(module_name='copy',
+ module_args=new_module_args, task_vars=task_vars,
+ tmp=tmp, delete_remote_tmp=delete_remote_tmp)
module_executed = True
else:
@@ -272,7 +274,9 @@ class ActionModule(ActionBase):
)
# Execute the file module.
- module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
+ module_return = self._execute_module(module_name='file',
+ module_args=new_module_args, task_vars=task_vars,
+ tmp=tmp, delete_remote_tmp=delete_remote_tmp)
module_executed = True
if not module_return.get('checksum'):
diff --git a/lib/ansible/plugins/action/debug.py b/lib/ansible/plugins/action/debug.py
index aa2bee4768..f9f87adef5 100644
--- a/lib/ansible/plugins/action/debug.py
+++ b/lib/ansible/plugins/action/debug.py
@@ -20,8 +20,8 @@ __metaclass__ = type
from ansible.compat.six import string_types
from ansible.errors import AnsibleUndefinedVariable
+from ansible.module_utils._text import to_text
from ansible.plugins.action import ActionBase
-from ansible.utils.unicode import to_unicode
class ActionModule(ActionBase):
@@ -66,7 +66,7 @@ class ActionModule(ActionBase):
if isinstance(self._task.args['var'], (list, dict)):
# If var is a list or dict, use the type as key to display
- result[to_unicode(type(self._task.args['var']))] = results
+ result[to_text(type(self._task.args['var']))] = results
else:
result[self._task.args['var']] = results
else:
diff --git a/lib/ansible/plugins/action/fetch.py b/lib/ansible/plugins/action/fetch.py
index 693c8ddc8d..69cec30742 100644
--- a/lib/ansible/plugins/action/fetch.py
+++ b/lib/ansible/plugins/action/fetch.py
@@ -21,11 +21,11 @@ import os
import base64
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
from ansible.utils.hashing import checksum, checksum_s, md5, secure_hash
from ansible.utils.path import makedirs_safe
-from ansible.utils.unicode import to_bytes
class ActionModule(ActionBase):
@@ -160,7 +160,7 @@ class ActionModule(ActionBase):
self._connection.fetch_file(source, dest)
else:
try:
- f = open(to_bytes(dest, errors='strict'), 'w')
+ f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
f.write(remote_data)
f.close()
except (IOError, OSError) as e:
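The fetch change both adopts surrogate_or_strict for the destination path and, more importantly, opens the destination in binary mode ('wb'), since remote_data is bytes. A standalone sketch of that write path with made-up data:

import os
import tempfile

remote_data = b'\x89PNG\r\n\x1a\n fake binary payload'   # illustrative bytes
dest = os.path.join(tempfile.mkdtemp(), 'fetched.bin')

with open(dest, 'wb') as f:      # 'wb', not 'w': the payload is bytes
    f.write(remote_data)

print(os.path.getsize(dest))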
diff --git a/lib/ansible/plugins/action/include_vars.py b/lib/ansible/plugins/action/include_vars.py
index b1867d48f5..97c9a8c68d 100644
--- a/lib/ansible/plugins/action/include_vars.py
+++ b/lib/ansible/plugins/action/include_vars.py
@@ -22,8 +22,8 @@ from os import path, walk
import re
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_native
from ansible.plugins.action import ActionBase
-from ansible.utils.unicode import to_str
class ActionModule(ActionBase):
@@ -137,7 +137,7 @@ class ActionModule(ActionBase):
results.update(updated_results)
except AnsibleError as e:
- err_msg = to_str(e)
+ err_msg = to_native(e)
raise AnsibleError(err_msg)
if self.return_results_as_name:
diff --git a/lib/ansible/plugins/action/net_config.py b/lib/ansible/plugins/action/net_config.py
index 216320bfc4..5cb3e646b9 100644
--- a/lib/ansible/plugins/action/net_config.py
+++ b/lib/ansible/plugins/action/net_config.py
@@ -26,10 +26,12 @@ import glob
import urlparse
from ansible.plugins.action import ActionBase
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
+
PRIVATE_KEYS_RE = re.compile('__.+__')
+
class ActionModule(ActionBase):
TRANSFERS_FILES = False
@@ -56,7 +58,6 @@ class ActionModule(ActionBase):
result['__backup__'])
result['backup_path'] = filepath
-
# strip out any keys that have two leading and two trailing
# underscore characters
for key in result.keys():
@@ -98,7 +99,7 @@ class ActionModule(ActionBase):
try:
with open(source, 'r') as f:
- template_data = to_unicode(f.read())
+ template_data = to_text(f.read())
except IOError:
return dict(failed=True, msg='unable to load src file')
@@ -114,5 +115,3 @@ class ActionModule(ActionBase):
searchpath.append(os.path.dirname(source))
self._templar.environment.loader.searchpath = searchpath
self._task.args['src'] = self._templar.template(template_data)
-
-
diff --git a/lib/ansible/plugins/action/net_template.py b/lib/ansible/plugins/action/net_template.py
index 1561e7e215..f76506aaae 100644
--- a/lib/ansible/plugins/action/net_template.py
+++ b/lib/ansible/plugins/action/net_template.py
@@ -19,18 +19,18 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import sys
import os
import time
import glob
import urlparse
+from ansible.module_utils._text import to_text
from ansible.plugins.action import ActionBase
-from ansible.utils.boolean import boolean
-from ansible.utils.unicode import to_unicode
+
BOOLEANS = ('true', 'false', 'yes', 'no')
+
class ActionModule(ActionBase):
TRANSFERS_FILES = False
@@ -92,7 +92,7 @@ class ActionModule(ActionBase):
try:
with open(source, 'r') as f:
- template_data = to_unicode(f.read())
+ template_data = to_text(f.read())
except IOError:
return dict(failed=True, msg='unable to load src file')
@@ -108,5 +108,3 @@ class ActionModule(ActionBase):
searchpath.append(os.path.dirname(source))
self._templar.environment.loader.searchpath = searchpath
self._task.args['src'] = self._templar.template(template_data)
-
-
diff --git a/lib/ansible/plugins/action/patch.py b/lib/ansible/plugins/action/patch.py
index db1a2eaba7..94bcf67746 100644
--- a/lib/ansible/plugins/action/patch.py
+++ b/lib/ansible/plugins/action/patch.py
@@ -20,10 +20,10 @@ __metaclass__ = type
import os
+from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_native
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
-from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_str
class ActionModule(ActionBase):
@@ -52,7 +52,7 @@ class ActionModule(ActionBase):
src = self._find_needle('files', src)
except AnsibleError as e:
result['failed'] = True
- result['msg'] = to_str(e)
+ result['msg'] = to_native(e)
return result
# create the remote tmp dir if needed, and put the source file there
diff --git a/lib/ansible/plugins/action/script.py b/lib/ansible/plugins/action/script.py
index 286142fafc..44cbff02e1 100644
--- a/lib/ansible/plugins/action/script.py
+++ b/lib/ansible/plugins/action/script.py
@@ -19,15 +19,14 @@ __metaclass__ = type
import os
-from ansible.plugins.action import ActionBase
from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_str
+from ansible.module_utils._text import to_native
+from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
TRANSFERS_FILES = True
-
def run(self, tmp=None, task_vars=None):
''' handler for file transfer operations '''
if task_vars is None:
@@ -74,7 +73,7 @@ class ActionModule(ActionBase):
try:
source = self._loader.get_real_file(self._find_needle('files', source))
except AnsibleError as e:
- return dict(failed=True, msg=to_str(e))
+ return dict(failed=True, msg=to_native(e))
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source))
diff --git a/lib/ansible/plugins/action/template.py b/lib/ansible/plugins/action/template.py
index 895ac835c8..1daca6073e 100644
--- a/lib/ansible/plugins/action/template.py
+++ b/lib/ansible/plugins/action/template.py
@@ -23,12 +23,11 @@ import pwd
import time
from ansible import constants as C
+from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum_s
from ansible.utils.boolean import boolean
-from ansible.utils.unicode import to_bytes, to_unicode, to_str
-from ansible.errors import AnsibleError
-
class ActionModule(ActionBase):
@@ -78,7 +77,7 @@ class ActionModule(ActionBase):
source = self._find_needle('templates', source)
except AnsibleError as e:
result['failed'] = True
- result['msg'] = to_str(e)
+ result['msg'] = to_native(e)
if 'failed' in result:
return result
@@ -96,7 +95,7 @@ class ActionModule(ActionBase):
b_source = to_bytes(source)
try:
with open(b_source, 'r') as f:
- template_data = to_unicode(f.read())
+ template_data = to_text(f.read())
try:
template_uid = pwd.getpwuid(os.stat(b_source).st_uid).pw_name
@@ -163,7 +162,7 @@ class ActionModule(ActionBase):
if self._play_context.diff:
diff = self._get_diff_data(dest, resultant, task_vars, source_file=False)
- if not self._play_context.check_mode: # do actual work thorugh copy
+ if not self._play_context.check_mode: # do actual work through copy
xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant)
# fix file permissions when the copy is done as a different user
@@ -176,7 +175,7 @@ class ActionModule(ActionBase):
dest=dest,
original_basename=os.path.basename(source),
follow=True,
- ),
+ ),
)
result.update(self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))
diff --git a/lib/ansible/plugins/action/unarchive.py b/lib/ansible/plugins/action/unarchive.py
index 5884cb4a95..70cde2e97d 100644
--- a/lib/ansible/plugins/action/unarchive.py
+++ b/lib/ansible/plugins/action/unarchive.py
@@ -20,10 +20,11 @@ __metaclass__ = type
import os
+from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_native
from ansible.plugins.action import ActionBase
from ansible.utils.boolean import boolean
-from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_str
+
class ActionModule(ActionBase):
@@ -82,7 +83,7 @@ class ActionModule(ActionBase):
source = self._loader.get_real_file(self._find_needle('files', source))
except AnsibleError as e:
result['failed'] = True
- result['msg'] = to_str(e)
+ result['msg'] = to_native(e)
self._remove_tmp_path(tmp)
return result
diff --git a/lib/ansible/plugins/action/win_reboot.py b/lib/ansible/plugins/action/win_reboot.py
index ea2021be70..48056c32db 100644
--- a/lib/ansible/plugins/action/win_reboot.py
+++ b/lib/ansible/plugins/action/win_reboot.py
@@ -19,26 +19,24 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.plugins.action import ActionBase
-from ansible.utils.boolean import boolean
-from ansible.utils.unicode import to_unicode
-from ansible.errors import AnsibleUndefinedVariable
-
import socket
import time
-import traceback
from datetime import datetime, timedelta
+from ansible.plugins.action import ActionBase
+
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
+
class TimedOutException(Exception):
pass
+
class ActionModule(ActionBase):
TRANSFERS_FILES = False
@@ -50,7 +48,7 @@ class ActionModule(ActionBase):
def do_until_success_or_timeout(self, what, timeout_sec, what_desc, fail_sleep_sec=1):
max_end_time = datetime.utcnow() + timedelta(seconds=timeout_sec)
-
+
while datetime.utcnow() < max_end_time:
try:
what()
@@ -82,7 +80,7 @@ class ActionModule(ActionBase):
winrm_port = self._connection._winrm_port
result = super(ActionModule, self).run(tmp, task_vars)
-
+
# initiate reboot
(rc, stdout, stderr) = self._connection.exec_command("shutdown /r /t %d" % pre_reboot_delay_sec)
@@ -92,7 +90,7 @@ class ActionModule(ActionBase):
result['msg'] = "Shutdown command failed, error text was %s" % stderr
return result
- def raise_if_port_open():
+ def raise_if_port_open():
try:
sock = socket.create_connection((winrm_host, winrm_port), connect_timeout_sec)
sock.close()
@@ -137,4 +135,3 @@ class ActionModule(ActionBase):
result['msg'] = toex.message
return result
-
diff --git a/lib/ansible/plugins/cache/jsonfile.py b/lib/ansible/plugins/cache/jsonfile.py
index 1ee183e094..f2f42f3cc2 100644
--- a/lib/ansible/plugins/cache/jsonfile.py
+++ b/lib/ansible/plugins/cache/jsonfile.py
@@ -31,9 +31,9 @@ except ImportError:
from ansible import constants as C
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes
from ansible.parsing.utils.jsonify import jsonify
from ansible.plugins.cache.base import BaseCacheModule
-from ansible.utils.unicode import to_bytes
try:
from __main__ import display
diff --git a/lib/ansible/plugins/callback/__init__.py b/lib/ansible/plugins/callback/__init__.py
index 19ced6d49a..d02e5ac3be 100644
--- a/lib/ansible/plugins/callback/__init__.py
+++ b/lib/ansible/plugins/callback/__init__.py
@@ -24,12 +24,10 @@ import difflib
import warnings
from copy import deepcopy
-from ansible.compat.six import string_types
-
from ansible import constants as C
-from ansible.vars import strip_internal_keys
+from ansible.module_utils._text import to_text
from ansible.utils.color import stringc
-from ansible.utils.unicode import to_unicode
+from ansible.vars import strip_internal_keys
try:
from __main__ import display as global_display
@@ -37,14 +35,15 @@ except ImportError:
from ansible.utils.display import Display
global_display = Display()
-__all__ = ["CallbackBase"]
-
try:
from __main__ import cli
except ImportError:
- # using API w/o cli
+ # using API w/o cli
cli = False
+__all__ = ["CallbackBase"]
+
+
class CallbackBase:
'''
@@ -146,8 +145,8 @@ class CallbackBase:
after_header = "after: %s" % diff['after_header']
else:
after_header = 'after'
- differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True),
- to_unicode(diff['after']).splitlines(True),
+ differ = difflib.unified_diff(to_text(diff['before']).splitlines(True),
+ to_text(diff['after']).splitlines(True),
fromfile=before_header,
tofile=after_header,
fromfiledate='',
@@ -166,7 +165,7 @@ class CallbackBase:
if has_diff:
ret.append('\n')
if 'prepared' in diff:
- ret.append(to_unicode(diff['prepared']))
+ ret.append(to_text(diff['prepared']))
except UnicodeDecodeError:
ret.append(">> the files are different, but the diff library cannot compare unicode strings\n\n")
return u''.join(ret)
@@ -362,4 +361,3 @@ class CallbackBase:
def v2_runner_retry(self, result):
pass
-
diff --git a/lib/ansible/plugins/callback/junit.py b/lib/ansible/plugins/callback/junit.py
index 067c7551b0..9ce0dfa172 100644
--- a/lib/ansible/plugins/callback/junit.py
+++ b/lib/ansible/plugins/callback/junit.py
@@ -21,8 +21,8 @@ __metaclass__ = type
import os
import time
+from ansible.module_utils._text import to_bytes
from ansible.plugins.callback import CallbackBase
-from ansible.utils.unicode import to_bytes
try:
from junit_xml import TestSuite, TestCase
@@ -40,6 +40,7 @@ except ImportError:
except ImportError:
HAS_ORDERED_DICT = False
+
class CallbackModule(CallbackBase):
"""
This callback writes playbook output to a JUnit formatted XML file.
@@ -181,7 +182,7 @@ class CallbackModule(CallbackBase):
output_file = os.path.join(self._output_dir, '%s-%s.xml' % (self._playbook_name, time.time()))
with open(output_file, 'wb') as xml:
- xml.write(to_bytes(report, errors='strict'))
+ xml.write(to_bytes(report, errors='surrogate_or_strict'))
def v2_playbook_on_start(self, playbook):
self._playbook_path = playbook._file_name
diff --git a/lib/ansible/plugins/callback/log_plays.py b/lib/ansible/plugins/callback/log_plays.py
index 7b708a74ba..f82ebda9e1 100644
--- a/lib/ansible/plugins/callback/log_plays.py
+++ b/lib/ansible/plugins/callback/log_plays.py
@@ -23,9 +23,10 @@ import os
import time
import json
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.plugins.callback import CallbackBase
+
# NOTE: in Ansible 1.2 or later general logging is available without
# this plugin, just set ANSIBLE_LOG_PATH as an environment variable
# or log_path in the DEFAULTS section of your ansible configuration
diff --git a/lib/ansible/plugins/callback/mail.py b/lib/ansible/plugins/callback/mail.py
index 3169b4c6e3..65bdc5bbbe 100644
--- a/lib/ansible/plugins/callback/mail.py
+++ b/lib/ansible/plugins/callback/mail.py
@@ -25,9 +25,10 @@ import smtplib
import json
from ansible.compat.six import string_types
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
from ansible.plugins.callback import CallbackBase
+
def mail(subject='Ansible error mail', sender=None, to=None, cc=None, bcc=None, body=None, smtphost=None):
if sender is None:
@@ -84,7 +85,7 @@ class CallbackModule(CallbackBase):
if ignore_errors:
return
sender = '"Ansible: %s" <root>' % host
- attach = res._task.action
+ attach = res._task.action
if 'invocation' in res._result:
attach = "%s: %s" % (res._result['invocation']['module_name'], json.dumps(res._result['invocation']['module_args']))
diff --git a/lib/ansible/plugins/callback/tree.py b/lib/ansible/plugins/callback/tree.py
index f067197cbc..e403d99fd2 100644
--- a/lib/ansible/plugins/callback/tree.py
+++ b/lib/ansible/plugins/callback/tree.py
@@ -20,10 +20,10 @@ __metaclass__ = type
import os
+from ansible.constants import TREE_DIR
+from ansible.module_utils._text import to_bytes
from ansible.plugins.callback import CallbackBase
from ansible.utils.path import makedirs_safe
-from ansible.utils.unicode import to_bytes
-from ansible.constants import TREE_DIR
class CallbackModule(CallbackBase):
@@ -68,4 +68,3 @@ class CallbackModule(CallbackBase):
def v2_runner_on_unreachable(self, result):
self.result_to_tree(result)
-
diff --git a/lib/ansible/plugins/connection/__init__.py b/lib/ansible/plugins/connection/__init__.py
index edd6b498e8..43921822e4 100644
--- a/lib/ansible/plugins/connection/__init__.py
+++ b/lib/ansible/plugins/connection/__init__.py
@@ -1,4 +1,3 @@
-
# (c) 2015 Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
@@ -32,8 +31,9 @@ from ansible.compat.six import with_metaclass
from ansible import constants as C
from ansible.compat.six import string_types
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins import shell_loader
-from ansible.utils.unicode import to_bytes, to_unicode
+
try:
from __main__ import display
@@ -138,9 +138,9 @@ class ConnectionBase(with_metaclass(ABCMeta, object)):
# exception, it merely mangles the output:
# >>> shlex.split(u't e')
# ['t\x00\x00\x00', '\x00\x00\x00e\x00\x00\x00']
- return [to_unicode(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
+ return [to_text(x.strip()) for x in shlex.split(to_bytes(argstring)) if x.strip()]
except AttributeError:
- return [to_unicode(x.strip()) for x in shlex.split(argstring) if x.strip()]
+ return [to_text(x.strip()) for x in shlex.split(argstring) if x.strip()]
@abstractproperty
def transport(self):
diff --git a/lib/ansible/plugins/connection/accelerate.py b/lib/ansible/plugins/connection/accelerate.py
index 110e2bb2da..757350467a 100644
--- a/lib/ansible/plugins/connection/accelerate.py
+++ b/lib/ansible/plugins/connection/accelerate.py
@@ -27,10 +27,11 @@ import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleConnectionFailure
+from ansible.module_utils._text import to_bytes
from ansible.parsing.utils.jsonify import jsonify
from ansible.plugins.connection import ConnectionBase
from ansible.utils.encrypt import key_for_hostname, keyczar_encrypt, keyczar_decrypt
-from ansible.utils.unicode import to_bytes
+
try:
from __main__ import display
@@ -211,7 +212,7 @@ class Connection(ConnectionBase):
''' transfer a file from local to remote '''
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)
- in_path = to_bytes(in_path, errors='strict')
+ in_path = to_bytes(in_path, errors='surrogate_or_strict')
if not os.path.exists(in_path):
raise AnsibleFileNotFound("file or module does not exist: %s" % in_path)
@@ -265,7 +266,7 @@ class Connection(ConnectionBase):
if self.send_data(data):
raise AnsibleError("failed to initiate the file fetch with %s" % self._play_context.remote_addr)
- fh = open(to_bytes(out_path, errors='strict'), "w")
+ fh = open(to_bytes(out_path, errors='surrogate_or_strict'), "w")
try:
bytes = 0
while True:
diff --git a/lib/ansible/plugins/connection/chroot.py b/lib/ansible/plugins/connection/chroot.py
index d9f499a11c..d067027a91 100644
--- a/lib/ansible/plugins/connection/chroot.py
+++ b/lib/ansible/plugins/connection/chroot.py
@@ -28,9 +28,10 @@ import traceback
from ansible import constants as C
from ansible.errors import AnsibleError
-from ansible.plugins.connection import ConnectionBase, BUFSIZE
from ansible.module_utils.basic import is_executable
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
+from ansible.plugins.connection import ConnectionBase, BUFSIZE
+
try:
from __main__ import display
@@ -93,7 +94,7 @@ class Connection(ConnectionBase):
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd]
display.vvv("EXEC %s" % (local_cmd), host=self.chroot)
- local_cmd = [to_bytes(i, errors='strict') for i in local_cmd]
+ local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -129,7 +130,7 @@ class Connection(ConnectionBase):
out_path = pipes.quote(self._prefix_login_path(out_path))
try:
- with open(to_bytes(in_path, errors='strict'), 'rb') as in_file:
+ with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
except OSError:
@@ -155,7 +156,7 @@ class Connection(ConnectionBase):
except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot")
- with open(to_bytes(out_path, errors='strict'), 'wb+') as out_file:
+ with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
diff --git a/lib/ansible/plugins/connection/docker.py b/lib/ansible/plugins/connection/docker.py
index 7db04cba3e..f4b991ad51 100644
--- a/lib/ansible/plugins/connection/docker.py
+++ b/lib/ansible/plugins/connection/docker.py
@@ -35,8 +35,9 @@ from distutils.version import LooseVersion
import ansible.constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
+from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase, BUFSIZE
-from ansible.utils.unicode import to_bytes
+
try:
from __main__ import display
@@ -196,7 +197,7 @@ class Connection(ConnectionBase):
local_cmd = self._build_exec_cmd([self._play_context.executable, '-c', cmd])
display.vvv("EXEC %s" % (local_cmd,), host=self._play_context.remote_addr)
- local_cmd = [to_bytes(i, errors='strict') for i in local_cmd]
+ local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -223,7 +224,7 @@ class Connection(ConnectionBase):
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)
out_path = self._prefix_login_path(out_path)
- if not os.path.exists(to_bytes(in_path, errors='strict')):
+ if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound(
"file or module does not exist: %s" % in_path)
@@ -233,8 +234,8 @@ class Connection(ConnectionBase):
# Although docker version 1.8 and later provide support, the
# owner and group of the files are always set to root
args = self._build_exec_cmd([self._play_context.executable, "-c", "dd of=%s bs=%s" % (out_path, BUFSIZE)])
- args = [to_bytes(i, errors='strict') for i in args]
- with open(to_bytes(in_path, errors='strict'), 'rb') as in_file:
+ args = [to_bytes(i, errors='surrogate_or_strict') for i in args]
+ with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
try:
p = subprocess.Popen(args, stdin=in_file,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -256,7 +257,7 @@ class Connection(ConnectionBase):
out_dir = os.path.dirname(out_path)
args = [self.docker_cmd, "cp", "%s:%s" % (self._play_context.remote_addr, in_path), out_dir]
- args = [to_bytes(i, errors='strict') for i in args]
+ args = [to_bytes(i, errors='surrogate_or_strict') for i in args]
p = subprocess.Popen(args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
diff --git a/lib/ansible/plugins/connection/jail.py b/lib/ansible/plugins/connection/jail.py
index ac2cdf9891..93936a0a78 100644
--- a/lib/ansible/plugins/connection/jail.py
+++ b/lib/ansible/plugins/connection/jail.py
@@ -27,10 +27,9 @@ import pipes
import subprocess
import traceback
-from ansible import constants as C
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase, BUFSIZE
-from ansible.utils.unicode import to_bytes
try:
from __main__ import display
@@ -117,7 +116,7 @@ class Connection(ConnectionBase):
local_cmd += [self.jail, self._play_context.executable, '-c', set_env + cmd]
display.vvv("EXEC %s" % (local_cmd,), host=self.jail)
- local_cmd = [to_bytes(i, errors='strict') for i in local_cmd]
+ local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -153,7 +152,7 @@ class Connection(ConnectionBase):
out_path = pipes.quote(self._prefix_login_path(out_path))
try:
- with open(to_bytes(in_path, errors='strict'), 'rb') as in_file:
+ with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
except OSError:
@@ -179,7 +178,7 @@ class Connection(ConnectionBase):
except OSError:
raise AnsibleError("jail connection requires dd command in the jail")
- with open(to_bytes(out_path, errors='strict'), 'wb+') as out_file:
+ with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
diff --git a/lib/ansible/plugins/connection/libvirt_lxc.py b/lib/ansible/plugins/connection/libvirt_lxc.py
index 2b92e9c9dc..4ccb66f556 100644
--- a/lib/ansible/plugins/connection/libvirt_lxc.py
+++ b/lib/ansible/plugins/connection/libvirt_lxc.py
@@ -29,8 +29,9 @@ import traceback
from ansible import constants as C
from ansible.errors import AnsibleError
+from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase, BUFSIZE
-from ansible.utils.unicode import to_bytes
+
try:
from __main__ import display
@@ -94,7 +95,7 @@ class Connection(ConnectionBase):
local_cmd += [self.lxc, '--', executable, '-c', cmd]
display.vvv("EXEC %s" % (local_cmd,), host=self.lxc)
- local_cmd = [to_bytes(i, errors='strict') for i in local_cmd]
+ local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -130,7 +131,7 @@ class Connection(ConnectionBase):
out_path = pipes.quote(self._prefix_login_path(out_path))
try:
- with open(to_bytes(in_path, errors='strict'), 'rb') as in_file:
+ with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
except OSError:
@@ -156,7 +157,7 @@ class Connection(ConnectionBase):
except OSError:
raise AnsibleError("chroot connection requires dd command in the chroot")
- with open(to_bytes(out_path, errors='strict'), 'wb+') as out_file:
+ with open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
diff --git a/lib/ansible/plugins/connection/local.py b/lib/ansible/plugins/connection/local.py
index 0c7bb5ccb7..728522acda 100644
--- a/lib/ansible/plugins/connection/local.py
+++ b/lib/ansible/plugins/connection/local.py
@@ -30,8 +30,9 @@ from ansible.compat.six import text_type, binary_type
import ansible.constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound
+from ansible.module_utils._text import to_bytes, to_native
from ansible.plugins.connection import ConnectionBase
-from ansible.utils.unicode import to_bytes, to_str
+
try:
from __main__ import display
@@ -122,14 +123,14 @@ class Connection(ConnectionBase):
super(Connection, self).put_file(in_path, out_path)
display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._play_context.remote_addr)
- if not os.path.exists(to_bytes(in_path, errors='strict')):
- raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_str(in_path)))
+ if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
+ raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_native(in_path)))
try:
- shutil.copyfile(to_bytes(in_path, errors='strict'), to_bytes(out_path, errors='strict'))
+ shutil.copyfile(to_bytes(in_path, errors='surrogate_or_strict'), to_bytes(out_path, errors='surrogate_or_strict'))
except shutil.Error:
- raise AnsibleError("failed to copy: {0} and {1} are the same".format(to_str(in_path), to_str(out_path)))
+ raise AnsibleError("failed to copy: {0} and {1} are the same".format(to_native(in_path), to_native(out_path)))
except IOError as e:
- raise AnsibleError("failed to transfer file to {0}: {1}".format(to_str(out_path), to_str(e)))
+ raise AnsibleError("failed to transfer file to {0}: {1}".format(to_native(out_path), to_native(e)))
def fetch_file(self, in_path, out_path):
        ''' fetch a file from local to local -- for compatibility '''
diff --git a/lib/ansible/plugins/connection/lxc.py b/lib/ansible/plugins/connection/lxc.py
index 36c87bb593..34cb66c484 100644
--- a/lib/ansible/plugins/connection/lxc.py
+++ b/lib/ansible/plugins/connection/lxc.py
@@ -24,10 +24,6 @@ import traceback
import select
import fcntl
import errno
-from ansible import errors
-from ansible import constants as C
-from ansible.plugins.connection import ConnectionBase
-from ansible.utils.unicode import to_bytes
HAS_LIBLXC = False
try:
@@ -36,6 +32,12 @@ try:
except ImportError:
pass
+from ansible import constants as C
+from ansible import errors
+from ansible.module_utils._text import to_bytes
+from ansible.plugins.connection import ConnectionBase
+
+
class Connection(ConnectionBase):
''' Local lxc based connections '''
@@ -102,8 +104,8 @@ class Connection(ConnectionBase):
''' run a command on the chroot '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
- executable = to_bytes(self._play_context.executable, errors='strict')
- local_cmd = [executable, '-c', to_bytes(cmd, errors='strict')]
+ executable = to_bytes(self._play_context.executable, errors='surrogate_or_strict')
+ local_cmd = [executable, '-c', to_bytes(cmd, errors='surrogate_or_strict')]
read_stdout, write_stdout = None, None
read_stderr, write_stderr = None, None
@@ -154,8 +156,8 @@ class Connection(ConnectionBase):
''' transfer a file from local to lxc '''
super(Connection, self).put_file(in_path, out_path)
self._display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.container_name)
- in_path = to_bytes(in_path, errors='strict')
- out_path = to_bytes(out_path, errors='strict')
+ in_path = to_bytes(in_path, errors='surrogate_or_strict')
+ out_path = to_bytes(out_path, errors='surrogate_or_strict')
if not os.path.exists(in_path):
msg = "file or module does not exist: %s" % in_path
@@ -182,8 +184,8 @@ class Connection(ConnectionBase):
''' fetch a file from lxc to local '''
super(Connection, self).fetch_file(in_path, out_path)
self._display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.container_name)
- in_path = to_bytes(in_path, errors='strict')
- out_path = to_bytes(out_path, errors='strict')
+ in_path = to_bytes(in_path, errors='surrogate_or_strict')
+ out_path = to_bytes(out_path, errors='surrogate_or_strict')
try:
dst_file = open(out_path, "wb")
diff --git a/lib/ansible/plugins/connection/lxd.py b/lib/ansible/plugins/connection/lxd.py
index 8e225c973c..d822bcfa21 100644
--- a/lib/ansible/plugins/connection/lxd.py
+++ b/lib/ansible/plugins/connection/lxd.py
@@ -23,8 +23,8 @@ from distutils.spawn import find_executable
from subprocess import call, Popen, PIPE
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
+from ansible.module_utils._text import to_bytes, to_text
from ansible.plugins.connection import ConnectionBase
-from ansible.utils.unicode import to_bytes, to_unicode
class Connection(ConnectionBase):
@@ -61,14 +61,14 @@ class Connection(ConnectionBase):
local_cmd = [self._lxc_cmd, "exec", self._host, "--", self._play_context.executable, "-c", cmd]
- local_cmd = [to_bytes(i, errors='strict') for i in local_cmd]
- in_data = to_bytes(in_data, errors='strict', nonstring='passthru')
+ local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
+ in_data = to_bytes(in_data, errors='surrogate_or_strict', nonstring='passthru')
process = Popen(local_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
stdout, stderr = process.communicate(in_data)
- stdout = to_unicode(stdout)
- stderr = to_unicode(stderr)
+ stdout = to_text(stdout)
+ stderr = to_text(stderr)
if stderr == "error: Container is not running.\n":
raise AnsibleConnectionFailure("container not running: %s" % self._host)
@@ -84,12 +84,12 @@ class Connection(ConnectionBase):
self._display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self._host)
- if not os.path.isfile(to_bytes(in_path, errors='strict')):
+ if not os.path.isfile(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("input path is not a file: %s" % in_path)
local_cmd = [self._lxc_cmd, "file", "push", in_path, self._host + "/" + out_path]
- local_cmd = [to_bytes(i, errors='strict') for i in local_cmd]
+ local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
@@ -101,7 +101,7 @@ class Connection(ConnectionBase):
local_cmd = [self._lxc_cmd, "file", "pull", self._host + "/" + in_path, out_path]
- local_cmd = [to_bytes(i, errors='strict') for i in local_cmd]
+ local_cmd = [to_bytes(i, errors='surrogate_or_strict') for i in local_cmd]
call(local_cmd)
diff --git a/lib/ansible/plugins/connection/paramiko_ssh.py b/lib/ansible/plugins/connection/paramiko_ssh.py
index 2f8226e35d..4fdf0fe4bd 100644
--- a/lib/ansible/plugins/connection/paramiko_ssh.py
+++ b/lib/ansible/plugins/connection/paramiko_ssh.py
@@ -44,7 +44,7 @@ from ansible.compat.six.moves import input
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.plugins.connection import ConnectionBase
from ansible.utils.path import makedirs_safe
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
try:
from __main__ import display
@@ -52,6 +52,7 @@ except ImportError:
from ansible.utils.display import Display
display = Display()
+
AUTHENTICITY_MSG="""
paramiko: The authenticity of host '%s' can't be established.
The %s key fingerprint is %s.
@@ -273,7 +274,7 @@ class Connection(ConnectionBase):
display.vvv("EXEC %s" % cmd, host=self._play_context.remote_addr)
- cmd = to_bytes(cmd, errors='strict')
+ cmd = to_bytes(cmd, errors='surrogate_or_strict')
no_prompt_out = b''
no_prompt_err = b''
@@ -330,7 +331,7 @@ class Connection(ConnectionBase):
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self._play_context.remote_addr)
- if not os.path.exists(to_bytes(in_path, errors='strict')):
+ if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound("file or module does not exist: %s" % in_path)
try:
@@ -339,7 +340,7 @@ class Connection(ConnectionBase):
raise AnsibleError("failed to open a SFTP connection (%s)" % e)
try:
- self.sftp.put(to_bytes(in_path, errors='strict'), to_bytes(out_path, errors='strict'))
+ self.sftp.put(to_bytes(in_path, errors='surrogate_or_strict'), to_bytes(out_path, errors='surrogate_or_strict'))
except IOError:
raise AnsibleError("failed to transfer file to %s" % out_path)
@@ -365,7 +366,7 @@ class Connection(ConnectionBase):
raise AnsibleError("failed to open a SFTP connection (%s)", e)
try:
- self.sftp.get(to_bytes(in_path, errors='strict'), to_bytes(out_path, errors='strict'))
+ self.sftp.get(to_bytes(in_path, errors='surrogate_or_strict'), to_bytes(out_path, errors='surrogate_or_strict'))
except IOError:
raise AnsibleError("failed to transfer file from %s" % in_path)
diff --git a/lib/ansible/plugins/connection/ssh.py b/lib/ansible/plugins/connection/ssh.py
index def3f85403..fd99ccdef0 100644
--- a/lib/ansible/plugins/connection/ssh.py
+++ b/lib/ansible/plugins/connection/ssh.py
@@ -29,11 +29,11 @@ import subprocess
import time
from ansible import constants as C
+from ansible.compat.six import text_type, binary_type
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
+from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.plugins.connection import ConnectionBase
from ansible.utils.path import unfrackpath, makedirs_safe
-from ansible.utils.unicode import to_bytes, to_unicode, to_str
-from ansible.compat.six import text_type, binary_type
try:
from __main__ import display
@@ -107,7 +107,7 @@ class Connection(ConnectionBase):
explanation of why they were added.
"""
self._command += args
- display.vvvvv('SSH: ' + explanation + ': (%s)' % ')('.join(map(to_unicode, args)), host=self._play_context.remote_addr)
+ display.vvvvv('SSH: ' + explanation + ': (%s)' % ')('.join(map(to_text, args)), host=self._play_context.remote_addr)
def _build_command(self, binary, *other_args):
'''
@@ -222,7 +222,7 @@ class Connection(ConnectionBase):
# The directory must exist and be writable.
makedirs_safe(b_cpdir, 0o700)
if not os.access(b_cpdir, os.W_OK):
- raise AnsibleError("Cannot write to ControlPath %s" % to_str(cpdir))
+ raise AnsibleError("Cannot write to ControlPath %s" % to_native(cpdir))
args = ("-o", "ControlPath=" + C.ANSIBLE_SSH_CONTROL_PATH % dict(directory=cpdir))
self._add_args("found only ControlPersist; added ControlPath", args)
@@ -275,7 +275,7 @@ class Connection(ConnectionBase):
output = []
for b_line in b_chunk.splitlines(True):
- display_line = to_unicode(b_line, errors='replace').rstrip('\r\n')
+ display_line = to_text(b_line).rstrip('\r\n')
suppress_output = False
#display.debug("Examining line (source=%s, state=%s): '%s'" % (source, state, display_line))
@@ -314,7 +314,7 @@ class Connection(ConnectionBase):
Starts the command and communicates with it until it ends.
'''
- display_cmd = list(map(pipes.quote, map(to_unicode, cmd)))
+ display_cmd = list(map(pipes.quote, map(to_text, cmd)))
display.vvv(u'SSH: EXEC {0}'.format(u' '.join(display_cmd)), host=self.host)
# Start the given command. If we don't need to pipeline data, we can try
@@ -424,7 +424,7 @@ class Connection(ConnectionBase):
if p.poll() is not None:
break
self._terminate_process(p)
- raise AnsibleError('Timeout (%ds) waiting for privilege escalation prompt: %s' % (timeout, to_str(b_stdout)))
+ raise AnsibleError('Timeout (%ds) waiting for privilege escalation prompt: %s' % (timeout, to_native(b_stdout)))
# Read whatever output is available on stdout and stderr, and stop
# listening to the pipe if it's been closed.
@@ -434,14 +434,14 @@ class Connection(ConnectionBase):
if b_chunk == b'':
rpipes.remove(p.stdout)
b_tmp_stdout += b_chunk
- display.debug("stdout chunk (state=%s):\n>>>%s<<<\n" % (state, to_unicode(b_chunk, errors='replace')))
+ display.debug("stdout chunk (state=%s):\n>>>%s<<<\n" % (state, to_text(b_chunk)))
if p.stderr in rfd:
b_chunk = p.stderr.read()
if b_chunk == b'':
rpipes.remove(p.stderr)
b_tmp_stderr += b_chunk
- display.debug("stderr chunk (state=%s):\n>>>%s<<<\n" % (state, to_unicode(b_chunk, errors='replace')))
+ display.debug("stderr chunk (state=%s):\n>>>%s<<<\n" % (state, to_text(b_chunk)))
# We examine the output line-by-line until we have negotiated any
# privilege escalation prompt and subsequent success/error message.
@@ -631,8 +631,8 @@ class Connection(ConnectionBase):
super(Connection, self).put_file(in_path, out_path)
display.vvv(u"PUT {0} TO {1}".format(in_path, out_path), host=self.host)
- if not os.path.exists(to_bytes(in_path, errors='strict')):
- raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_str(in_path)))
+ if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
+ raise AnsibleFileNotFound("file or module does not exist: {0}".format(to_native(in_path)))
# scp and sftp require square brackets for IPv6 addresses, but
# accept them for hostnames and IPv4 addresses too.
@@ -649,7 +649,7 @@ class Connection(ConnectionBase):
(returncode, stdout, stderr) = self._run(cmd, in_data)
if returncode != 0:
- raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(to_str(out_path), to_str(stdout), to_str(stderr)))
+ raise AnsibleError("failed to transfer file to {0}:\n{1}\n{2}".format(to_native(out_path), to_native(stdout), to_native(stderr)))
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
diff --git a/lib/ansible/plugins/connection/winrm.py b/lib/ansible/plugins/connection/winrm.py
index 670d878cdd..fbc31ae4ed 100644
--- a/lib/ansible/plugins/connection/winrm.py
+++ b/lib/ansible/plugins/connection/winrm.py
@@ -26,9 +26,21 @@ import shlex
import traceback
import json
+HAVE_KERBEROS = False
+try:
+ import kerberos
+ HAVE_KERBEROS = True
+except ImportError:
+ pass
+
from ansible.compat.six import string_types
from ansible.compat.six.moves.urllib.parse import urlunsplit
from ansible.errors import AnsibleError, AnsibleConnectionFailure
+from ansible.errors import AnsibleFileNotFound
+from ansible.module_utils._text import to_bytes, to_native, to_text
+from ansible.plugins.connection import ConnectionBase
+from ansible.utils.hashing import secure_hash
+from ansible.utils.path import makedirs_safe
try:
import winrm
@@ -42,25 +54,13 @@ try:
except ImportError:
raise AnsibleError("xmltodict is not installed")
-HAVE_KERBEROS = False
-try:
- import kerberos
- HAVE_KERBEROS = True
-except ImportError:
- pass
-
-from ansible.errors import AnsibleFileNotFound
-from ansible.plugins.connection import ConnectionBase
-from ansible.utils.hashing import secure_hash
-from ansible.utils.path import makedirs_safe
-from ansible.utils.unicode import to_bytes, to_unicode, to_str
-
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
+
class Connection(ConnectionBase):
'''WinRM connections over HTTP/HTTPS.'''
@@ -156,10 +156,10 @@ class Connection(ConnectionBase):
return protocol
except Exception as e:
- err_msg = to_unicode(e).strip()
- if re.search(to_unicode(r'Operation\s+?timed\s+?out'), err_msg, re.I):
+ err_msg = to_text(e).strip()
+ if re.search(to_text(r'Operation\s+?timed\s+?out'), err_msg, re.I):
raise AnsibleError('the connection attempt timed out')
- m = re.search(to_unicode(r'Code\s+?(\d{3})'), err_msg)
+ m = re.search(to_text(r'Code\s+?(\d{3})'), err_msg)
if m:
code = int(m.groups()[0])
if code == 401:
@@ -167,9 +167,9 @@ class Connection(ConnectionBase):
elif code == 411:
return protocol
errors.append(u'%s: %s' % (transport, err_msg))
- display.vvvvv(u'WINRM CONNECTION ERROR: %s\n%s' % (err_msg, to_unicode(traceback.format_exc())), host=self._winrm_host)
+ display.vvvvv(u'WINRM CONNECTION ERROR: %s\n%s' % (err_msg, to_text(traceback.format_exc())), host=self._winrm_host)
if errors:
- raise AnsibleConnectionFailure(', '.join(map(to_str, errors)))
+ raise AnsibleConnectionFailure(', '.join(map(to_native, errors)))
else:
raise AnsibleError('No transport found for WinRM connection')
@@ -220,12 +220,12 @@ class Connection(ConnectionBase):
# TODO: check result from response and set stdin_push_failed if we have nonzero
if from_exec:
- display.vvvvv('WINRM RESULT %r' % to_unicode(response), host=self._winrm_host)
+ display.vvvvv('WINRM RESULT %r' % to_text(response), host=self._winrm_host)
else:
- display.vvvvvv('WINRM RESULT %r' % to_unicode(response), host=self._winrm_host)
+ display.vvvvvv('WINRM RESULT %r' % to_text(response), host=self._winrm_host)
- display.vvvvvv('WINRM STDOUT %s' % to_unicode(response.std_out), host=self._winrm_host)
- display.vvvvvv('WINRM STDERR %s' % to_unicode(response.std_err), host=self._winrm_host)
+ display.vvvvvv('WINRM STDOUT %s' % to_text(response.std_out), host=self._winrm_host)
+ display.vvvvvv('WINRM STDERR %s' % to_text(response.std_err), host=self._winrm_host)
if stdin_push_failed:
raise AnsibleError('winrm send_input failed; \nstdout: %s\nstderr %s' % (response.std_out, response.std_err))
@@ -250,7 +250,7 @@ class Connection(ConnectionBase):
def exec_command(self, cmd, in_data=None, sudoable=True):
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
cmd_parts = shlex.split(to_bytes(cmd), posix=False)
- cmd_parts = map(to_unicode, cmd_parts)
+ cmd_parts = map(to_text, cmd_parts)
script = None
cmd_ext = cmd_parts and self._shell._unquote(cmd_parts[0]).lower()[-4:] or ''
# Support running .ps1 files (via script/raw).
@@ -266,7 +266,7 @@ class Connection(ConnectionBase):
cmd_parts = self._shell._encode_script(script, as_list=True, strict_mode=False)
if '-EncodedCommand' in cmd_parts:
encoded_cmd = cmd_parts[cmd_parts.index('-EncodedCommand') + 1]
- decoded_cmd = to_unicode(base64.b64decode(encoded_cmd).decode('utf-16-le'))
+ decoded_cmd = to_text(base64.b64decode(encoded_cmd).decode('utf-16-le'))
display.vvv("EXEC %s" % decoded_cmd, host=self._winrm_host)
else:
display.vvv("EXEC %s" % cmd, host=self._winrm_host)
@@ -300,9 +300,9 @@ class Connection(ConnectionBase):
# FUTURE: determine buffer size at runtime via remote winrm config?
def _put_file_stdin_iterator(self, in_path, out_path, buffer_size=250000):
- in_size = os.path.getsize(to_bytes(in_path, errors='strict'))
+ in_size = os.path.getsize(to_bytes(in_path, errors='surrogate_or_strict'))
offset = 0
- with open(to_bytes(in_path, errors='strict'), 'rb') as in_file:
+ with open(to_bytes(in_path, errors='surrogate_or_strict'), 'rb') as in_file:
for out_data in iter((lambda:in_file.read(buffer_size)), ''):
offset += len(out_data)
self._display.vvvvv('WINRM PUT "%s" to "%s" (offset=%d size=%d)' % (in_path, out_path, offset, len(out_data)), host=self._winrm_host)
@@ -318,7 +318,7 @@ class Connection(ConnectionBase):
super(Connection, self).put_file(in_path, out_path)
out_path = self._shell._unquote(out_path)
display.vvv('PUT "%s" TO "%s"' % (in_path, out_path), host=self._winrm_host)
- if not os.path.exists(to_bytes(in_path, errors='strict')):
+ if not os.path.exists(to_bytes(in_path, errors='surrogate_or_strict')):
raise AnsibleFileNotFound('file or module does not exist: "%s"' % in_path)
script_template = u'''
@@ -357,7 +357,7 @@ class Connection(ConnectionBase):
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:], stdin_iterator=self._put_file_stdin_iterator(in_path, out_path))
# TODO: improve error handling
if result.status_code != 0:
- raise AnsibleError(to_str(result.std_err))
+ raise AnsibleError(to_native(result.std_err))
put_output = json.loads(result.std_out)
remote_sha1 = put_output.get("sha1")
@@ -368,7 +368,7 @@ class Connection(ConnectionBase):
local_sha1 = secure_hash(in_path)
if not remote_sha1 == local_sha1:
- raise AnsibleError("Remote sha1 hash {0} does not match local hash {1}".format(to_str(remote_sha1), to_str(local_sha1)))
+ raise AnsibleError("Remote sha1 hash {0} does not match local hash {1}".format(to_native(remote_sha1), to_native(local_sha1)))
def fetch_file(self, in_path, out_path):
super(Connection, self).fetch_file(in_path, out_path)
@@ -407,7 +407,7 @@ class Connection(ConnectionBase):
cmd_parts = self._shell._encode_script(script, as_list=True)
result = self._winrm_exec(cmd_parts[0], cmd_parts[1:])
if result.status_code != 0:
- raise IOError(to_str(result.std_err))
+ raise IOError(to_native(result.std_err))
if result.std_out.strip() == '[DIR]':
data = None
else:
@@ -418,9 +418,9 @@ class Connection(ConnectionBase):
else:
if not out_file:
# If out_path is a directory and we're expecting a file, bail out now.
- if os.path.isdir(to_bytes(out_path, errors='strict')):
+ if os.path.isdir(to_bytes(out_path, errors='surrogate_or_strict')):
break
- out_file = open(to_bytes(out_path, errors='strict'), 'wb')
+ out_file = open(to_bytes(out_path, errors='surrogate_or_strict'), 'wb')
out_file.write(data)
if len(data) < buffer_size:
break
diff --git a/lib/ansible/plugins/connection/zone.py b/lib/ansible/plugins/connection/zone.py
index eaf0a049b9..3c1322d3ec 100644
--- a/lib/ansible/plugins/connection/zone.py
+++ b/lib/ansible/plugins/connection/zone.py
@@ -31,7 +31,8 @@ import traceback
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.connection import ConnectionBase, BUFSIZE
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
+
try:
from __main__ import display
diff --git a/lib/ansible/plugins/filter/core.py b/lib/ansible/plugins/filter/core.py
index 0889cee591..24c3901b80 100644
--- a/lib/ansible/plugins/filter/core.py
+++ b/lib/ansible/plugins/filter/core.py
@@ -19,7 +19,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-
import sys
import base64
import itertools
@@ -39,15 +38,6 @@ import uuid
import yaml
from jinja2.filters import environmentfilter
-from ansible.compat.six import iteritems, string_types
-
-from ansible import errors
-from ansible.parsing.yaml.dumper import AnsibleDumper
-from ansible.utils.hashing import md5s, checksum_s
-from ansible.utils.unicode import unicode_wrap, to_unicode
-from ansible.utils.vars import merge_hash
-from ansible.vars.hostvars import HostVars
-from ansible.compat.six.moves import reduce
try:
import passlib.hash
@@ -55,6 +45,16 @@ try:
except:
HAS_PASSLIB = False
+from ansible import errors
+from ansible.compat.six import iteritems, string_types
+from ansible.compat.six.moves import reduce
+from ansible.module_utils._text import to_text
+from ansible.parsing.yaml.dumper import AnsibleDumper
+from ansible.utils.hashing import md5s, checksum_s
+from ansible.utils.unicode import unicode_wrap
+from ansible.utils.vars import merge_hash
+from ansible.vars.hostvars import HostVars
+
UUID_NAMESPACE_ANSIBLE = uuid.UUID('361E6D51-FAEC-444A-9079-341386DA8E2E')
@@ -72,12 +72,12 @@ class AnsibleJSONEncoder(json.JSONEncoder):
def to_yaml(a, *args, **kw):
'''Make verbose, human readable yaml'''
transformed = yaml.dump(a, Dumper=AnsibleDumper, allow_unicode=True, **kw)
- return to_unicode(transformed)
+ return to_text(transformed)
def to_nice_yaml(a, indent=4, *args, **kw):
'''Make verbose, human readable yaml'''
transformed = yaml.dump(a, Dumper=AnsibleDumper, indent=indent, allow_unicode=True, default_flow_style=False, **kw)
- return to_unicode(transformed)
+ return to_text(transformed)
def to_json(a, *args, **kw):
''' Convert the value to JSON '''
@@ -132,7 +132,7 @@ def fileglob(pathname):
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
''' Perform a `re.sub` returning a string '''
- value = to_unicode(value, errors='strict', nonstring='simplerepr')
+ value = to_text(value, errors='surrogate_or_strict', nonstring='simplerepr')
if ignorecase:
flags = re.I
diff --git a/lib/ansible/plugins/lookup/__init__.py b/lib/ansible/plugins/lookup/__init__.py
index 2a90321e40..7112b4ab0b 100644
--- a/lib/ansible/plugins/lookup/__init__.py
+++ b/lib/ansible/plugins/lookup/__init__.py
@@ -98,8 +98,8 @@ class LookupBase(with_metaclass(ABCMeta, object)):
must be converted into python's unicode type as the strings will be run
through jinja2 which has this requirement. You can use::
- from ansible.utils.unicode import to_unicode
- result_string = to_unicode(result_string)
+ from ansible.module_utils._text import to_text
+ result_string = to_text(result_string)
"""
pass
diff --git a/lib/ansible/plugins/lookup/csvfile.py b/lib/ansible/plugins/lookup/csvfile.py
index 936288e26f..195a69e56a 100644
--- a/lib/ansible/plugins/lookup/csvfile.py
+++ b/lib/ansible/plugins/lookup/csvfile.py
@@ -22,7 +22,8 @@ import csv
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
-from ansible.utils.unicode import to_bytes, to_str, to_unicode
+from ansible.module_utils._text import to_bytes, to_native, to_text
+
class CSVRecoder:
"""
@@ -49,7 +50,7 @@ class CSVReader:
def next(self):
row = self.reader.next()
- return [to_unicode(s) for s in row]
+ return [to_text(s) for s in row]
def __iter__(self):
return self
@@ -66,7 +67,7 @@ class LookupModule(LookupBase):
if row[0] == key:
return row[int(col)]
except Exception as e:
- raise AnsibleError("csvfile: %s" % to_str(e))
+ raise AnsibleError("csvfile: %s" % to_native(e))
return dflt
diff --git a/lib/ansible/plugins/lookup/fileglob.py b/lib/ansible/plugins/lookup/fileglob.py
index 66de4f1f45..3e0523fdd5 100644
--- a/lib/ansible/plugins/lookup/fileglob.py
+++ b/lib/ansible/plugins/lookup/fileglob.py
@@ -22,7 +22,8 @@ import glob
from ansible.plugins.lookup import LookupBase
from ansible.errors import AnsibleFileNotFound
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
+
class LookupModule(LookupBase):
@@ -36,6 +37,6 @@ class LookupModule(LookupBase):
except AnsibleFileNotFound:
dwimmed_path = None
if dwimmed_path:
- globbed = glob.glob(to_bytes(os.path.join(dwimmed_path, term_file), errors='strict'))
- ret.extend(to_unicode(g, errors='strict') for g in globbed if os.path.isfile(g))
+ globbed = glob.glob(to_bytes(os.path.join(dwimmed_path, term_file), errors='surrogate_or_strict'))
+ ret.extend(to_text(g, errors='surrogate_or_strict') for g in globbed if os.path.isfile(g))
return ret
diff --git a/lib/ansible/plugins/lookup/filetree.py b/lib/ansible/plugins/lookup/filetree.py
index f4d96af876..7ff5621214 100644
--- a/lib/ansible/plugins/lookup/filetree.py
+++ b/lib/ansible/plugins/lookup/filetree.py
@@ -22,12 +22,6 @@ import pwd
import grp
import stat
-from ansible.plugins.lookup import LookupBase
-from ansible.utils.unicode import to_str
-
-from __main__ import display
-warning = display.warning
-
HAVE_SELINUX=False
try:
import selinux
@@ -35,6 +29,11 @@ try:
except ImportError:
pass
+from ansible.plugins.lookup import LookupBase
+from ansible.module_utils._text import to_native
+
+from __main__ import display
+
# If selinux fails to find a default, return an array of None
def selinux_context(path):
@@ -43,7 +42,7 @@ def selinux_context(path):
try:
# note: the selinux module uses byte strings on python2 and text
# strings on python3
- ret = selinux.lgetfilecon_raw(to_str(path))
+ ret = selinux.lgetfilecon_raw(to_native(path))
except OSError:
return context
if ret[0] != -1:
@@ -60,7 +59,7 @@ def file_props(root, path):
try:
st = os.lstat(abspath)
except OSError as e:
- warning('filetree: Error using stat() on path %s (%s)' % (abspath, e))
+ display.warning('filetree: Error using stat() on path %s (%s)' % (abspath, e))
return None
ret = dict(root=root, path=path)
@@ -74,7 +73,7 @@ def file_props(root, path):
ret['state'] = 'file'
ret['src'] = abspath
else:
- warning('filetree: Error file type of %s is not supported' % abspath)
+ display.warning('filetree: Error file type of %s is not supported' % abspath)
return None
ret['uid'] = st.st_uid
diff --git a/lib/ansible/plugins/lookup/ini.py b/lib/ansible/plugins/lookup/ini.py
index 5425a115f8..cbe0d13e6d 100644
--- a/lib/ansible/plugins/lookup/ini.py
+++ b/lib/ansible/plugins/lookup/ini.py
@@ -30,7 +30,7 @@ except ImportError:
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes, to_text
def _parse_params(term):
@@ -59,13 +59,15 @@ class LookupModule(LookupBase):
def read_properties(self, filename, key, dflt, is_regexp):
config = StringIO()
- config.write(u'[java_properties]\n' + open(to_bytes(filename, errors='strict')).read())
+ current_cfg_file = open(to_bytes(filename, errors='surrogate_or_strict'), 'rb')
+
+ config.write(u'[java_properties]\n' + to_text(current_cfg_file.read(), errors='surrogate_or_strict'))
config.seek(0, os.SEEK_SET)
self.cp.readfp(config)
return self.get_value(key, 'java_properties', dflt, is_regexp)
def read_ini(self, filename, key, section, dflt, is_regexp):
- self.cp.readfp(open(to_bytes(filename, errors='strict')))
+ self.cp.readfp(open(to_bytes(filename, errors='surrogate_or_strict')))
return self.get_value(key, section, dflt, is_regexp)
def get_value(self, key, section, dflt, is_regexp):
diff --git a/lib/ansible/plugins/lookup/shelvefile.py b/lib/ansible/plugins/lookup/shelvefile.py
index 415bc358ed..e0ae4f9ad6 100644
--- a/lib/ansible/plugins/lookup/shelvefile.py
+++ b/lib/ansible/plugins/lookup/shelvefile.py
@@ -21,10 +21,10 @@ import shelve
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
-class LookupModule(LookupBase):
+class LookupModule(LookupBase):
def read_shelve(self, shelve_filename, key):
"""
@@ -66,7 +66,7 @@ class LookupModule(LookupBase):
if res is None:
raise AnsibleError("Key %s not found in shelve file %s" % (key, file))
# Convert the value read to string
- ret.append(to_unicode(res))
+ ret.append(to_text(res))
break
else:
raise AnsibleError("Could not locate shelve file in lookup: %s" % file)
diff --git a/lib/ansible/plugins/lookup/template.py b/lib/ansible/plugins/lookup/template.py
index 2c722945d0..4ab0ad1ed0 100644
--- a/lib/ansible/plugins/lookup/template.py
+++ b/lib/ansible/plugins/lookup/template.py
@@ -21,7 +21,7 @@ import os
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
-from ansible.utils.unicode import to_unicode, to_bytes
+from ansible.module_utils._text import to_bytes, to_text
try:
from __main__ import display
@@ -43,8 +43,8 @@ class LookupModule(LookupBase):
lookupfile = self.find_file_in_search_path(variables, 'templates', term)
display.vvvv("File lookup using %s as file" % lookupfile)
if lookupfile:
- with open(to_bytes(lookupfile, errors='strict'), 'r') as f:
- template_data = to_unicode(f.read())
+ with open(to_bytes(lookupfile, errors='surrogate_or_strict'), 'rb') as f:
+ template_data = to_text(f.read(), errors='surrogate_or_strict')
# set jinja2 internal search path for includes
if 'ansible_search_path' in variables:
diff --git a/lib/ansible/plugins/lookup/url.py b/lib/ansible/plugins/lookup/url.py
index be9d2e08b0..e84dbe0ae7 100644
--- a/lib/ansible/plugins/lookup/url.py
+++ b/lib/ansible/plugins/lookup/url.py
@@ -18,11 +18,11 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
+from ansible.compat.six.moves.urllib.error import HTTPError, URLError
from ansible.errors import AnsibleError
-from ansible.plugins.lookup import LookupBase
+from ansible.module_utils._text import to_text
from ansible.module_utils.urls import open_url, ConnectionError, SSLValidationError
-from ansible.utils.unicode import to_unicode
-from ansible.compat.six.moves.urllib.error import HTTPError, URLError
+from ansible.plugins.lookup import LookupBase
try:
from __main__ import display
@@ -52,5 +52,5 @@ class LookupModule(LookupBase):
raise AnsibleError("Error connecting to %s: %s" % (term, str(e)))
for line in response.read().splitlines():
- ret.append(to_unicode(line))
+ ret.append(to_text(line))
return ret
diff --git a/lib/ansible/plugins/shell/powershell.py b/lib/ansible/plugins/shell/powershell.py
index 5cbaae74b3..b883dead22 100644
--- a/lib/ansible/plugins/shell/powershell.py
+++ b/lib/ansible/plugins/shell/powershell.py
@@ -22,9 +22,9 @@ import os
import re
import shlex
-from ansible.compat.six import text_type
from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
+
_common_args = ['PowerShell', '-NoProfile', '-NonInteractive', '-ExecutionPolicy', 'Unrestricted']
@@ -34,6 +34,7 @@ _powershell_version = os.environ.get('POWERSHELL_VERSION', None)
if _powershell_version:
_common_args = ['PowerShell', '-Version', _powershell_version] + _common_args[1:]
+
class ShellModule(object):
# Common shell filenames that this plugin handles
@@ -60,7 +61,7 @@ class ShellModule(object):
raise AnsibleError("PowerShell environment value for key '%s' exceeds 32767 characters in length" % key)
# powershell single quoted literals need single-quote doubling as their only escaping
value = value.replace("'", "''")
- return text_type(value)
+ return to_text(value, errors='surrogate_or_strict')
def env_prefix(self, **kwargs):
env = self.env.copy()
@@ -164,7 +165,7 @@ class ShellModule(object):
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None):
cmd_parts = shlex.split(to_bytes(cmd), posix=False)
- cmd_parts = map(to_unicode, cmd_parts)
+ cmd_parts = map(to_text, cmd_parts)
if shebang and shebang.lower() == '#!powershell':
if not self._unquote(cmd_parts[0]).lower().endswith('.ps1'):
cmd_parts[0] = '"%s.ps1"' % self._unquote(cmd_parts[0])
@@ -219,7 +220,7 @@ class ShellModule(object):
def _unquote(self, value):
'''Remove any matching quotes that wrap the given value.'''
- value = to_unicode(value or '')
+ value = to_text(value or '')
m = re.match(r'^\s*?\'(.*?)\'\s*?$', value)
if m:
return m.group(1)
@@ -244,7 +245,7 @@ class ShellModule(object):
def _encode_script(self, script, as_list=False, strict_mode=True):
'''Convert a PowerShell script to a single base64-encoded command.'''
- script = to_unicode(script)
+ script = to_text(script)
if strict_mode:
script = u'Set-StrictMode -Version Latest\r\n%s' % script
script = '\n'.join([x.strip() for x in script.splitlines() if x.strip()])
diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py
index d2d18558b1..34324f92b4 100644
--- a/lib/ansible/plugins/strategy/__init__.py
+++ b/lib/ansible/plugins/strategy/__init__.py
@@ -19,18 +19,11 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import json
-import time
-import zlib
-from collections import defaultdict
-
from jinja2.exceptions import UndefinedError
from ansible.compat.six.moves import queue as Queue
-from ansible.compat.six import iteritems, text_type, string_types
-from ansible import constants as C
+from ansible.compat.six import iteritems, string_types
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
-from ansible.executor.play_iterator import PlayIterator
from ansible.executor.task_result import TaskResult
from ansible.inventory.host import Host
from ansible.inventory.group import Group
@@ -38,11 +31,10 @@ from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.included_file import IncludedFile
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
-from ansible.plugins import action_loader, connection_loader, filter_loader, lookup_loader, module_loader, test_loader
+from ansible.plugins import action_loader
from ansible.template import Templar
-from ansible.utils.unicode import to_unicode
-from ansible.vars.unsafe_proxy import wrap_var
from ansible.vars import combine_vars, strip_internal_keys
+from ansible.module_utils._text import to_text
try:
@@ -138,7 +130,7 @@ class StrategyBase:
ret_results = []
def get_original_host(host_name):
- host_name = to_unicode(host_name)
+ host_name = to_text(host_name)
if host_name in self._inventory._hosts_cache:
return self._inventory._hosts_cache[host_name]
else:
@@ -161,7 +153,7 @@ class StrategyBase:
target_handler_name = templar.template(handler_task.get_name())
if target_handler_name == handler_name:
return handler_task
- except (UndefinedError, AnsibleUndefinedVariable) as e:
+ except (UndefinedError, AnsibleUndefinedVariable):
# We skip this handler due to the fact that it may be using
# a variable in the name that was conditionally included via
# set_fact or some other method, and we don't want to error
@@ -182,7 +174,7 @@ class StrategyBase:
target_handler_name = templar.template(target_handler.get_name())
if target_handler_name == handler_name:
return True
- except (UndefinedError, AnsibleUndefinedVariable) as e:
+ except (UndefinedError, AnsibleUndefinedVariable):
pass
return parent_handler_match(target_handler._parent, handler_name)
else:
@@ -567,14 +559,13 @@ class StrategyBase:
# mark all of the hosts including this file as failed, send callbacks,
# and increment the stats for this host
for host in included_file._hosts:
- tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=to_unicode(e)))
+ tr = TaskResult(host=host, task=included_file._task, return_data=dict(failed=True, reason=to_text(e)))
iterator.mark_host_failed(host)
self._tqm._failed_hosts[host.name] = True
self._tqm._stats.increment('failures', host.name)
self._tqm.send_callback('v2_runner_on_failed', tr)
return []
-
# finally, send the callback and return the list of blocks loaded
self._tqm.send_callback('v2_playbook_on_include', included_file)
display.debug("done processing included file")
diff --git a/lib/ansible/plugins/strategy/free.py b/lib/ansible/plugins/strategy/free.py
index a3e54a66ef..7f92de60ac 100644
--- a/lib/ansible/plugins/strategy/free.py
+++ b/lib/ansible/plugins/strategy/free.py
@@ -26,7 +26,8 @@ from ansible.playbook.included_file import IncludedFile
from ansible.plugins import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
+
try:
from __main__ import display
@@ -66,8 +67,7 @@ class StrategyModule(StrategyBase):
break
work_to_do = False # assume we have no more work to do
- starting_host = last_host # save current position so we know when we've
- # looped back around and need to break
+ starting_host = last_host # save current position so we know when we've looped back around and need to break
# try and find an unblocked host with a task to run
host_results = []
@@ -109,7 +109,7 @@ class StrategyModule(StrategyBase):
display.debug("done getting variables")
try:
- task.name = to_unicode(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
+ task.name = to_text(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
display.debug("done templating")
except:
# just ignore any errors during task name templating,
@@ -120,10 +120,10 @@ class StrategyModule(StrategyBase):
run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)
if run_once:
if action and getattr(action, 'BYPASS_HOST_LOOP', False):
- raise AnsibleError("The '%s' module bypasses the host loop, which is currently not supported in the free strategy " \
+ raise AnsibleError("The '%s' module bypasses the host loop, which is currently not supported in the free strategy "
"and would instead execute for every host in the inventory list." % task.action, obj=task._ds)
else:
- display.warning("Using run_once with the free strategy is not currently supported. This task will still be " \
+ display.warning("Using run_once with the free strategy is not currently supported. This task will still be "
"executed for every host in the inventory list.")
# check to see if this task should be skipped, due to it being a member of a
@@ -143,7 +143,8 @@ class StrategyModule(StrategyBase):
# handle step if needed, skip meta actions as they are used internally
if not self._step or self._take_step(task, host_name):
if task.any_errors_fatal:
- display.warning("Using any_errors_fatal with the free strategy is not supported, as tasks are executed independently on each host")
+ display.warning("Using any_errors_fatal with the free strategy is not supported,"
+ " as tasks are executed independently on each host")
self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
self._queue_task(host, task, task_vars, play_context)
del task_vars
diff --git a/lib/ansible/plugins/strategy/linear.py b/lib/ansible/plugins/strategy/linear.py
index 50ba4a70eb..e5920200a1 100644
--- a/lib/ansible/plugins/strategy/linear.py
+++ b/lib/ansible/plugins/strategy/linear.py
@@ -29,7 +29,8 @@ from ansible.playbook.task import Task
from ansible.plugins import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_text
+
try:
from __main__ import display
@@ -243,7 +244,7 @@ class StrategyModule(StrategyBase):
saved_name = task.name
display.debug("done copying, going to template now")
try:
- task.name = to_unicode(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
+ task.name = to_text(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
display.debug("done templating")
except:
# just ignore any errors during task name templating,
@@ -368,7 +369,7 @@ class StrategyModule(StrategyBase):
for host in included_file._hosts:
self._tqm._failed_hosts[host.name] = True
iterator.mark_host_failed(host)
- display.error(to_unicode(e), wrap_text=False)
+ display.error(to_text(e), wrap_text=False)
include_failure = True
continue
diff --git a/lib/ansible/template/__init__.py b/lib/ansible/template/__init__.py
index 05ec12f03d..2eec3bf3c7 100644
--- a/lib/ansible/template/__init__.py
+++ b/lib/ansible/template/__init__.py
@@ -25,8 +25,8 @@ import os
import re
from io import StringIO
+from numbers import Number
-from ansible.compat.six import string_types, text_type, binary_type
from jinja2 import Environment
from jinja2.loaders import FileSystemLoader
from jinja2.exceptions import TemplateSyntaxError, UndefinedError
@@ -34,19 +34,20 @@ from jinja2.utils import concat as j2_concat
from jinja2.runtime import StrictUndefined
from ansible import constants as C
+from ansible.compat.six import string_types, text_type
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable
from ansible.plugins import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
from ansible.template.vars import AnsibleJ2Vars
-from ansible.utils.unicode import to_unicode, to_str
+from ansible.module_utils._text import to_native, to_text
+
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
-from numbers import Number
try:
from __main__ import display
@@ -107,6 +108,7 @@ def _escape_backslashes(data, jinja_env):
return data
+
def _count_newlines_from_end(in_str):
'''
Counts the number of newlines at the end of a string. This is used during
@@ -255,10 +257,10 @@ class Templar:
if prev_idx is not None:
# replace the opening
data.seek(prev_idx, os.SEEK_SET)
- data.write(to_unicode(self.environment.comment_start_string))
+ data.write(to_text(self.environment.comment_start_string))
# replace the closing
data.seek(token_start, os.SEEK_SET)
- data.write(to_unicode(self.environment.comment_end_string))
+ data.write(to_text(self.environment.comment_end_string))
else:
raise AnsibleError("Error while cleaning data for safety: unhandled regex match")
@@ -293,7 +295,7 @@ class Templar:
return self._clean_data(variable)
else:
# Do we need to convert these into text_type as well?
- # return self._clean_data(to_unicode(variable._obj, nonstring='passthru'))
+ # return self._clean_data(to_text(variable._obj, nonstring='passthru'))
return self._clean_data(variable._obj)
try:
@@ -330,7 +332,7 @@ class Templar:
if convert_data and not self._no_type_regex.match(variable):
# if this looks like a dictionary or list, convert it to such using the safe_eval method
if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
- result.startswith("[") or result in ("True", "False"):
+ result.startswith("[") or result in ("True", "False"):
eval_results = safe_eval(result, locals=self._available_variables, include_exceptions=True)
if eval_results[1] is None:
result = eval_results[0]
@@ -383,7 +385,7 @@ class Templar:
returns True if the data contains a variable pattern
'''
if isinstance(data, string_types):
- for marker in [self.environment.block_start_string, self.environment.variable_start_string, self.environment.comment_start_string]:
+ for marker in (self.environment.block_start_string, self.environment.variable_start_string, self.environment.comment_start_string):
if marker in data:
return True
return False
@@ -399,8 +401,9 @@ class Templar:
first_part = variable.split("|")[0].split(".")[0].split("[")[0]
if (contains_filters or first_part in self._available_variables) and self.environment.variable_start_string not in variable:
if bare_deprecated:
- display.deprecated("Using bare variables is deprecated. Update your playbooks so that the environment value uses the full variable syntax ('%s%s%s')" %
- (self.environment.variable_start_string, variable, self.environment.variable_end_string))
+ display.deprecated("Using bare variables is deprecated."
+ " Update your playbooks so that the environment value uses the full variable syntax ('%s%s%s')" %
+ (self.environment.variable_start_string, variable, self.environment.variable_end_string))
return "%s%s%s" % (self.environment.variable_start_string, variable, self.environment.variable_end_string)
# the variable didn't meet the conditions to be converted,
@@ -485,10 +488,10 @@ class Templar:
try:
t = myenv.from_string(data)
except TemplateSyntaxError as e:
- raise AnsibleError("template error while templating string: %s. String: %s" % (to_str(e), to_str(data)))
+ raise AnsibleError("template error while templating string: %s. String: %s" % (to_native(e), to_native(data)))
except Exception as e:
- if 'recursion' in to_str(e):
- raise AnsibleError("recursive loop detected in template string: %s" % to_str(data))
+ if 'recursion' in to_native(e):
+ raise AnsibleError("recursive loop detected in template string: %s" % to_native(data))
else:
return data
@@ -503,13 +506,13 @@ class Templar:
try:
res = j2_concat(rf)
except TypeError as te:
- if 'StrictUndefined' in to_str(te):
- errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_str(data)
- errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_str(te)
+ if 'StrictUndefined' in to_native(te):
+ errmsg = "Unable to look up a name or access an attribute in template string (%s).\n" % to_native(data)
+ errmsg += "Make sure your variable name does not contain invalid characters like '-': %s" % to_native(te)
raise AnsibleUndefinedVariable(errmsg)
else:
- display.debug("failing because of a type error, template data is: %s" % to_str(data))
- raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_str(data),to_str(te)))
+ display.debug("failing because of a type error, template data is: %s" % to_native(data))
+ raise AnsibleError("Unexpected templating type error occurred on (%s): %s" % (to_native(data),to_native(te)))
if preserve_trailing_newlines:
# The low level calls above do not preserve the newline
@@ -534,4 +537,3 @@ class Templar:
else:
#TODO: return warning about undefined var
return data
-
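
The to_str → to_native switch throughout template/__init__.py follows one convention: text that ends up inside a raised exception goes through to_native() (a byte str on Python 2, a text str on Python 3, which is what the exception machinery expects), while text handed to display goes through to_text(). A hedged sketch of the pattern, with a stand-in render() helper that is not part of the diff:

from ansible.errors import AnsibleError
from ansible.module_utils._text import to_native

def render(templar, data):
    # Illustrative wrapper only: shows the error-message convention used
    # in the hunks above.
    try:
        return templar.template(data)
    except Exception as e:
        # native str keeps tracebacks readable on both Python 2 and 3
        raise AnsibleError("template error while templating string: %s. String: %s"
                           % (to_native(e), to_native(data)))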
diff --git a/lib/ansible/template/vars.py b/lib/ansible/template/vars.py
index badf93b1e8..efb1ce734f 100644
--- a/lib/ansible/template/vars.py
+++ b/lib/ansible/template/vars.py
@@ -21,7 +21,8 @@ __metaclass__ = type
from ansible.compat.six import iteritems
from jinja2.utils import missing
-from ansible.utils.unicode import to_unicode
+from ansible.module_utils._text import to_native
+
__all__ = ['AnsibleJ2Vars']
@@ -88,7 +89,7 @@ class AnsibleJ2Vars:
try:
value = self._templar.template(variable)
except Exception as e:
- raise type(e)(to_unicode(variable) + ': ' + e.message)
+ raise type(e)(to_native(variable) + ': ' + to_native(e))
return value
def add_locals(self, locals):
@@ -99,4 +100,3 @@ class AnsibleJ2Vars:
if locals is None:
return self
return AnsibleJ2Vars(self._templar, self._globals, locals=locals, *self._extras)
-
diff --git a/lib/ansible/utils/display.py b/lib/ansible/utils/display.py
index 76ecb22715..8034f73ee3 100644
--- a/lib/ansible/utils/display.py
+++ b/lib/ansible/utils/display.py
@@ -35,7 +35,8 @@ from termios import TIOCGWINSZ
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.utils.color import stringc
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
+
try:
# Python 2
@@ -45,7 +46,6 @@ except NameError:
pass
-
logger = None
#TODO: make this a logging callback instead
if C.DEFAULT_LOG_PATH:
@@ -105,7 +105,7 @@ class Display:
""" Display a message to the user
Note: msg *must* be a unicode string to prevent UnicodeError tracebacks.
- """
+ """
# FIXME: this needs to be implemented
#msg = utils.sanitize_output(msg)
@@ -124,7 +124,7 @@ class Display:
# Convert back to text string on python3
# We first convert to a byte string so that we get rid of
# characters that are invalid in the user's locale
- msg2 = to_unicode(msg2, self._output_encoding(stderr=stderr))
+ msg2 = to_text(msg2, self._output_encoding(stderr=stderr))
if not stderr:
fileobj = sys.stdout
@@ -149,7 +149,7 @@ class Display:
# Convert back to text string on python3
# We first convert to a byte string so that we get rid of
# characters that are invalid in the user's locale
- msg2 = to_unicode(msg2, self._output_encoding(stderr=stderr))
+ msg2 = to_text(msg2, self._output_encoding(stderr=stderr))
if color == C.COLOR_ERROR:
logger.error(msg2)
@@ -279,7 +279,7 @@ class Display:
if sys.version_info >= (3,):
# Convert back into text on python3. We do this double conversion
# to get rid of characters that are illegal in the user's locale
- prompt_string = to_unicode(prompt_string)
+ prompt_string = to_text(prompt_string)
if private:
return getpass.getpass(msg)
@@ -323,7 +323,7 @@ class Display:
result = do_encrypt(result, encrypt, salt_size, salt)
# handle utf-8 chars
- result = to_unicode(result, errors='strict')
+ result = to_text(result, errors='surrogate_or_strict')
return result
@staticmethod
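
Several hunks in display.py (and in the files below) replace errors='strict' with errors='surrogate_or_strict'. On Python 3 that handler behaves like surrogateescape, so undecodable bytes survive a round trip instead of raising; plain codecs show the effect (Python 3 shown, byte value illustrative):

raw = b'caf\xe9'                               # latin-1 bytes, not valid utf-8
text = raw.decode('utf-8', 'surrogateescape')  # no UnicodeDecodeError
assert text.encode('utf-8', 'surrogateescape') == raw  # lossless round trip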
diff --git a/lib/ansible/utils/hashing.py b/lib/ansible/utils/hashing.py
index a486f3e7b0..3c5b125b0b 100644
--- a/lib/ansible/utils/hashing.py
+++ b/lib/ansible/utils/hashing.py
@@ -39,14 +39,14 @@ except ImportError:
_md5 = None
from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
def secure_hash_s(data, hash_func=sha1):
''' Return a secure hash hex digest of data. '''
digest = hash_func()
- data = to_bytes(data, errors='strict')
+ data = to_bytes(data, errors='surrogate_or_strict')
digest.update(data)
return digest.hexdigest()
@@ -54,12 +54,12 @@ def secure_hash_s(data, hash_func=sha1):
def secure_hash(filename, hash_func=sha1):
''' Return a secure hash hex digest of local file, None if file is not present or a directory. '''
- if not os.path.exists(to_bytes(filename, errors='strict')) or os.path.isdir(to_bytes(filename, errors='strict')):
+ if not os.path.exists(to_bytes(filename, errors='surrogate_or_strict')) or os.path.isdir(to_bytes(filename, errors='surrogate_or_strict')):
return None
digest = hash_func()
blocksize = 64 * 1024
try:
- infile = open(to_bytes(filename, errors='strict'), 'rb')
+ infile = open(to_bytes(filename, errors='surrogate_or_strict'), 'rb')
block = infile.read(blocksize)
while block:
digest.update(block)
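
Usage sketch for the two helpers touched above; the path is illustrative and the functions come from this tree's ansible.utils.hashing:

from ansible.utils.hashing import secure_hash, secure_hash_s

print(secure_hash_s(u'hello'))        # sha1 hex digest of the utf-8 bytes
print(secure_hash('/no/such/file'))   # None: missing files do not raise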
diff --git a/lib/ansible/utils/path.py b/lib/ansible/utils/path.py
index 989356a79a..0820b7f993 100644
--- a/lib/ansible/utils/path.py
+++ b/lib/ansible/utils/path.py
@@ -20,11 +20,12 @@ __metaclass__ = type
import os
from errno import EEXIST
from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_bytes, to_str, to_unicode
-from ansible.compat.six import PY2
+from ansible.module_utils._text import to_bytes, to_native, to_text
+
__all__ = ['unfrackpath', 'makedirs_safe']
+
def unfrackpath(path):
'''
Returns a path that is free of symlinks, environment
@@ -40,10 +41,9 @@ def unfrackpath(path):
example::
'$HOME/../../var/mail' becomes '/var/spool/mail'
'''
- canonical_path = os.path.normpath(os.path.realpath(os.path.expanduser(os.path.expandvars(to_bytes(path, errors='strict')))))
- if PY2:
- return to_unicode(canonical_path, errors='strict')
- return to_unicode(canonical_path, errors='surrogateescape')
+ canonical_path = os.path.normpath(os.path.realpath(os.path.expanduser(os.path.expandvars(to_bytes(path, errors='surrogate_or_strict')))))
+ return to_text(canonical_path, errors='surrogate_or_strict')
+
def makedirs_safe(path, mode=None):
'''Safe way to create dirs in multiprocess/thread environments.
@@ -64,4 +64,4 @@ def makedirs_safe(path, mode=None):
os.makedirs(b_rpath)
except OSError as e:
if e.errno != EEXIST:
- raise AnsibleError("Unable to create local directories(%s): %s" % (to_str(rpath), to_str(e)))
+ raise AnsibleError("Unable to create local directories(%s): %s" % (to_native(rpath), to_native(e)))
diff --git a/lib/ansible/utils/shlex.py b/lib/ansible/utils/shlex.py
index 2dee15816b..1da7c66e58 100644
--- a/lib/ansible/utils/shlex.py
+++ b/lib/ansible/utils/shlex.py
@@ -21,8 +21,7 @@ __metaclass__ = type
import shlex
from ansible.compat.six import PY3
-
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes, to_text
if PY3:
@@ -31,5 +30,5 @@ if PY3:
else:
# shlex.split() wants bytes (i.e. ``str``) input on Python 2
def shlex_split(s, comments=False, posix=True):
- return map(to_unicode, shlex.split(to_bytes(s), comments, posix))
+ return map(to_text, shlex.split(to_bytes(s), comments, posix))
shlex_split.__doc__ = shlex.split.__doc__
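
shlex_split() exists so callers can always pass and receive text: on Python 2 it encodes to bytes for shlex.split() and decodes the result back, while on Python 3 it can delegate straight to shlex.split(). Usage sketch:

from ansible.utils.shlex import shlex_split

print(shlex_split(u"scp -o 'Foo Bar' file"))  # ['scp', '-o', 'Foo Bar', 'file']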
diff --git a/lib/ansible/utils/unicode.py b/lib/ansible/utils/unicode.py
index bc184ca923..067877c0a7 100644
--- a/lib/ansible/utils/unicode.py
+++ b/lib/ansible/utils/unicode.py
@@ -19,264 +19,48 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.compat.six import string_types, text_type, binary_type, PY3
+from ansible.module_utils._text import to_bytes as _to_bytes, to_text, to_native
-# to_bytes and to_unicode were written by Toshio Kuratomi for the
-# python-kitchen library https://pypi.python.org/pypi/kitchen
-# They are licensed in kitchen under the terms of the GPLv2+
-# They were copied and modified for use in ansible by Toshio in Jan 2015
-# (simply removing the deprecated features)
+try:
+ from __main__ import display
+except ImportError:
+ from ansible.utils.display import Display
+ display = Display()
-#: Aliases for the utf-8 codec
-_UTF8_ALIASES = frozenset(('utf-8', 'UTF-8', 'utf8', 'UTF8', 'utf_8', 'UTF_8',
- 'utf', 'UTF', 'u8', 'U8'))
-#: Aliases for the latin-1 codec
-_LATIN1_ALIASES = frozenset(('latin-1', 'LATIN-1', 'latin1', 'LATIN1',
- 'latin', 'LATIN', 'l1', 'L1', 'cp819', 'CP819', '8859', 'iso8859-1',
- 'ISO8859-1', 'iso-8859-1', 'ISO-8859-1'))
-# EXCEPTION_CONVERTERS is defined below due to using to_unicode
+__all__ = ('to_bytes', 'to_unicode', 'to_str', 'unicode_wrap')
-def to_unicode(obj, encoding='utf-8', errors='replace', nonstring=None):
- '''Convert an object into a :class:`unicode` string
- :arg obj: Object to convert to a :class:`unicode` string. This should
- normally be a byte :class:`str`
- :kwarg encoding: What encoding to try converting the byte :class:`str` as.
- Defaults to :term:`utf-8`
- :kwarg errors: If errors are found while decoding, perform this action.
- Defaults to ``replace`` which replaces the invalid bytes with
- a character that means the bytes were unable to be decoded. Other
- values are the same as the error handling schemes in the `codec base
- classes
- <http://docs.python.org/library/codecs.html#codec-base-classes>`_.
- For instance ``strict`` which raises an exception and ``ignore`` which
- simply omits the non-decodable characters.
- :kwarg nonstring: How to treat nonstring values. Possible values are:
+###
+### Backwards compat
+###
- :simplerepr: Attempt to call the object's "simple representation"
- method and return that value. Python-2.3+ has two methods that
- try to return a simple representation: :meth:`object.__unicode__`
- and :meth:`object.__str__`. We first try to get a usable value
- from :meth:`object.__unicode__`. If that fails we try the same
- with :meth:`object.__str__`.
- :empty: Return an empty :class:`unicode` string
- :strict: Raise a :exc:`TypeError`
- :passthru: Return the object unchanged
- :repr: Attempt to return a :class:`unicode` string of the repr of the
- object
+def to_bytes(*args, **kwargs):
+ display.deprecated(u'ansible.utils.unicode.to_bytes is deprecated. Use ansible.module_utils._text.to_bytes instead', version=u'2.4')
+ if 'errors' not in kwargs:
+ kwargs['errors'] = 'replace'
+ return _to_bytes(*args, **kwargs)
- Default is ``simplerepr``
- :raises TypeError: if :attr:`nonstring` is ``strict`` and
- a non-:class:`basestring` object is passed in or if :attr:`nonstring`
- is set to an unknown value
- :raises UnicodeDecodeError: if :attr:`errors` is ``strict`` and
- :attr:`obj` is not decodable using the given encoding
- :returns: :class:`unicode` string or the original object depending on the
- value of :attr:`nonstring`.
+def to_unicode(*args, **kwargs):
+ display.deprecated(u'ansible.utils.unicode.to_unicode is deprecated. Use ansible.module_utils._text.to_text instead', version=u'2.4')
+ if 'errors' not in kwargs:
+ kwargs['errors'] = 'replace'
+ return to_text(*args, **kwargs)
- Usually this should be used on a byte :class:`str` but it can take both
- byte :class:`str` and :class:`unicode` strings intelligently. Nonstring
- objects are handled in different ways depending on the setting of the
- :attr:`nonstring` parameter.
- The default values of this function are set so as to always return
- a :class:`unicode` string and never raise an error when converting from
- a byte :class:`str` to a :class:`unicode` string. However, when you do
- not pass validly encoded text (or a nonstring object), you may end up with
- output that you don't expect. Be sure you understand the requirements of
- your data, not just ignore errors by passing it through this function.
- '''
- # Could use isbasestring/isunicode here but we want this code to be as
- # fast as possible
- if isinstance(obj, text_type):
- return obj
- if isinstance(obj, binary_type):
- if encoding in _UTF8_ALIASES:
- return text_type(obj, 'utf-8', errors)
- if encoding in _LATIN1_ALIASES:
- return text_type(obj, 'latin-1', errors)
- return obj.decode(encoding, errors)
+def to_str(*args, **kwargs):
+ display.deprecated(u'ansible.utils.unicode.to_str is deprecated. Use ansible.module_utils._text.to_native instead', version=u'2.4')
+ if 'errors' not in kwargs:
+ kwargs['errors'] = 'replace'
+ return to_native(*args, **kwargs)
- if not nonstring:
- nonstring = 'simplerepr'
- if nonstring == 'empty':
- return u''
- elif nonstring == 'passthru':
- return obj
- elif nonstring == 'simplerepr':
- try:
- simple = obj.__unicode__()
- except (AttributeError, UnicodeError):
- simple = None
- if not simple:
- try:
- simple = text_type(obj)
- except UnicodeError:
- try:
- simple = obj.__str__()
- except (UnicodeError, AttributeError):
- simple = u''
- if isinstance(simple, binary_type):
- return text_type(simple, encoding, errors)
- return simple
- elif nonstring in ('repr', 'strict'):
- obj_repr = repr(obj)
- if isinstance(obj_repr, binary_type):
- obj_repr = text_type(obj_repr, encoding, errors)
- if nonstring == 'repr':
- return obj_repr
- raise TypeError('to_unicode was given "%(obj)s" which is neither'
- ' a byte string (str) or a unicode string' %
- {'obj': obj_repr.encode(encoding, 'replace')})
+### End Backwards compat
- raise TypeError('nonstring value, %(param)s, is not set to a valid'
- ' action' % {'param': nonstring})
-def to_bytes(obj, encoding='utf-8', errors='replace', nonstring=None):
- '''Convert an object into a byte :class:`str`
-
- :arg obj: Object to convert to a byte :class:`str`. This should normally
- be a :class:`unicode` string.
- :kwarg encoding: Encoding to use to convert the :class:`unicode` string
- into a byte :class:`str`. Defaults to :term:`utf-8`.
- :kwarg errors: If errors are found while encoding, perform this action.
- Defaults to ``replace`` which replaces the invalid bytes with
- a character that means the bytes were unable to be encoded. Other
- values are the same as the error handling schemes in the `codec base
- classes
- <http://docs.python.org/library/codecs.html#codec-base-classes>`_.
- For instance ``strict`` which raises an exception and ``ignore`` which
- simply omits the non-encodable characters.
- :kwarg nonstring: How to treat nonstring values. Possible values are:
-
- :simplerepr: Attempt to call the object's "simple representation"
- method and return that value. Python-2.3+ has two methods that
- try to return a simple representation: :meth:`object.__unicode__`
- and :meth:`object.__str__`. We first try to get a usable value
- from :meth:`object.__str__`. If that fails we try the same
- with :meth:`object.__unicode__`.
- :empty: Return an empty byte :class:`str`
- :strict: Raise a :exc:`TypeError`
- :passthru: Return the object unchanged
- :repr: Attempt to return a byte :class:`str` of the :func:`repr` of the
- object
-
- Default is ``simplerepr``.
-
- :raises TypeError: if :attr:`nonstring` is ``strict`` and
- a non-:class:`basestring` object is passed in or if :attr:`nonstring`
- is set to an unknown value.
- :raises UnicodeEncodeError: if :attr:`errors` is ``strict`` and all of the
- bytes of :attr:`obj` are unable to be encoded using :attr:`encoding`.
- :returns: byte :class:`str` or the original object depending on the value
- of :attr:`nonstring`.
-
- .. warning::
-
- If you pass a byte :class:`str` into this function the byte
- :class:`str` is returned unmodified. It is **not** re-encoded with
- the specified :attr:`encoding`. The easiest way to achieve that is::
-
- to_bytes(to_unicode(text), encoding='utf-8')
-
- The initial :func:`to_unicode` call will ensure text is
- a :class:`unicode` string. Then, :func:`to_bytes` will turn that into
- a byte :class:`str` with the specified encoding.
-
- Usually, this should be used on a :class:`unicode` string but it can take
- either a byte :class:`str` or a :class:`unicode` string intelligently.
- Nonstring objects are handled in different ways depending on the setting
- of the :attr:`nonstring` parameter.
-
- The default values of this function are set so as to always return a byte
- :class:`str` and never raise an error when converting from unicode to
- bytes. However, when you do not pass an encoding that can validly encode
- the object (or a non-string object), you may end up with output that you
- don't expect. Be sure you understand the requirements of your data, not
- just ignore errors by passing it through this function.
- '''
- # Could use isbasestring, isbytestring here but we want this to be as fast
- # as possible
- if isinstance(obj, binary_type):
- return obj
- if isinstance(obj, text_type):
- return obj.encode(encoding, errors)
- if not nonstring:
- nonstring = 'simplerepr'
-
- if nonstring == 'empty':
- return b''
- elif nonstring == 'passthru':
- return obj
- elif nonstring == 'simplerepr':
- try:
- simple = str(obj)
- except UnicodeError:
- try:
- simple = obj.__str__()
- except (AttributeError, UnicodeError):
- simple = None
- if not simple:
- try:
- simple = obj.__unicode__()
- except (AttributeError, UnicodeError):
- simple = b''
- if isinstance(simple, text_type):
- simple = simple.encode(encoding, 'replace')
- return simple
- elif nonstring in ('repr', 'strict'):
- try:
- obj_repr = obj.__repr__()
- except (AttributeError, UnicodeError):
- obj_repr = b''
- if isinstance(obj_repr, text_type):
- obj_repr = obj_repr.encode(encoding, errors)
- else:
- obj_repr = binary_type(obj_repr)
- if nonstring == 'repr':
- return obj_repr
- raise TypeError('to_bytes was given "%(obj)s" which is neither'
- ' a unicode string or a byte string (str)' % {'obj': obj_repr})
-
- raise TypeError('nonstring value, %(param)s, is not set to a valid'
- ' action' % {'param': nonstring})
-
-
-# force the return value of a function to be unicode. Use with partial to
-# ensure that a filter will return unicode values.
def unicode_wrap(func, *args, **kwargs):
- return to_unicode(func(*args, **kwargs), nonstring='passthru')
-
+ """If a function returns a string, force it to be a text string.
-# Alias for converting to native strings.
-# Native strings are the default string type for the particular version of
-# python. The objects are called "str" in both py2 and py3 but they mean
-# different things. In py2, it's a byte string like in C. In py3 it's an
-# abstract text type (like py2's unicode type).
-#
-# Use this when raising exceptions and wanting to get the string
-# representation of an object for the exception message. For example:
-#
-# try:
-# do_something()
-# except Exception as e:
-# raise AnsibleError(to_str(e))
-#
-# Note that this is because python's exception handling expects native strings
-# and doe the wrong thing if given the other sort of string (in py2, if given
-# unicode strings, it could traceback or omit the message. in py3, if given
-# byte strings it prints their repr (so the message ends up as b'message').
-#
-# If you use ansible's API instead of re-raising an exception, use to_unicode
-# instead:
-#
-# try:
-# do_something()
-# except Exception as e:
-# display.warn(to_unicode(e))
-if PY3:
- to_str = to_unicode
-else:
- to_str = to_bytes
+ Use with partial to ensure that filter plugins will return text values.
+ """
+ return to_text(func(*args, **kwargs), nonstring='passthru')
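
The old names survive as thin shims that emit a deprecation warning (unless warnings are disabled) and keep the historical errors='replace' default, so existing imports work until 2.4 while new code moves to module_utils._text. A migration sketch with an illustrative byte value:

from ansible.utils.unicode import to_unicode           # deprecated shim, warns at runtime
from ansible.module_utils._text import to_text         # replacement

old = to_unicode(b'caf\xc3\xa9')                        # goes through the shim, errors='replace'
new = to_text(b'caf\xc3\xa9', errors='surrogate_or_strict')
assert old == new == u'caf\xe9'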
diff --git a/lib/ansible/utils/vars.py b/lib/ansible/utils/vars.py
index ee3dd5feaa..40ad17109f 100644
--- a/lib/ansible/utils/vars.py
+++ b/lib/ansible/utils/vars.py
@@ -28,7 +28,7 @@ from ansible.compat.six import iteritems, string_types
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.parsing.splitter import parse_kv
-from ansible.utils.unicode import to_unicode, to_str
+from ansible.module_utils._text import to_native, to_text
def _validate_mutable_mappings(a, b):
@@ -49,11 +49,12 @@ def _validate_mutable_mappings(a, b):
try:
myvars.append(dumps(x))
except:
- myvars.append(to_str(x))
+ myvars.append(to_native(x))
raise AnsibleError("failed to combine variables, expected dicts but got a '{0}' and a '{1}': \n{2}\n{3}".format(
a.__class__.__name__, b.__class__.__name__, myvars[0], myvars[1])
)
+
def combine_vars(a, b):
"""
Return a copy of dictionaries of variables based on configured hash behavior
@@ -68,6 +69,7 @@ def combine_vars(a, b):
result.update(b)
return result
+
def merge_hash(a, b):
"""
Recursively merges hash b into a so that keys from b take precedence over keys from a
@@ -95,10 +97,11 @@ def merge_hash(a, b):
return result
+
def load_extra_vars(loader, options):
extra_vars = {}
for extra_vars_opt in options.extra_vars:
- extra_vars_opt = to_unicode(extra_vars_opt, errors='strict')
+ extra_vars_opt = to_text(extra_vars_opt, errors='surrogate_or_strict')
if extra_vars_opt.startswith(u"@"):
# Argument is a YAML file (JSON is a subset of YAML)
data = loader.load_from_file(extra_vars_opt[1:])
@@ -111,6 +114,7 @@ def load_extra_vars(loader, options):
extra_vars = combine_vars(extra_vars, data)
return extra_vars
+
def load_options_vars(options):
options_vars = {}
# For now only return check mode, but we can easily return more
@@ -118,6 +122,7 @@ def load_options_vars(options):
options_vars['ansible_check_mode'] = options.check
return options_vars
+
def isidentifier(ident):
"""
Determines if a string is a valid Python identifier using the ast module.
@@ -148,4 +153,3 @@ def isidentifier(ident):
return False
return True
-
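
Usage sketch for combine_vars() versus merge_hash(): with the default hash_behaviour=replace the right-hand dict wins wholesale, while merge_hash() (what hash_behaviour=merge uses) merges nested dicts recursively. Values are illustrative:

from ansible.utils.vars import combine_vars, merge_hash

a = {'ports': {'http': 80}, 'name': 'web'}
b = {'ports': {'https': 443}}
print(combine_vars(a, b))  # replace: {'name': 'web', 'ports': {'https': 443}}
print(merge_hash(a, b))    # merge:   {'name': 'web', 'ports': {'http': 80, 'https': 443}}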
diff --git a/lib/ansible/vars/unsafe_proxy.py b/lib/ansible/vars/unsafe_proxy.py
index ac5cce24af..4238d4d8e5 100644
--- a/lib/ansible/vars/unsafe_proxy.py
+++ b/lib/ansible/vars/unsafe_proxy.py
@@ -54,18 +54,21 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
-
-from ansible.utils.unicode import to_unicode
from ansible.compat.six import string_types, text_type
+from ansible.module_utils._text import to_text
+
__all__ = ['UnsafeProxy', 'AnsibleUnsafe', 'AnsibleJSONUnsafeEncoder', 'AnsibleJSONUnsafeDecoder', 'wrap_var']
+
class AnsibleUnsafe(object):
__UNSAFE__ = True
+
class AnsibleUnsafeText(text_type, AnsibleUnsafe):
pass
+
class UnsafeProxy(object):
def __new__(cls, obj, *args, **kwargs):
# In our usage we should only receive unicode strings.
@@ -73,10 +76,11 @@ class UnsafeProxy(object):
# we're given but we may want to take it out for testing and sanitize
# our input instead.
if isinstance(obj, string_types):
- obj = to_unicode(obj, errors='strict')
+ obj = to_text(obj, errors='surrogate_or_strict')
return AnsibleUnsafeText(obj)
return obj
+
class AnsibleJSONUnsafeEncoder(json.JSONEncoder):
def encode(self, obj):
if isinstance(obj, AnsibleUnsafe):
@@ -84,6 +88,7 @@ class AnsibleJSONUnsafeEncoder(json.JSONEncoder):
else:
return super(AnsibleJSONUnsafeEncoder, self).encode(obj)
+
class AnsibleJSONUnsafeDecoder(json.JSONDecoder):
def decode(self, obj):
value = super(AnsibleJSONUnsafeDecoder, self).decode(obj)
@@ -92,6 +97,7 @@ class AnsibleJSONUnsafeDecoder(json.JSONDecoder):
else:
return value
+
def _wrap_dict(v):
for k in v.keys():
if v[k] is not None:
@@ -115,4 +121,3 @@ def wrap_var(v):
if v is not None and not isinstance(v, AnsibleUnsafe):
v = UnsafeProxy(v)
return v
-
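
Usage sketch for wrap_var(): strings become AnsibleUnsafeText so the templar will not template them a second time, dict and list contents are wrapped element by element, and None passes through untouched:

from ansible.vars.unsafe_proxy import AnsibleUnsafeText, wrap_var

wrapped = wrap_var({'cmd': u'{{ lookup("pipe", "id") }}'})
assert isinstance(wrapped['cmd'], AnsibleUnsafeText)
assert wrap_var(None) is None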
diff --git a/test/units/mock/procenv.py b/test/units/mock/procenv.py
index abf436b94d..e9d470c079 100644
--- a/test/units/mock/procenv.py
+++ b/test/units/mock/procenv.py
@@ -27,7 +27,8 @@ from contextlib import contextmanager
from io import BytesIO, StringIO
from ansible.compat.six import PY3
from ansible.compat.tests import unittest
-from ansible.utils.unicode import to_bytes
+from ansible.module_utils._text import to_bytes
+
@contextmanager
def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
@@ -48,6 +49,7 @@ def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
sys.stdin = real_stdin
sys.argv = real_argv
+
@contextmanager
def swap_stdout():
"""
@@ -62,6 +64,7 @@ def swap_stdout():
yield fake_stream
sys.stdout = old_stdout
+
class ModuleTestCase(unittest.TestCase):
def setUp(self, module_args=None):
if module_args is None:
diff --git a/test/units/parsing/vault/test_vault.py b/test/units/parsing/vault/test_vault.py
index 4fe0fff5e0..1be56413b8 100644
--- a/test/units/parsing/vault/test_vault.py
+++ b/test/units/parsing/vault/test_vault.py
@@ -30,11 +30,12 @@ from binascii import hexlify
from nose.plugins.skip import SkipTest
from ansible.compat.tests import unittest
-from ansible.utils.unicode import to_bytes, to_unicode
from ansible import errors
from ansible.parsing.vault import VaultLib
from ansible.parsing import vault
+from ansible.module_utils._text import to_bytes
+
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
diff --git a/test/units/parsing/vault/test_vault_editor.py b/test/units/parsing/vault/test_vault_editor.py
index 5ef6be4daf..3b7ccf2582 100644
--- a/test/units/parsing/vault/test_vault_editor.py
+++ b/test/units/parsing/vault/test_vault_editor.py
@@ -27,11 +27,12 @@ from nose.plugins.skip import SkipTest
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
-from ansible.utils.unicode import to_bytes, to_unicode
from ansible import errors
from ansible.parsing.vault import VaultLib
from ansible.parsing.vault import VaultEditor
+from ansible.module_utils._text import to_bytes, to_text
+
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
@@ -66,6 +67,7 @@ v11_data = """$ANSIBLE_VAULT;1.1;AES256
3631633031323837340a396530313963373030343933616133393566366137363761373930663833
3739"""
+
class TestVaultEditor(unittest.TestCase):
def setUp(self):
@@ -121,20 +123,19 @@ class TestVaultEditor(unittest.TestCase):
error_hit = False
try:
ve.decrypt_file(v10_file.name)
- except errors.AnsibleError as e:
+ except errors.AnsibleError:
error_hit = True
# verify decrypted content
f = open(v10_file.name, "rb")
- fdata = to_unicode(f.read())
+ fdata = to_text(f.read())
f.close()
os.unlink(v10_file.name)
- assert error_hit == False, "error decrypting 1.0 file"
+ assert error_hit is False, "error decrypting 1.0 file"
assert fdata.strip() == "foo", "incorrect decryption of 1.0 file: %s" % fdata.strip()
-
def test_decrypt_1_1(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2:
raise SkipTest
@@ -149,20 +150,19 @@ class TestVaultEditor(unittest.TestCase):
error_hit = False
try:
ve.decrypt_file(v11_file.name)
- except errors.AnsibleError as e:
+ except errors.AnsibleError:
error_hit = True
# verify decrypted content
f = open(v11_file.name, "rb")
- fdata = to_unicode(f.read())
+ fdata = to_text(f.read())
f.close()
os.unlink(v11_file.name)
- assert error_hit == False, "error decrypting 1.0 file"
+ assert error_hit is False, "error decrypting 1.1 file"
assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
-
@unittest.skipIf(sys.version_info[0] >= 3, "VaultAES still needs to be ported to Python 3")
def test_rekey_migration(self):
"""
@@ -182,7 +182,7 @@ class TestVaultEditor(unittest.TestCase):
error_hit = False
try:
ve.rekey_file(v10_file.name, 'ansible2')
- except errors.AnsibleError as e:
+ except errors.AnsibleError:
error_hit = True
# verify decrypted content
@@ -190,7 +190,7 @@ class TestVaultEditor(unittest.TestCase):
fdata = f.read()
f.close()
- assert error_hit == False, "error rekeying 1.0 file to 1.1"
+ assert error_hit is False, "error rekeying 1.0 file to 1.1"
# ensure filedata can be decrypted, is 1.1 and is AES256
vl = VaultLib("ansible2")
@@ -198,13 +198,11 @@ class TestVaultEditor(unittest.TestCase):
error_hit = False
try:
dec_data = vl.decrypt(fdata)
- except errors.AnsibleError as e:
+ except errors.AnsibleError:
error_hit = True
os.unlink(v10_file.name)
assert vl.cipher_name == "AES256", "wrong cipher name set after rekey: %s" % vl.cipher_name
- assert error_hit == False, "error decrypting migrated 1.0 file"
+ assert error_hit is False, "error decrypting migrated 1.0 file"
assert dec_data.strip() == "foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
-
-
diff --git a/test/units/parsing/yaml/test_loader.py b/test/units/parsing/yaml/test_loader.py
index 1e224f6704..2b166f2bd4 100644
--- a/test/units/parsing/yaml/test_loader.py
+++ b/test/units/parsing/yaml/test_loader.py
@@ -32,7 +32,6 @@ from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing import vault
from ansible.parsing.yaml.objects import AnsibleVaultEncryptedUnicode
from ansible.parsing.yaml.dumper import AnsibleDumper
-from ansible.utils.unicode import to_bytes
from units.mock.yaml_helper import YamlTestUtils
@@ -41,6 +40,7 @@ try:
except ImportError:
from yaml.parser import ParserError
+
class NameStringIO(StringIO):
"""In py2.6, StringIO doesn't let you set name because a baseclass has it
as readonly property"""
@@ -49,6 +49,7 @@ class NameStringIO(StringIO):
def __init__(self, *args, **kwargs):
super(NameStringIO, self).__init__(*args, **kwargs)
+
class TestAnsibleLoaderBasic(unittest.TestCase):
def setUp(self):
@@ -283,6 +284,7 @@ class TestAnsibleLoaderVault(unittest.TestCase, YamlTestUtils):
self.assertFalse(plaintext_var != vault_string)
self.assertFalse(vault_string != plaintext_var)
+
class TestAnsibleLoaderPlay(unittest.TestCase):
def setUp(self):
diff --git a/test/units/plugins/action/test_action.py b/test/units/plugins/action/test_action.py
index 9a4cb74f3e..d9b2d33864 100644
--- a/test/units/plugins/action/test_action.py
+++ b/test/units/plugins/action/test_action.py
@@ -21,8 +21,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-import ast
-import json
import pipes
import os
@@ -33,7 +31,6 @@ except ImportError:
from nose.tools import eq_, raises
-from ansible.release import __version__ as ansible_version
from ansible import constants as C
from ansible.compat.six import text_type
from ansible.compat.tests import unittest
@@ -41,12 +38,12 @@ from ansible.compat.tests.mock import patch, MagicMock, mock_open
from ansible.errors import AnsibleError
from ansible.playbook.play_context import PlayContext
-from ansible.plugins import PluginLoader
from ansible.plugins.action import ActionBase
from ansible.template import Templar
-from ansible.utils.unicode import to_bytes
from units.mock.loader import DictDataLoader
+from ansible.module_utils._text import to_bytes
+
python_module_replacers = b"""
#!/usr/bin/python
@@ -67,11 +64,13 @@ WINDOWS_ARGS = "<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>"
class DerivedActionBase(ActionBase):
TRANSFERS_FILES = False
+
def run(self, tmp=None, task_vars=None):
# We're not testing the plugin run() method, just the helper
# methods ActionBase defines
return super(DerivedActionBase, self).run(tmp=tmp, task_vars=task_vars)
+
class TestActionBase(unittest.TestCase):
def test_action_base_run(self):
@@ -144,7 +143,7 @@ class TestActionBase(unittest.TestCase):
self.assertRaises(AnsibleError, action_base._configure_module, 'badmodule', mock_task.args)
# test powershell module formatting
- with patch.object(builtins, 'open', mock_open(read_data=to_bytes(powershell_module_replacers.strip(), encoding='utf-8'))) as m:
+ with patch.object(builtins, 'open', mock_open(read_data=to_bytes(powershell_module_replacers.strip(), encoding='utf-8'))):
mock_task.action = 'win_copy'
mock_task.args = dict(b=2)
mock_connection.module_implementation_preferences = ('.ps1',)
@@ -497,7 +496,10 @@ class TestActionBase(unittest.TestCase):
action_base._connection.has_pipelining = True
action_base._low_level_execute_command.return_value = dict(stdout='{"rc": 0, "stdout": "ok"}')
self.assertEqual(action_base._execute_module(module_name=None, module_args=None), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))
- self.assertEqual(action_base._execute_module(module_name='foo', module_args=dict(z=9, y=8, x=7), task_vars=dict(a=1)), dict(_ansible_parsed=True, rc=0, stdout="ok", stdout_lines=['ok']))
+ self.assertEqual(action_base._execute_module(module_name='foo',
+ module_args=dict(z=9, y=8, x=7), task_vars=dict(a=1)),
+ dict(_ansible_parsed=True, rc=0, stdout="ok",
+ stdout_lines=['ok']))
# test with needing/removing a remote tmp path
action_base._configure_module.return_value = ('old', '#!/usr/bin/python', 'this is the module data', 'path')
@@ -555,6 +557,7 @@ class TestActionBase(unittest.TestCase):
finally:
C.BECOME_ALLOW_SAME_USER = become_allow_same_user
+
# Note: Using nose's generator test cases here so we can't inherit from
# unittest.TestCase
class TestFilterNonJsonLines(object):
@@ -592,4 +595,3 @@ class TestFilterNonJsonLines(object):
def test_unparsable_filter_non_json_lines(self):
for stdout_line in self.unparsable_cases:
yield self.check_unparsable_filter_non_json_lines, stdout_line
-
diff --git a/test/units/plugins/connections/test_connection_ssh.py b/test/units/plugins/connections/test_connection_ssh.py
index 18fba6d0f9..85c363450b 100644
--- a/test/units/plugins/connections/test_connection_ssh.py
+++ b/test/units/plugins/connections/test_connection_ssh.py
@@ -22,17 +22,17 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pipes
-import sys
from io import StringIO
from ansible.compat.tests import unittest
-from ansible.compat.tests.mock import patch, MagicMock, mock_open
+from ansible.compat.tests.mock import patch, MagicMock
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.module_utils._text import to_bytes
+
class TestConnectionBaseClass(unittest.TestCase):
@@ -277,20 +277,22 @@ class TestConnectionBaseClass(unittest.TestCase):
C.ANSIBLE_SSH_RETRIES = 9
# test a regular, successful execution
- conn._exec_command.return_value = (0, 'stdout', '')
+ conn._exec_command.return_value = (0, b'stdout', b'')
res = conn.exec_command('ssh', 'some data')
+ self.assertEquals(res, (0, b'stdout', b''), msg='exec_command did not return what the _exec_command helper returned')
# test a retry, followed by success
conn._exec_command.return_value = None
- conn._exec_command.side_effect = [(255, '', ''), (0, 'stdout', '')]
+ conn._exec_command.side_effect = [(255, b'', b''), (0, b'stdout', b'')]
res = conn.exec_command('ssh', 'some data')
+ self.assertEquals(res, (0, b'stdout', b''), msg='exec_command did not return what the _exec_command helper returned')
# test multiple failures
- conn._exec_command.side_effect = [(255, '', '')]*10
+ conn._exec_command.side_effect = [(255, b'', b'')] * 10
self.assertRaises(AnsibleConnectionFailure, conn.exec_command, 'ssh', 'some data')
# test other failure from exec_command
- conn._exec_command.side_effect = [Exception('bad')]*10
+ conn._exec_command.side_effect = [Exception('bad')] * 10
self.assertRaises(Exception, conn.exec_command, 'ssh', 'some data')
@patch('os.path.exists')
@@ -308,20 +310,22 @@ class TestConnectionBaseClass(unittest.TestCase):
# test with C.DEFAULT_SCP_IF_SSH enabled
C.DEFAULT_SCP_IF_SSH = True
- res = conn.put_file('/path/to/in/file', '/path/to/dest/file')
+ conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None)
- res = conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
+ conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', None)
# test with C.DEFAULT_SCP_IF_SSH disabled
C.DEFAULT_SCP_IF_SSH = False
expected_in_data = b' '.join((b'put', to_bytes(pipes.quote('/path/to/in/file')), to_bytes(pipes.quote('/path/to/dest/file')))) + b'\n'
- res = conn.put_file('/path/to/in/file', '/path/to/dest/file')
+ conn.put_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data)
- expected_in_data = b' '.join((b'put', to_bytes(pipes.quote('/path/to/in/file/with/unicode-fö〩')), to_bytes(pipes.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
- res = conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
+ expected_in_data = b' '.join((b'put',
+ to_bytes(pipes.quote('/path/to/in/file/with/unicode-fö〩')),
+ to_bytes(pipes.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
+ conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data)
# test that a non-zero rc raises an error
@@ -346,23 +350,24 @@ class TestConnectionBaseClass(unittest.TestCase):
# test with C.DEFAULT_SCP_IF_SSH enabled
C.DEFAULT_SCP_IF_SSH = True
- res = conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
+ conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', None)
- res = conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
+ conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', None)
# test with C.DEFAULT_SCP_IF_SSH disabled
C.DEFAULT_SCP_IF_SSH = False
expected_in_data = b' '.join((b'get', to_bytes(pipes.quote('/path/to/in/file')), to_bytes(pipes.quote('/path/to/dest/file')))) + b'\n'
- res = conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
+ conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
conn._run.assert_called_with('some command to run', expected_in_data)
- expected_in_data = b' '.join((b'get', to_bytes(pipes.quote('/path/to/in/file/with/unicode-fö〩')), to_bytes(pipes.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
- res = conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
+ expected_in_data = b' '.join((b'get',
+ to_bytes(pipes.quote('/path/to/in/file/with/unicode-fö〩')),
+ to_bytes(pipes.quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
+ conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
conn._run.assert_called_with('some command to run', expected_in_data)
# test that a non-zero rc raises an error
conn._run.return_value = (1, 'stdout', 'some errors')
self.assertRaises(AnsibleError, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')
-