summaryrefslogtreecommitdiff
path: root/lib/ansible/parsing
diff options
context:
space:
mode:
Diffstat (limited to 'lib/ansible/parsing')
-rw-r--r--lib/ansible/parsing/__init__.py222
-rw-r--r--lib/ansible/parsing/mod_args.py278
-rw-r--r--lib/ansible/parsing/splitter.py273
-rw-r--r--lib/ansible/parsing/utils/__init__.py21
-rw-r--r--lib/ansible/parsing/utils/jsonify.py45
-rw-r--r--lib/ansible/parsing/vault/__init__.py603
-rw-r--r--lib/ansible/parsing/yaml/__init__.py21
-rw-r--r--lib/ansible/parsing/yaml/constructor.py91
-rw-r--r--lib/ansible/parsing/yaml/loader.py51
-rw-r--r--lib/ansible/parsing/yaml/objects.py65
10 files changed, 1670 insertions, 0 deletions
diff --git a/lib/ansible/parsing/__init__.py b/lib/ansible/parsing/__init__.py
new file mode 100644
index 0000000000..9551343fbf
--- /dev/null
+++ b/lib/ansible/parsing/__init__.py
@@ -0,0 +1,222 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import json
+import os
+
+from yaml import load, YAMLError
+
+from ansible.errors import AnsibleParserError
+from ansible.errors.yaml_strings import YAML_SYNTAX_ERROR
+from ansible.parsing.vault import VaultLib
+from ansible.parsing.splitter import unquote
+from ansible.parsing.yaml.loader import AnsibleLoader
+from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
+from ansible.utils.path import unfrackpath
+
class DataLoader():

    '''
    The DataLoader class is used to load and parse YAML or JSON content,
    either from a given file name or from a string that was previously
    read in through other means. A Vault password can be specified, and
    any vault-encrypted files will be decrypted.

    Data read from files will also be cached, so the file will never be
    read from disk more than once.

    Usage:

        dl = DataLoader()
        (or)
        dl = DataLoader(vault_password='foo')

        ds = dl.load('...')
        ds = dl.load_from_file('/path/to/file')
    '''

    def __init__(self, vault_password=None):
        # base directory used to resolve relative paths in path_dwim()
        self._basedir = '.'
        self._vault_password = vault_password
        # maps resolved file names to their parsed data structures
        self._FILE_CACHE = dict()

        self._vault = VaultLib(password=vault_password)

    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.

        :param data: the string to parse
        :param file_name: name used in error reporting only
        :param show_content: whether file content may be shown in errors
            (False for vault-decrypted data)
        :returns: the parsed data structure
        :raises AnsibleParserError: on a YAML syntax error
        '''

        try:
            # we first try to load this data as JSON
            return json.loads(data)
        # FIX: was a bare "except:", which would also swallow SystemExit
        # and KeyboardInterrupt raised during the JSON attempt
        except Exception:
            # if loading JSON failed for any reason, we go ahead
            # and try to parse it as YAML instead

            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                # NOTE(review): unicode() exists on Python 2 only; this needs
                # six.text_type (or equivalent) for Python 3 support — confirm
                new_data = unicode(data)
            else:
                new_data = data
            try:
                new_data = self._safe_load(new_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                # restore the position tracking lost by the unwrap above
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos
            return new_data

    def load_from_file(self, file_name):
        ''' Loads data from a file, which can contain either JSON or YAML. '''

        file_name = self.path_dwim(file_name)

        # if the file has already been read in and cached, we'll
        # return those results to avoid more file/vault operations
        if file_name in self._FILE_CACHE:
            return self._FILE_CACHE[file_name]

        # read the file contents and load the data structure from them
        (file_data, show_content) = self._get_file_contents(file_name)
        parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)

        # cache the file contents for next time
        self._FILE_CACHE[file_name] = parsed_data

        return parsed_data

    def path_exists(self, path):
        ''' thin wrapper over os.path.exists (kept separate for test mocking) '''
        return os.path.exists(path)

    def is_file(self, path):
        ''' thin wrapper over os.path.isfile '''
        return os.path.isfile(path)

    def is_directory(self, path):
        ''' thin wrapper over os.path.isdir '''
        return os.path.isdir(path)

    def list_directory(self, path):
        ''' thin wrapper over os.listdir '''
        return os.listdir(path)

    def _safe_load(self, stream, file_name=None):
        ''' Implements yaml.safe_load(), except using our custom loader class. '''

        loader = AnsibleLoader(stream, file_name)
        try:
            return loader.get_single_data()
        finally:
            # always release parser resources, even on error
            loader.dispose()

    def _get_file_contents(self, file_name):
        '''
        Reads the file contents from the given file name, and will decrypt them
        if they are found to be vault-encrypted.

        :returns: tuple of (data, show_content); show_content is False when
            the data came from a vault so it is never echoed in errors
        :raises AnsibleParserError: if the file is missing or unreadable
        '''

        if not self.path_exists(file_name) or not self.is_file(file_name):
            raise AnsibleParserError("the file_name '%s' does not exist, or is not readable" % file_name)

        show_content = True
        try:
            with open(file_name, 'r') as f:
                data = f.read()
                if self._vault.is_encrypted(data):
                    data = self._vault.decrypt(data)
                    show_content = False
            return (data, show_content)
        except (IOError, OSError) as e:
            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))

    def _handle_error(self, yaml_exc, file_name, show_content):
        '''
        Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
        file name/position where a YAML exception occurred, and raises an AnsibleParserError
        to display the syntax exception information.
        '''

        # if the YAML exception contains a problem mark, use it to construct
        # an object the error class can use to display the faulty line
        err_obj = None
        if hasattr(yaml_exc, 'problem_mark'):
            err_obj = AnsibleBaseYAMLObject()
            # problem_mark is zero-based; convert to one-based for display
            err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)

        raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)

    def get_basedir(self):
        ''' returns the current basedir '''
        return self._basedir

    def set_basedir(self, basedir):
        ''' sets the base directory, used to find files when a relative path is given '''

        # a None basedir is ignored so the previous value is preserved
        if basedir is not None:
            self._basedir = basedir

    def path_dwim(self, given):
        '''
        make relative paths work like folks expect:
        absolute and ~user paths are used as-is, everything else is
        resolved relative to the current basedir.
        '''

        given = unquote(given)

        if given.startswith("/"):
            return os.path.abspath(given)
        elif given.startswith("~"):
            return os.path.abspath(os.path.expanduser(given))
        else:
            return os.path.abspath(os.path.join(self._basedir, given))

    def path_dwim_relative(self, role_path, dirname, source):
        ''' find one file in a directory one level up in a dir named dirname relative to current '''

        basedir = os.path.dirname(role_path)
        if os.path.islink(basedir):
            # resolve the symlink so '..' below works against the real path
            basedir = unfrackpath(basedir)
            template2 = os.path.join(basedir, dirname, source)
        else:
            template2 = os.path.join(basedir, '..', dirname, source)

        # 1) look inside the role itself
        source1 = os.path.join(role_path, dirname, source)
        if os.path.exists(source1):
            return source1

        # 2) look one level up from the role; temporarily switch basedir
        #    so path_dwim resolves relative to it, then restore
        cur_basedir = self._basedir
        self.set_basedir(basedir)
        source2 = self.path_dwim(template2)
        if os.path.exists(source2):
            self.set_basedir(cur_basedir)
            return source2

        # 3) fall back to a path relative to the original basedir
        obvious_local_path = self.path_dwim(source)
        if os.path.exists(obvious_local_path):
            self.set_basedir(cur_basedir)
            return obvious_local_path

        self.set_basedir(cur_basedir)
        return source2  # which does not exist
+
diff --git a/lib/ansible/parsing/mod_args.py b/lib/ansible/parsing/mod_args.py
new file mode 100644
index 0000000000..ed527f1b08
--- /dev/null
+++ b/lib/ansible/parsing/mod_args.py
@@ -0,0 +1,278 @@
+# (c) 2014 Michael DeHaan, <michael@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from six import iteritems, string_types
+
+from ansible.errors import AnsibleParserError
+from ansible.plugins import module_loader
+from ansible.parsing.splitter import parse_kv
+
class ModuleArgsParser:

    """
    There are several ways a module and argument set can be expressed:

    # legacy form (for a shell command)
    - action: shell echo hi

    # common shorthand for local actions vs delegate_to
    - local_action: shell echo hi

    # most commonly:
    - copy: src=a dest=b

    # legacy form
    - action: copy src=a dest=b

    # complex args form, for passing structured data
    - copy:
        src: a
        dest: b

    # gross, but technically legal
    - action:
        module: copy
        args:
          src: a
          dest: b

    # extra gross, but also legal. in this case, the args specified
    # will act as 'defaults' and will be overridden by any args specified
    # in one of the other formats (complex args under the action, or
    # parsed from the k=v string
    - command: 'pwd'
      args:
        chdir: '/tmp'


    This class has some of the logic to canonicalize these into the form

    - module: <module_name>
      delegate_to: <optional>
      args: <args>

    Args may also be munged for certain shell command parameters.
    """

    def __init__(self, task_ds=None):
        # FIX: previously used a mutable default argument (task_ds=dict()),
        # which is a single dict shared by every instance created without
        # an explicit task_ds
        if task_ds is None:
            task_ds = dict()
        assert isinstance(task_ds, dict)
        self._task_ds = task_ds


    def _split_module_string(self, module_string):
        '''
        when module names are expressed like:
        action: copy src=a dest=b
        the first part of the string is the name of the module
        and the rest are strings pertaining to the arguments.

        (parameter renamed from 'str', which shadowed the builtin)
        '''

        tokens = module_string.split()
        if len(tokens) > 1:
            return (tokens[0], " ".join(tokens[1:]))
        else:
            return (tokens[0], "")


    def _handle_shell_weirdness(self, action, args):
        '''
        given an action name and an args dictionary, return the
        proper action name and args dictionary. This mostly is due
        to shell/command being treated special and nothing else
        '''

        # don't handle non shell/command modules in this function
        # TODO: in terms of the whole app, should 'raw' also fit here?
        if action not in ['shell', 'command']:
            return (action, args)

        # the shell module really is the command module with an additional
        # parameter
        if action == 'shell':
            action = 'command'
            args['_uses_shell'] = True

        return (action, args)

    def _normalize_parameters(self, thing, action=None, additional_args=None):
        '''
        arguments can be fuzzy. Deal with all the forms.

        :param thing: the raw action value (string, dict, or None)
        :param action: module name if already known (old-style invocation)
        :param additional_args: lower-priority defaults merged in first
            (FIX: default was a shared mutable dict() — now None)
        :returns: tuple of (action, final_args)
        '''

        # final args are the ones we'll eventually return, so first update
        # them with any additional args specified, which have lower priority
        # than those which may be parsed/normalized next
        final_args = dict()
        if additional_args:
            final_args.update(additional_args)

        # how we normalize depends if we figured out what the module name is
        # yet. If we have already figured it out, it's an 'old style' invocation.
        # otherwise, it's not

        if action is not None:
            args = self._normalize_old_style_args(thing, action)
        else:
            (action, args) = self._normalize_new_style_args(thing)

        # this can occasionally happen, simplify
        if args and 'args' in args:
            tmp_args = args['args']
            del args['args']
            if isinstance(tmp_args, string_types):
                tmp_args = parse_kv(tmp_args)
            args.update(tmp_args)

        # finally, update the args we're going to return with the ones
        # which were normalized above
        if args:
            final_args.update(args)

        return (action, final_args)

    def _normalize_old_style_args(self, thing, action):
        '''
        deals with fuzziness in old-style (action/local_action) module invocations
        returns tuple of (module_name, dictionary_args)

        possible example inputs:
            { 'local_action' : 'shell echo hi' }
            { 'action' : 'shell echo hi' }
            { 'local_action' : { 'module' : 'ec2', 'x' : 1, 'y': 2 }}
        standardized outputs like:
            ( 'command', { _raw_params: 'echo hi', _uses_shell: True }
        '''

        if isinstance(thing, dict):
            # form is like: local_action: { module: 'xyz', x: 2, y: 3 } ... uncommon!
            args = thing
        elif isinstance(thing, string_types):
            # form is like: local_action: copy src=a dest=b ... pretty common
            # free-form ("raw") params are only meaningful for these modules
            check_raw = action in ('command', 'shell', 'script')
            args = parse_kv(thing, check_raw=check_raw)
        elif thing is None:
            # this can happen with modules which take no params, like ping:
            args = None
        else:
            raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)
        return args

    def _normalize_new_style_args(self, thing):
        '''
        deals with fuzziness in new style module invocations
        accepting key=value pairs and dictionaries, and always returning dictionaries
        returns tuple of (module_name, dictionary_args)

        possible example inputs:
            { 'shell' : 'echo hi' }
            { 'ec2'   : { 'region' : 'xyz' }
            { 'ec2'   : 'region=xyz' }
        standardized outputs like:
            ('ec2', { region: 'xyz'} )
        '''

        action = None
        args = None

        if isinstance(thing, dict):
            # form is like: copy: { src: 'a', dest: 'b' } ... common for structured (aka "complex") args
            thing = thing.copy()
            if 'module' in thing:
                action = thing['module']
                args = thing.copy()
                del args['module']

        elif isinstance(thing, string_types):
            # form is like: copy: src=a dest=b ... common shorthand throughout ansible
            (action, args) = self._split_module_string(thing)
            check_raw = action in ('command', 'shell', 'script')
            args = parse_kv(args, check_raw=check_raw)

        else:
            # need a dict or a string, so giving up
            raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)

        return (action, args)

    def parse(self):
        '''
        Given a task in one of the supported forms, parses and returns
        returns the action, arguments, and delegate_to values for the
        task, dealing with all sorts of levels of fuzziness.

        :returns: tuple of (action, args, delegate_to)
        :raises AnsibleParserError: when no action (or conflicting actions)
            can be found in the task datastructure
        '''

        thing = None

        action = None
        delegate_to = self._task_ds.get('delegate_to', None)
        args = dict()


        #
        # We can have one of action, local_action, or module specified
        #


        # this is the 'extra gross' scenario detailed above, so we grab
        # the args and pass them in as additional arguments, which can/will
        # be overwritten via dict updates from the other arg sources below
        # FIXME: add test cases for this
        additional_args = self._task_ds.get('args', dict())

        # action
        if 'action' in self._task_ds:
            # an old school 'action' statement
            thing = self._task_ds['action']
            action, args = self._normalize_parameters(thing, additional_args=additional_args)

        # local_action
        if 'local_action' in self._task_ds:
            # local_action is similar but also implies a delegate_to
            if action is not None:
                raise AnsibleParserError("action and local_action are mutually exclusive", obj=self._task_ds)
            thing = self._task_ds.get('local_action', '')
            delegate_to = 'localhost'
            action, args = self._normalize_parameters(thing, additional_args=additional_args)

        # module: <stuff> is the more new-style invocation

        # walk the input dictionary to see we recognize a module name
        for (item, value) in iteritems(self._task_ds):
            if item in module_loader or item == 'meta' or item == 'include':
                # finding more than one module name is a problem
                if action is not None:
                    raise AnsibleParserError("conflicting action statements", obj=self._task_ds)
                action = item
                thing = value
                action, args = self._normalize_parameters(value, action=action, additional_args=additional_args)

        # if we didn't see any module in the task at all, it's not a task really
        if action is None:
            raise AnsibleParserError("no action detected in task", obj=self._task_ds)
        # FIXME: disabled for now, as there are other places besides the shell/script modules where
        #        having variables as the sole param for the module is valid (include_vars, add_host, and group_by?)
        #elif args.get('_raw_params', '') != '' and action not in ('command', 'shell', 'script', 'include_vars'):
        #    raise AnsibleParserError("this task has extra params, which is only allowed in the command, shell or script module.", obj=self._task_ds)

        # shell modules require special handling
        (action, args) = self._handle_shell_weirdness(action, args)

        return (action, args, delegate_to)
diff --git a/lib/ansible/parsing/splitter.py b/lib/ansible/parsing/splitter.py
new file mode 100644
index 0000000000..a1dc051d24
--- /dev/null
+++ b/lib/ansible/parsing/splitter.py
@@ -0,0 +1,273 @@
+# (c) 2014 James Cammarata, <jcammarata@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import re
+import codecs
+
+# Decode escapes adapted from rspeer's answer here:
+# http://stackoverflow.com/questions/4020539/process-escape-sequences-in-a-string-in-python
+_HEXCHAR = '[a-fA-F0-9]'
+_ESCAPE_SEQUENCE_RE = re.compile(r'''
+ ( \\U{0} # 8-digit hex escapes
+ | \\u{1} # 4-digit hex escapes
+ | \\x{2} # 2-digit hex escapes
+ | \\[0-7]{{1,3}} # Octal escapes
+ | \\N\{{[^}}]+\}} # Unicode characters by name
+ | \\[\\'"abfnrtv] # Single-character escapes
+ )'''.format(_HEXCHAR*8, _HEXCHAR*4, _HEXCHAR*2), re.UNICODE | re.VERBOSE)
+
+def _decode_escapes(s):
+ def decode_match(match):
+ return codecs.decode(match.group(0), 'unicode-escape')
+
+ return _ESCAPE_SEQUENCE_RE.sub(decode_match, s)
+
def parse_kv(args, check_raw=False):
    '''
    Convert a string of key/value items to a dict. If any free-form params
    are found and the check_raw option is set to True, they will be added
    to a new parameter called '_raw_params'. If check_raw is not enabled,
    they will simply be ignored.

    :param args: the argument string (or None) to parse
    :param check_raw: whether free-form params should be collected
    :returns: dict of parsed options
    :raises AnsibleError: on unbalanced quoting or a user-supplied
        parameter starting with an underscore
    '''

    ### FIXME: args should already be a unicode string
    from ansible.utils.unicode import to_unicode
    # FIX: both error paths below previously referenced undefined names
    # ('errors.AnsibleError' and 'AnsibleError') and raised NameError at
    # runtime; import locally, mirroring the local import style above
    from ansible.errors import AnsibleError
    args = to_unicode(args, nonstring='passthru')

    options = {}
    if args is not None:
        try:
            vargs = split_args(args)
        except ValueError as ve:
            if 'no closing quotation' in str(ve).lower():
                raise AnsibleError("error parsing argument string, try quoting the entire line.")
            else:
                raise

        raw_params = []
        for x in vargs:
            x = _decode_escapes(x)
            if "=" in x:
                # find the first unescaped '=' to split key from value
                pos = 0
                try:
                    while True:
                        pos = x.index('=', pos + 1)
                        if pos > 0 and x[pos - 1] != '\\':
                            break
                except ValueError:
                    # ran out of string, but we must have some escaped equals,
                    # so replace those and append this to the list of raw params
                    raw_params.append(x.replace('\\=', '='))
                    continue

                k = x[:pos]
                v = x[pos + 1:]

                # only internal variables can start with an underscore, so
                # we don't allow users to set them directy in arguments
                if k.startswith('_'):
                    raise AnsibleError("invalid parameter specified: '%s'" % k)

                # FIXME: make the retrieval of this list of shell/command
                #        options a function, so the list is centralized
                if check_raw and k not in ('creates', 'removes', 'chdir', 'executable', 'warn'):
                    raw_params.append(x)
                else:
                    options[k.strip()] = unquote(v.strip())
            else:
                raw_params.append(x)

        # recombine the free-form params, if any were found, and assign
        # them to a special option for use later by the shell/command module
        if len(raw_params) > 0:
            options[u'_raw_params'] = ' '.join(raw_params)

    return options
+
+def _get_quote_state(token, quote_char):
+ '''
+ the goal of this block is to determine if the quoted string
+ is unterminated in which case it needs to be put back together
+ '''
+ # the char before the current one, used to see if
+ # the current character is escaped
+ prev_char = None
+ for idx, cur_char in enumerate(token):
+ if idx > 0:
+ prev_char = token[idx-1]
+ if cur_char in '"\'' and prev_char != '\\':
+ if quote_char:
+ if cur_char == quote_char:
+ quote_char = None
+ else:
+ quote_char = cur_char
+ return quote_char
+
+def _count_jinja2_blocks(token, cur_depth, open_token, close_token):
+ '''
+ this function counts the number of opening/closing blocks for a
+ given opening/closing type and adjusts the current depth for that
+ block based on the difference
+ '''
+ num_open = token.count(open_token)
+ num_close = token.count(close_token)
+ if num_open != num_close:
+ cur_depth += (num_open - num_close)
+ if cur_depth < 0:
+ cur_depth = 0
+ return cur_depth
+
def split_args(args):
    '''
    Splits args on whitespace, but intelligently reassembles
    those that may have been split over a jinja2 block or quotes.

    When used in a remote module, we won't ever have to be concerned about
    jinja2 blocks, however this function is/will be used in the
    core portions as well before the args are templated.

    example input: a=b c="foo bar"
    example output: ['a=b', 'c="foo bar"']

    Basically this is a variation shlex that has some more intelligence for
    how Ansible needs to use it.

    :param args: the argument string to split
    :returns: list of tokens, with quotes preserved inside the tokens
    :raises Exception: when the string ends with an unbalanced quote or
        jinja2 block
    '''

    # the list of params parsed out of the arg string
    # this is going to be the result value when we are done
    params = []

    # Initial split on white space
    args = args.strip()
    # NOTE(review): args was already stripped above, so the second .strip()
    # here is redundant (but harmless)
    items = args.strip().split('\n')

    # iterate over the tokens, and reassemble any that may have been
    # split on a space inside a jinja2 block.
    # ex if tokens are "{{", "foo", "}}" these go together

    # These variables are used
    # to keep track of the state of the parsing, since blocks and quotes
    # may be nested within each other.

    quote_char = None
    inside_quotes = False
    print_depth = 0 # used to count nested jinja2 {{ }} blocks
    block_depth = 0 # used to count nested jinja2 {% %} blocks
    comment_depth = 0 # used to count nested jinja2 {# #} blocks

    # now we loop over each split chunk, coalescing tokens if the white space
    # split occurred within quotes or a jinja2 block of some kind
    for itemidx,item in enumerate(items):

        # we split on spaces and newlines separately, so that we
        # can tell which character we split on for reassembly
        # inside quotation characters
        tokens = item.strip().split(' ')

        line_continuation = False
        for idx,token in enumerate(tokens):

            # if we hit a line continuation character, but
            # we're not inside quotes, ignore it and continue
            # on to the next token while setting a flag
            if token == '\\' and not inside_quotes:
                line_continuation = True
                continue

            # store the previous quoting state for checking later
            was_inside_quotes = inside_quotes
            quote_char = _get_quote_state(token, quote_char)
            inside_quotes = quote_char is not None

            # multiple conditions may append a token to the list of params,
            # so we keep track with this flag to make sure it only happens once
            # append means add to the end of the list, don't append means concatenate
            # it to the end of the last token
            appended = False

            # if we're inside quotes now, but weren't before, append the token
            # to the end of the list, since we'll tack on more to it later
            # otherwise, if we're inside any jinja2 block, inside quotes, or we were
            # inside quotes (but aren't now) concat this token to the last param
            if inside_quotes and not was_inside_quotes:
                params.append(token)
                appended = True
            elif print_depth or block_depth or comment_depth or inside_quotes or was_inside_quotes:
                if idx == 0 and was_inside_quotes:
                    # continuing directly from a newline split inside quotes:
                    # no separator is re-inserted here
                    params[-1] = "%s%s" % (params[-1], token)
                elif len(tokens) > 1:
                    # rejoin with the space we split on (skip it for idx 0)
                    spacer = ''
                    if idx > 0:
                        spacer = ' '
                    params[-1] = "%s%s%s" % (params[-1], spacer, token)
                else:
                    # a single-token line: restore the newline we split on
                    params[-1] = "%s\n%s" % (params[-1], token)
                appended = True

            # if the number of paired block tags is not the same, the depth has changed, so we calculate that here
            # and may append the current token to the params (if we haven't previously done so)
            prev_print_depth = print_depth
            print_depth = _count_jinja2_blocks(token, print_depth, "{{", "}}")
            if print_depth != prev_print_depth and not appended:
                params.append(token)
                appended = True

            prev_block_depth = block_depth
            block_depth = _count_jinja2_blocks(token, block_depth, "{%", "%}")
            if block_depth != prev_block_depth and not appended:
                params.append(token)
                appended = True

            prev_comment_depth = comment_depth
            comment_depth = _count_jinja2_blocks(token, comment_depth, "{#", "#}")
            if comment_depth != prev_comment_depth and not appended:
                params.append(token)
                appended = True

            # finally, if we're at zero depth for all blocks and not inside quotes, and have not
            # yet appended anything to the list of params, we do so now
            if not (print_depth or block_depth or comment_depth) and not inside_quotes and not appended and token != '':
                params.append(token)

        # if this was the last token in the list, and we have more than
        # one item (meaning we split on newlines), add a newline back here
        # to preserve the original structure
        if len(items) > 1 and itemidx != len(items) - 1 and not line_continuation:
            params[-1] += '\n'

        # always clear the line continuation flag
        line_continuation = False

    # If we're done and things are not at zero depth or we're still inside quotes,
    # raise an error to indicate that the args were unbalanced
    if print_depth or block_depth or comment_depth or inside_quotes:
        raise Exception("error while splitting arguments, either an unbalanced jinja2 block or quotes")

    return params
+
def is_quoted(data):
    ''' returns True when the string starts and ends with the same quote character '''
    if len(data) == 0:
        return False
    first, last = data[0], data[-1]
    return first == last and first in ('"', "'")

def unquote(data):
    ''' removes first and last quotes from a string, if the string starts and ends with the same quotes '''
    return data[1:-1] if is_quoted(data) else data
diff --git a/lib/ansible/parsing/utils/__init__.py b/lib/ansible/parsing/utils/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/lib/ansible/parsing/utils/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/lib/ansible/parsing/utils/jsonify.py b/lib/ansible/parsing/utils/jsonify.py
new file mode 100644
index 0000000000..59dbf9f8c4
--- /dev/null
+++ b/lib/ansible/parsing/utils/jsonify.py
@@ -0,0 +1,45 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+try:
+ import json
+except ImportError:
+ import simplejson as json
+
def jsonify(result, format=False):
    ''' format JSON output (compressed or uncompressed)

    :param result: dict to serialize; None yields "{}"
    :param format: when True, pretty-print with an indent of 4
        (the name shadows the builtin but is kept for caller compatibility)
    :returns: JSON string with keys sorted
    '''

    if result is None:
        return "{}"

    # work on a shallow copy so the caller's dict is not mutated
    result2 = result.copy()
    for key, value in result2.items():
        # FIX: was "type(value) is str" + .decode(), which crashes on
        # Python 3 (str has no .decode there); isinstance(value, bytes)
        # matches the same values on Python 2 (str is bytes) and decodes
        # byte strings so they serialize as text
        if isinstance(value, bytes):
            result2[key] = value.decode('utf-8', 'ignore')

    indent = None
    if format:
        indent = 4

    try:
        return json.dumps(result2, sort_keys=True, indent=indent, ensure_ascii=False)
    except UnicodeDecodeError:
        # fall back to ASCII-escaped output for undecodable data
        return json.dumps(result2, sort_keys=True, indent=indent)
+
diff --git a/lib/ansible/parsing/vault/__init__.py b/lib/ansible/parsing/vault/__init__.py
new file mode 100644
index 0000000000..e45fddc197
--- /dev/null
+++ b/lib/ansible/parsing/vault/__init__.py
@@ -0,0 +1,603 @@
+# (c) 2014, James Tanner <tanner.jc@gmail.com>
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+#
+# ansible-pull is a script that runs ansible in local mode
+# after checking out a playbooks directory from source repo. There is an
+# example playbook to bootstrap this script in the examples/ dir which
+# installs ansible and sets it up to run on cron.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import os
+import shlex
+import shutil
+import tempfile
+from io import BytesIO
+from subprocess import call
+from ansible import errors
+from hashlib import sha256
+# Note: Only used for loading obsolete VaultAES files. All files are written
+# using the newer VaultAES256 which does not require md5
+from hashlib import md5
+from binascii import hexlify
+from binascii import unhexlify
+from six import binary_type, byte2int, PY2, text_type
+from ansible import constants as C
+from ansible.utils.unicode import to_unicode, to_bytes
+
+
+try:
+ from Crypto.Hash import SHA256, HMAC
+ HAS_HASH = True
+except ImportError:
+ HAS_HASH = False
+
+# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
+try:
+ from Crypto.Util import Counter
+ HAS_COUNTER = True
+except ImportError:
+ HAS_COUNTER = False
+
+# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
+try:
+ from Crypto.Protocol.KDF import PBKDF2
+ HAS_PBKDF2 = True
+except ImportError:
+ HAS_PBKDF2 = False
+
+# AES IMPORTS
+try:
+ from Crypto.Cipher import AES as AES
+ HAS_AES = True
+except ImportError:
+ HAS_AES = False
+
# Error text shown whenever the installed pycrypto lacks a primitive we need
# (see the HAS_* flags set by the guarded imports above).
CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"

# Magic marker beginning every vault-encrypted payload (written/parsed by
# VaultLib._add_header / VaultLib._split_header).
HEADER=u'$ANSIBLE_VAULT'
# Accepted cipher name suffixes; 'Vault' + name must be a class in this module.
CIPHER_WHITELIST=['AES', 'AES256']
+
+
class VaultLib(object):
    """Encrypt/decrypt engine for vault payloads.

    Dispatches to one of the ``Vault*`` cipher classes defined in this
    module and frames the ciphertext with the
    ``$ANSIBLE_VAULT;<version>;<cipher>`` header.
    """

    def __init__(self, password):
        # password: shared secret handed straight through to the cipher classes
        self.password = password
        # cipher_name: set before encrypt() (or defaulted there), or parsed
        # from the file header during decrypt()
        self.cipher_name = None
        # vault format version written into the header
        self.version = '1.1'

    def is_encrypted(self, data):
        """Return True when *data* begins with the vault header marker."""
        data = to_unicode(data)
        if data.startswith(HEADER):
            return True
        else:
            return False

    def encrypt(self, data):
        """Encrypt *data* and return it framed with the vault header.

        Raises AnsibleError if *data* is already vault-encrypted or the
        selected cipher is unknown / not whitelisted.
        """
        data = to_unicode(data)

        if self.is_encrypted(data):
            raise errors.AnsibleError("data is already encrypted")

        if not self.cipher_name:
            # default to the strongest supported cipher
            self.cipher_name = "AES256"
            # raise errors.AnsibleError("the cipher must be set before encrypting data")

        # look up the cipher implementation by class name ('Vault' + name)
        if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
            cipher = globals()['Vault' + self.cipher_name]
            this_cipher = cipher()
        else:
            raise errors.AnsibleError("{} cipher could not be found".format(self.cipher_name))

        """
        # combine sha + data
        this_sha = sha256(data).hexdigest()
        tmp_data = this_sha + "\n" + data
        """

        # encrypt sha + data
        enc_data = this_cipher.encrypt(data, self.password)

        # add header
        tmp_data = self._add_header(enc_data)
        return tmp_data

    def decrypt(self, data):
        """Strip the header from *data* and return the decrypted text.

        Raises AnsibleError when no password is set, *data* is not
        vault-encrypted, the cipher is unknown, or decryption fails.
        """
        data = to_bytes(data)

        if self.password is None:
            raise errors.AnsibleError("A vault password must be specified to decrypt data")

        if not self.is_encrypted(data):
            raise errors.AnsibleError("data is not encrypted")

        # clean out header; side effect: records version and cipher_name
        data = self._split_header(data)

        # create the cipher object
        ciphername = to_unicode(self.cipher_name)
        if 'Vault' + ciphername in globals() and ciphername in CIPHER_WHITELIST:
            cipher = globals()['Vault' + ciphername]
            this_cipher = cipher()
        else:
            raise errors.AnsibleError("{} cipher could not be found".format(ciphername))

        # try to unencrypt data
        data = this_cipher.decrypt(data, self.password)
        if data is None:
            # ciphers return None on HMAC/sha mismatch rather than raising
            raise errors.AnsibleError("Decryption failed")

        return data

    def _add_header(self, data):
        # combine header and encrypted data in 80 char columns

        #tmpdata = hexlify(data)
        # the ciphertext is already hex/ascii, so plain slicing is safe here
        tmpdata = [to_bytes(data[i:i+80]) for i in range(0, len(data), 80)]
        if not self.cipher_name:
            raise errors.AnsibleError("the cipher must be set before adding a header")

        # first line is: $ANSIBLE_VAULT;<version>;<cipher>
        dirty_data = to_bytes(HEADER + ";" + self.version + ";" + self.cipher_name + "\n")
        for l in tmpdata:
            dirty_data += l + b'\n'

        return dirty_data


    def _split_header(self, data):
        # used by decrypt
        # side effect: stores the version and cipher_name parsed from line 1

        tmpdata = data.split(b'\n')
        tmpheader = tmpdata[0].strip().split(b';')

        self.version = to_unicode(tmpheader[1].strip())
        self.cipher_name = to_unicode(tmpheader[2].strip())
        clean_data = b'\n'.join(tmpdata[1:])

        """
        # strip out newline, join, unhex
        clean_data = [ x.strip() for x in clean_data ]
        clean_data = unhexlify(''.join(clean_data))
        """

        return clean_data

    # context-manager support; there is nothing to clean up on exit
    def __enter__(self):
        return self

    def __exit__(self, *err):
        pass
+
class VaultEditor(object):
    """Command-level helper around VaultLib for a single vault file.

    Simple file I/O is centralized in read_data()/write_data()/
    shuffle_files() (plus the $EDITOR/$PAGER launch helpers) so the code
    isn't duplicated across the create/edit/view/encrypt/decrypt/rekey
    operations ... "Don't Repeat Yourself", etc.
    """

    def __init__(self, cipher_name, password, filename):
        # cipher_name may be None; VaultLib then defaults to AES256 on encrypt
        self.cipher_name = cipher_name
        self.password = password
        self.filename = filename

    def _edit_file_helper(self, existing_data=None, cipher=None):
        """Open $EDITOR on a tempfile (pre-filled with *existing_data* when
        given), then encrypt the edited contents into self.filename."""
        # make sure the umask is set to a sane value so the plaintext
        # tempfile is not readable by other users
        old_umask = os.umask(0o077)

        # Create a tempfile
        _, tmp_path = tempfile.mkstemp()

        if existing_data:
            self.write_data(existing_data, tmp_path)

        # drop the user into an editor on the tmp file
        call(self._editor_shell_command(tmp_path))
        tmpdata = self.read_data(tmp_path)

        # create new vault
        this_vault = VaultLib(self.password)
        if cipher:
            this_vault.cipher_name = cipher

        # encrypt new data and write out to tmp
        enc_data = this_vault.encrypt(tmpdata)
        self.write_data(enc_data, tmp_path)

        # shuffle tmp file into place
        self.shuffle_files(tmp_path, self.filename)

        # and restore umask
        os.umask(old_umask)

    def create_file(self):
        """ create a new encrypted file """

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        if os.path.isfile(self.filename):
            raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename)

        # Let the user specify contents and save file
        self._edit_file_helper(cipher=self.cipher_name)

    def decrypt_file(self):
        """Decrypt self.filename in place (it must exist and be encrypted)."""

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        if not os.path.isfile(self.filename):
            raise errors.AnsibleError("%s does not exist" % self.filename)

        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        if this_vault.is_encrypted(tmpdata):
            dec_data = this_vault.decrypt(tmpdata)
            if dec_data is None:
                raise errors.AnsibleError("Decryption failed")
            else:
                self.write_data(dec_data, self.filename)
        else:
            raise errors.AnsibleError("%s is not encrypted" % self.filename)

    def edit_file(self):
        """Decrypt self.filename, open it in $EDITOR, re-encrypt on save."""

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        # decrypt to tmpfile
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)

        # let the user edit the data and save
        self._edit_file_helper(existing_data=dec_data)
        # we want the cipher to default to AES256 on re-encrypt (getting
        # rid of files encrypted with the legacy AES cipher), so the old
        # cipher name is deliberately NOT carried over:
        #self._edit_file_helper(existing_data=dec_data, cipher=this_vault.cipher_name)


    def view_file(self):
        """Decrypt to a tempfile, page it with $PAGER, then remove it."""

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        # decrypt to tmpfile
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)
        _, tmp_path = tempfile.mkstemp()
        self.write_data(dec_data, tmp_path)

        # drop the user into pager on the tmp file
        call(self._pager_shell_command(tmp_path))
        os.remove(tmp_path)

    def encrypt_file(self):
        """Encrypt self.filename in place (it must exist and be plaintext)."""

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        if not os.path.isfile(self.filename):
            raise errors.AnsibleError("%s does not exist" % self.filename)

        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        this_vault.cipher_name = self.cipher_name
        if not this_vault.is_encrypted(tmpdata):
            enc_data = this_vault.encrypt(tmpdata)
            self.write_data(enc_data, self.filename)
        else:
            raise errors.AnsibleError("%s is already encrypted" % self.filename)

    def rekey_file(self, new_password):
        """Re-encrypt self.filename under *new_password* (default cipher)."""

        if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

        # decrypt with the current password
        tmpdata = self.read_data(self.filename)
        this_vault = VaultLib(self.password)
        dec_data = this_vault.decrypt(tmpdata)

        # create new vault
        new_vault = VaultLib(new_password)

        # we want to force cipher to the default
        #new_vault.cipher_name = this_vault.cipher_name

        # re-encrypt data and re-write file
        enc_data = new_vault.encrypt(dec_data)
        self.write_data(enc_data, self.filename)

    def read_data(self, filename):
        """Return the raw bytes of *filename*."""
        # with-statement closes the handle even if read() raises
        # (the original open/read/close leaked the fd on error)
        with open(filename, "rb") as f:
            return f.read()

    def write_data(self, data, filename):
        """(Over)write *filename* with *data* coerced to bytes."""
        if os.path.isfile(filename):
            os.remove(filename)
        with open(filename, "wb") as f:
            f.write(to_bytes(data))

    def shuffle_files(self, src, dest):
        """Replace *dest* with *src* (remove any existing dest, then move)."""
        # overwrite dest with src
        if os.path.isfile(dest):
            os.remove(dest)
        shutil.move(src, dest)

    def _editor_shell_command(self, filename):
        """Build the argv for $EDITOR (default 'vim') on *filename*."""
        # shlex.split lets EDITOR carry its own flags, e.g. "code -w"
        EDITOR = os.environ.get('EDITOR', 'vim')
        editor = shlex.split(EDITOR)
        editor.append(filename)

        return editor

    def _pager_shell_command(self, filename):
        """Build the argv for $PAGER (default 'less') on *filename*."""
        PAGER = os.environ.get('PAGER', 'less')
        pager = shlex.split(PAGER)
        pager.append(filename)

        return pager
+
+########################################
+# CIPHERS #
+########################################
+
class VaultAES(object):
    """Legacy vault cipher: AES-CBC with an MD5-iterated KDF and an inner
    sha256 integrity line. Kept only so old files can still be read."""

    # this version has been obsoleted by the VaultAES256 class
    # which uses encrypt-then-mac (fixing order) and also improving the KDF used
    # code remains for upgrade purposes only
    # http://stackoverflow.com/a/16761459

    def __init__(self):
        # refuse to construct at all without the AES primitive
        if not HAS_AES:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

    def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):

        """ Create a key and an initialization vector """

        # iterated-MD5 key stretching in the style of OpenSSL's
        # EVP_BytesToKey: keep hashing previous-digest + password + salt
        # until enough material for key + iv has accumulated.
        # NOTE(review): "{}{}{}".format() on Python 3 formats the bytes
        # objects via their repr (b'..'), so the derived key would differ
        # from the classic byte-concatenation scheme there — confirm
        # before relying on cross-Python compatibility of old files.
        d = d_i = b''
        while len(d) < key_length + iv_length:
            text = "{}{}{}".format(d_i, password, salt)
            d_i = md5(to_bytes(text)).digest()
            d += d_i

        key = d[:key_length]
        iv = d[key_length:key_length+iv_length]

        return key, iv

    def encrypt(self, data, password, key_length=32):

        """ Read plaintext data from in_file and write encrypted to out_file """


        # combine sha + data: prepend a sha256 line so decrypt() can
        # verify the password was correct
        this_sha = sha256(to_bytes(data)).hexdigest()
        tmp_data = this_sha + "\n" + data

        in_file = BytesIO(to_bytes(tmp_data))
        in_file.seek(0)
        out_file = BytesIO()

        bs = AES.block_size

        # Get a block of random data. EL does not have Crypto.Random.new()
        # so os.urandom is used for cross platform purposes
        salt = os.urandom(bs - len('Salted__'))

        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        # OpenSSL-compatible file magic: 'Salted__' + 8-byte salt
        full = to_bytes(b'Salted__' + salt)
        out_file.write(full)
        finished = False
        while not finished:
            chunk = in_file.read(1024 * bs)
            if len(chunk) == 0 or len(chunk) % bs != 0:
                # PKCS#7-style padding; a full extra block when already aligned
                padding_length = (bs - len(chunk) % bs) or bs
                chunk += to_bytes(padding_length * chr(padding_length))
                finished = True
            out_file.write(cipher.encrypt(chunk))

        out_file.seek(0)
        enc_data = out_file.read()
        # hexlify so _add_header can frame it as ascii lines
        tmp_data = hexlify(enc_data)

        return tmp_data


    def decrypt(self, data, password, key_length=32):

        """ Read encrypted data from in_file and write decrypted to out_file """

        # http://stackoverflow.com/a/14989032

        # undo the line wrapping and hex encoding applied on encrypt
        data = b''.join(data.split(b'\n'))
        data = unhexlify(data)

        in_file = BytesIO(data)
        in_file.seek(0)
        out_file = BytesIO()

        bs = AES.block_size
        # first block is the 'Salted__' magic plus the salt
        tmpsalt = in_file.read(bs)
        salt = tmpsalt[len('Salted__'):]
        key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        next_chunk = b''
        finished = False

        # decrypt one chunk ahead so the padding in the final chunk can
        # be stripped before it is written out
        while not finished:
            chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
            if len(next_chunk) == 0:
                if PY2:
                    padding_length = ord(chunk[-1])
                else:
                    # py3: indexing bytes already yields an int
                    padding_length = chunk[-1]

                chunk = chunk[:-padding_length]
                finished = True

            out_file.write(chunk)
            out_file.flush()

        # reset the stream pointer to the beginning
        out_file.seek(0)
        out_data = out_file.read()
        out_file.close()
        new_data = to_unicode(out_data)

        # split out sha and verify decryption
        split_data = new_data.split("\n")
        this_sha = split_data[0]
        this_data = '\n'.join(split_data[1:])
        test_sha = sha256(to_bytes(this_data)).hexdigest()

        if this_sha != test_sha:
            # sha mismatch means a wrong password (or corrupted file)
            raise errors.AnsibleError("Decryption failed")

        return this_data
+
+
class VaultAES256(object):

    """
    Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
    Keys are derived using PBKDF2
    """

    # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html

    def __init__(self):

        # refuse to construct without the needed pycrypto primitives
        if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
            raise errors.AnsibleError(CRYPTO_UPGRADE)

    def gen_key_initctr(self, password, salt):
        """Derive (cipher key, hmac key, hexlified iv) from password+salt
        via a single PBKDF2 run (10000 rounds of HMAC-SHA256)."""
        # 16 for AES 128, 32 for AES256
        keylength = 32

        # match the size used for counter.new to avoid extra work
        ivlength = 16

        hash_function = SHA256

        # make two keys and one iv
        pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()


        # one derivation yields key1 | key2 | iv back to back
        derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
                            count=10000, prf=pbkdf2_prf)

        key1 = derivedkey[:keylength]
        key2 = derivedkey[keylength:(keylength * 2)]
        iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]

        return key1, key2, hexlify(iv)


    def encrypt(self, data, password):
        """Encrypt *data*; returns hexlify(salt-hex + '\\n' + hmac-hex +
        '\\n' + ciphertext-hex)."""
        # fresh random salt for every encryption
        salt = os.urandom(32)
        key1, key2, iv = self.gen_key_initctr(password, salt)

        # PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
        # NOTE(review): data arrives as text here (VaultLib.encrypt upcasts
        # to unicode); whether AES.encrypt then accepts it depends on the
        # pycrypto build / Python version — confirm on Python 3.
        bs = AES.block_size
        padding_length = (bs - len(data) % bs) or bs
        data += padding_length * chr(padding_length)

        # COUNTER.new PARAMETERS
        # 1) nbits (integer) - Length of the counter, in bits.
        # 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr

        ctr = Counter.new(128, initial_value=int(iv, 16))

        # AES.new PARAMETERS
        # 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
        # 2) MODE_CTR, is the recommended mode
        # 3) counter=<CounterObject>

        cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)

        # ENCRYPT PADDED DATA
        cryptedData = cipher.encrypt(data)

        # COMBINE SALT, DIGEST AND DATA (encrypt-then-mac: hmac over ciphertext)
        hmac = HMAC.new(key2, cryptedData, SHA256)
        message = b''.join([hexlify(salt), b"\n", to_bytes(hmac.hexdigest()), b"\n", hexlify(cryptedData)])
        message = hexlify(message)
        return message

    def decrypt(self, data, password):
        """Verify the HMAC and decrypt; returns text, or None on HMAC
        mismatch (wrong password / tampering)."""
        # SPLIT SALT, DIGEST, AND DATA
        data = b''.join(data.split(b"\n"))
        data = unhexlify(data)
        salt, cryptedHmac, cryptedData = data.split(b"\n", 2)
        salt = unhexlify(salt)
        cryptedData = unhexlify(cryptedData)

        key1, key2, iv = self.gen_key_initctr(password, salt)

        # EXIT EARLY IF DIGEST DOESN'T MATCH
        hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
        if not self.is_equal(cryptedHmac, to_bytes(hmacDecrypt.hexdigest())):
            return None

        # SET THE COUNTER AND THE CIPHER
        ctr = Counter.new(128, initial_value=int(iv, 16))
        cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)

        # DECRYPT PADDED DATA
        decryptedData = cipher.decrypt(cryptedData)

        # UNPAD DATA
        try:
            # py2: indexing bytes yields a 1-char str
            padding_length = ord(decryptedData[-1])
        except TypeError:
            # py3: indexing bytes already yields an int
            padding_length = decryptedData[-1]

        decryptedData = decryptedData[:-padding_length]

        return to_unicode(decryptedData)

    def is_equal(self, a, b):
        """
        Comparing 2 byte arrrays in constant time
        to avoid timing attacks.

        It would be nice if there was a library for this but
        hey.
        """
        # http://codahale.com/a-lesson-in-timing-attacks/
        if len(a) != len(b):
            return False

        # OR the XOR of every byte pair so the loop always runs to the end
        result = 0
        for x, y in zip(a, b):
            if PY2:
                result |= ord(x) ^ ord(y)
            else:
                result |= x ^ y
        return result == 0
diff --git a/lib/ansible/parsing/yaml/__init__.py b/lib/ansible/parsing/yaml/__init__.py
new file mode 100644
index 0000000000..785fc45992
--- /dev/null
+++ b/lib/ansible/parsing/yaml/__init__.py
@@ -0,0 +1,21 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
diff --git a/lib/ansible/parsing/yaml/constructor.py b/lib/ansible/parsing/yaml/constructor.py
new file mode 100644
index 0000000000..d1a2a01bc2
--- /dev/null
+++ b/lib/ansible/parsing/yaml/constructor.py
@@ -0,0 +1,91 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from yaml.constructor import Constructor
+from ansible.parsing.yaml.objects import AnsibleMapping, AnsibleSequence, AnsibleUnicode
+
class AnsibleConstructor(Constructor):
    """YAML constructor that builds Ansible's position-aware objects
    (AnsibleMapping / AnsibleSequence / AnsibleUnicode) instead of plain
    dict / list / unicode, so later errors can point at the exact file,
    line and column a value came from."""

    def __init__(self, file_name=None):
        # remember the on-disk name (if pre-read data was handed to
        # yaml.load) so positions don't report yaml's default '<string>'
        self._ansible_file_name = file_name
        super(AnsibleConstructor, self).__init__()

    def construct_yaml_map(self, node):
        # two-step (generator) construction, as PyYAML expects for
        # mappings that may contain references back to themselves
        data = AnsibleMapping()
        yield data
        value = self.construct_mapping(node)
        data.update(value)
        data.ansible_pos = self._node_position_info(node)

    def construct_mapping(self, node, deep=False):
        # Fixed: delegate via our own MRO. The original called
        # super(Constructor, self), which works only because Constructor
        # happens not to override construct_mapping — it would silently
        # skip such an override if one were added.
        ret = AnsibleMapping(super(AnsibleConstructor, self).construct_mapping(node, deep=deep))
        ret.ansible_pos = self._node_position_info(node)

        return ret

    def construct_yaml_str(self, node):
        # Override the default string handling function
        # to always return unicode objects
        value = self.construct_scalar(node)
        ret = AnsibleUnicode(value)

        ret.ansible_pos = self._node_position_info(node)

        return ret

    def construct_yaml_seq(self, node):
        # generator construction, mirroring construct_yaml_map
        data = AnsibleSequence()
        yield data
        data.extend(self.construct_sequence(node))
        data.ansible_pos = self._node_position_info(node)

    def _node_position_info(self, node):
        """Return (datasource, line, column) for *node*, 1-indexed."""
        # the line number where the previous token has ended (plus empty lines)
        # Add one so that the first line is line 1 rather than line 0
        column = node.start_mark.column + 1
        line = node.start_mark.line + 1

        # in some cases, we may have pre-read the data and then
        # passed it to the load() call for YAML, in which case we
        # want to override the default datasource (which would be
        # '<string>') to the actual filename we read in
        datasource = self._ansible_file_name or node.start_mark.name

        return (datasource, line, column)
+
# Register the overrides: route the core YAML tags (and the legacy
# python/ variants) for mappings, strings and sequences through the
# position-aware constructors above.
AnsibleConstructor.add_constructor(
    u'tag:yaml.org,2002:map',
    AnsibleConstructor.construct_yaml_map)

AnsibleConstructor.add_constructor(
    u'tag:yaml.org,2002:python/dict',
    AnsibleConstructor.construct_yaml_map)

AnsibleConstructor.add_constructor(
    u'tag:yaml.org,2002:str',
    AnsibleConstructor.construct_yaml_str)

AnsibleConstructor.add_constructor(
    u'tag:yaml.org,2002:python/unicode',
    AnsibleConstructor.construct_yaml_str)

AnsibleConstructor.add_constructor(
    u'tag:yaml.org,2002:seq',
    AnsibleConstructor.construct_yaml_seq)
diff --git a/lib/ansible/parsing/yaml/loader.py b/lib/ansible/parsing/yaml/loader.py
new file mode 100644
index 0000000000..e8547ff0d1
--- /dev/null
+++ b/lib/ansible/parsing/yaml/loader.py
@@ -0,0 +1,51 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+try:
+ from _yaml import CParser, CEmitter
+ HAVE_PYYAML_C = True
+except ImportError:
+ HAVE_PYYAML_C = False
+
+from yaml.resolver import Resolver
+
+from ansible.parsing.yaml.constructor import AnsibleConstructor
+
if HAVE_PYYAML_C:
    # libyaml is available: build the loader on the C parser for speed
    class AnsibleLoader(CParser, AnsibleConstructor, Resolver):
        """YAML loader producing position-aware Ansible objects (C parser)."""
        def __init__(self, stream, file_name=None):
            CParser.__init__(self, stream)
            # file_name lets node positions report the real filename
            # when pre-read data was passed to yaml.load
            AnsibleConstructor.__init__(self, file_name=file_name)
            Resolver.__init__(self)
else:
    from yaml.composer import Composer
    from yaml.reader import Reader
    from yaml.scanner import Scanner
    from yaml.parser import Parser

    # pure-python fallback: assemble the equivalent loader from
    # PyYAML's python components
    class AnsibleLoader(Reader, Scanner, Parser, Composer, AnsibleConstructor, Resolver):
        """YAML loader producing position-aware Ansible objects (pure python)."""
        def __init__(self, stream, file_name=None):
            Reader.__init__(self, stream)
            Scanner.__init__(self)
            Parser.__init__(self)
            Composer.__init__(self)
            AnsibleConstructor.__init__(self, file_name=file_name)
            Resolver.__init__(self)
diff --git a/lib/ansible/parsing/yaml/objects.py b/lib/ansible/parsing/yaml/objects.py
new file mode 100644
index 0000000000..33ea1ad37e
--- /dev/null
+++ b/lib/ansible/parsing/yaml/objects.py
@@ -0,0 +1,65 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from six import text_type
+
+
class AnsibleBaseYAMLObject(object):
    '''
    Mixin for the YAML-producing subclasses below: records where in the
    parsed document a value came from (source name, line, column) so
    later error messages can point at the exact spot.
    '''
    # defaults used before any position has been attached
    _data_source = None
    _line_number = 0
    _column_number = 0

    def _get_ansible_position(self):
        '''Return the stored (source, line, column) triple.'''
        return (self._data_source, self._line_number, self._column_number)

    def _set_ansible_position(self, obj):
        '''Store a (source, line, column) triple; reject anything else.'''
        try:
            source, lineno, colno = obj
        except (TypeError, ValueError):
            raise AssertionError(
                'ansible_pos can only be set with a tuple/list '
                'of three values: source, line number, column number'
            )
        self._data_source = source
        self._line_number = lineno
        self._column_number = colno

    ansible_pos = property(_get_ansible_position, _set_ansible_position)
+
+
class AnsibleMapping(AnsibleBaseYAMLObject, dict):
    ''' dict subclass that carries YAML source-position info (ansible_pos) '''
    pass
+
+
class AnsibleUnicode(AnsibleBaseYAMLObject, text_type):
    ''' text (unicode) subclass that carries YAML source-position info (ansible_pos) '''
    pass
+
+
class AnsibleSequence(AnsibleBaseYAMLObject, list):
    ''' list subclass that carries YAML source-position info (ansible_pos) '''
    pass