summaryrefslogtreecommitdiff
path: root/lib/ansible
diff options
context:
space:
mode:
Diffstat (limited to 'lib/ansible')
-rw-r--r--lib/ansible/constants.py9
-rw-r--r--lib/ansible/executor/module_common.py84
-rw-r--r--lib/ansible/plugins/strategy/__init__.py23
3 files changed, 86 insertions, 30 deletions
diff --git a/lib/ansible/constants.py b/lib/ansible/constants.py
index cd7659a0c6..514fda8160 100644
--- a/lib/ansible/constants.py
+++ b/lib/ansible/constants.py
@@ -20,6 +20,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
+import tempfile
from string import ascii_letters, digits
from ansible.compat.six import string_types
@@ -47,7 +48,7 @@ def shell_expand(path):
path = os.path.expanduser(os.path.expandvars(path))
return path
-def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False, islist=False, isnone=False, ispath=False, ispathlist=False):
+def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False, islist=False, isnone=False, ispath=False, ispathlist=False, istmppath=False):
''' return a configuration variable with casting '''
value = _get_config(p, section, key, env_var, default)
if boolean:
@@ -65,6 +66,11 @@ def get_config(p, section, key, env_var, default, boolean=False, integer=False,
value = None
elif ispath:
value = shell_expand(value)
+ elif istmppath:
+ value = shell_expand(value)
+ if not os.path.exists(value):
+ os.makedirs(value, 0o700)
+ value = tempfile.mkdtemp(prefix='ansible-local-tmp', dir=value)
elif ispathlist:
if isinstance(value, string_types):
value = [shell_expand(x) for x in value.split(os.pathsep)]
@@ -136,6 +142,7 @@ DEFAULT_HOST_LIST = get_config(p, DEFAULTS,'inventory', 'ANSIBLE_INVENTO
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', None, ispathlist=True)
DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles', ispathlist=True)
DEFAULT_REMOTE_TMP = get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp')
+DEFAULT_LOCAL_TMP = get_config(p, DEFAULTS, 'local_tmp', 'ANSIBLE_LOCAL_TEMP', '$HOME/.ansible/tmp', istmppath=True)
DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, integer=True)
DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
diff --git a/lib/ansible/executor/module_common.py b/lib/ansible/executor/module_common.py
index 9685411dc7..144af8c100 100644
--- a/lib/ansible/executor/module_common.py
+++ b/lib/ansible/executor/module_common.py
@@ -33,6 +33,7 @@ from ansible import __version__
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.plugins.strategy import action_write_locks
try:
from __main__ import display
@@ -275,7 +276,11 @@ def _get_facility(task_vars):
facility = task_vars['ansible_syslog_facility']
return facility
-def meta_finder(data, snippet_names, snippet_data, zf):
+def recursive_finder(data, snippet_names, snippet_data, zf):
+ """
+ Using ModuleDepFinder, make sure we have all of the module_utils files that
+ the module and its module_utils files need.
+ """
tree = ast.parse(data)
finder = ModuleDepFinder()
finder.visit(tree)
@@ -290,7 +295,7 @@ def meta_finder(data, snippet_names, snippet_data, zf):
snippet_names.update(new_snippets)
for snippet_name in tuple(new_snippets):
- meta_finder(snippet_data[snippet_name], snippet_names, snippet_data, zf)
+ recursive_finder(snippet_data[snippet_name], snippet_names, snippet_data, zf)
del snippet_data[snippet_name]
def _find_snippet_imports(module_name, module_data, module_path, module_args, task_vars, module_compression):
@@ -350,23 +355,61 @@ def _find_snippet_imports(module_name, module_data, module_path, module_args, ta
except AttributeError:
display.warning(u'Bad module compression string specified: %s. Using ZIP_STORED (no compression)' % module_compression)
compression_method = zipfile.ZIP_STORED
- zipoutput = BytesIO()
- zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
- zf.writestr('ansible/__init__.py', b''.join((b"__version__ = '", to_bytes(__version__), b"'\n")))
- zf.writestr('ansible/module_utils/__init__.py', b'')
- zf.writestr('ansible/module_exec/__init__.py', b'')
-
- zf.writestr('ansible/module_exec/%s/__init__.py' % module_name, b"")
- zf.writestr('ansible/module_exec/%s/__main__.py' % module_name, module_data)
-
- snippet_data = dict()
- meta_finder(module_data, snippet_names, snippet_data, zf)
- zf.close()
+
+ lookup_path = os.path.join(C.DEFAULT_LOCAL_TMP, 'ziploader_cache')
+ if not os.path.exists(lookup_path):
+ os.mkdir(lookup_path)
+ cached_module_filename = os.path.join(lookup_path, "%s-%s" % (module_name, module_compression))
+
+ zipdata = None
+ # Optimization -- don't lock if the module has already been cached
+ if os.path.exists(cached_module_filename):
+ zipdata = open(cached_module_filename, 'rb').read()
+ # Fool the check later... I think we should just remove the check
+ snippet_names.add('basic')
+ else:
+ with action_write_locks[module_name]:
+ # Check that no other process has created this while we were
+ # waiting for the lock
+ if not os.path.exists(cached_module_filename):
+ # Create the module zip data
+ zipoutput = BytesIO()
+ zf = zipfile.ZipFile(zipoutput, mode='w', compression=compression_method)
+ zf.writestr('ansible/__init__.py', b''.join((b"__version__ = '", to_bytes(__version__), b"'\n")))
+ zf.writestr('ansible/module_utils/__init__.py', b'')
+ zf.writestr('ansible/module_exec/__init__.py', b'')
+
+ zf.writestr('ansible/module_exec/%s/__init__.py' % module_name, b"")
+ zf.writestr('ansible/module_exec/%s/__main__.py' % module_name, module_data)
+
+ snippet_data = dict()
+ recursive_finder(module_data, snippet_names, snippet_data, zf)
+ zf.close()
+ zipdata = base64.b64encode(zipoutput.getvalue())
+
+ # Write the assembled module to a temp file (write to temp
+ # so that no one looking for the file reads a partially
+ # written file)
+ with open(cached_module_filename + '-part', 'w') as f:
+ f.write(zipdata)
+
+ # Rename the file into its final position in the cache so
+ # future users of this module can read it off the
+ # filesystem instead of constructing from scratch.
+ os.rename(cached_module_filename + '-part', cached_module_filename)
+
+ if zipdata is None:
+ # Another process wrote the file while we were waiting for
+ # the write lock. Go ahead and read the data from disk
+ # instead of re-creating it.
+ zipdata = open(cached_module_filename, 'rb').read()
+ # Fool the check later... I think we should just remove the check
+ snippet_names.add('basic')
shebang, interpreter = _get_shebang(u'/usr/bin/python', task_vars)
if shebang is None:
shebang = u'#!/usr/bin/python'
output.write(to_bytes(STRIPPED_ZIPLOADER_TEMPLATE % dict(
- zipdata=base64.b64encode(zipoutput.getvalue()),
+ zipdata=zipdata,
ansible_module=module_name,
args=python_repred_args,
constants=python_repred_constants,
@@ -450,17 +493,6 @@ def modify_module(module_name, module_path, module_args, task_vars=dict(), modul
which results in the inclusion of the common code from powershell.ps1
"""
- ### TODO: Optimization ideas if this code is actually a source of slowness:
- # * Fix comment stripping: Currently doesn't preserve shebangs and encoding info (but we unconditionally add encoding info)
- # * Use pyminifier if installed
- # * comment stripping/pyminifier needs to have config setting to turn it
- # off for debugging purposes (goes along with keep remote but should be
- # separate otherwise users wouldn't be able to get info on what the
- # minifier output)
- # * Only split into lines and recombine into strings once
- # * Cache the modified module? If only the args are different and we do
- # that as the last step we could cache all the work up to that point.
-
with open(module_path, 'rb') as f:
# read in the module source
diff --git a/lib/ansible/plugins/strategy/__init__.py b/lib/ansible/plugins/strategy/__init__.py
index 7b2a40a71a..f06d4f6f75 100644
--- a/lib/ansible/plugins/strategy/__init__.py
+++ b/lib/ansible/plugins/strategy/__init__.py
@@ -19,15 +19,16 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-from ansible.compat.six.moves import queue as Queue
-from ansible.compat.six import iteritems, text_type, string_types
-
import json
import time
import zlib
+from collections import defaultdict
+from multiprocessing import Lock
from jinja2.exceptions import UndefinedError
+from ansible.compat.six.moves import queue as Queue
+from ansible.compat.six import iteritems, text_type, string_types
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
from ansible.executor.play_iterator import PlayIterator
@@ -51,6 +52,8 @@ except ImportError:
__all__ = ['StrategyBase']
+action_write_locks = defaultdict(Lock)
+
# TODO: this should probably be in the plugins/__init__.py, with
# a smarter mechanism to set all of the attributes based on
@@ -141,6 +144,20 @@ class StrategyBase:
display.debug("entering _queue_task() for %s/%s" % (host, task))
+ # Add a write lock for tasks.
+ # Maybe this should be added somewhere further up the call stack but
+ # this is the earliest in the code where we have task (1) extracted
+ # into its own variable and (2) there's only a single code path
+ # leading to the module being run. This is called by three
+ # functions: __init__.py::_do_handler_run(), linear.py::run(), and
+ # free.py::run() so we'd have to add to all three to do it there.
+ # The next common higher level is __init__.py::run() and that has
+ # tasks inside of play_iterator so we'd have to extract them to do it
+ # there.
+ if not action_write_locks[task.action]:
+ display.warning('Python defaultdict did not create the Lock for us. Creating manually')
+ action_write_locks[task.action] = Lock()
+
# and then queue the new task
display.debug("%s - putting task (%s) in queue" % (host, task))
try: