| author | Brian Coca <bcoca@users.noreply.github.com> | 2017-09-07 12:17:16 -0400 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2017-09-07 12:17:16 -0400 |
| commit | 13d1520f3dde7553cb0f6d4a48a712a44054cecb (patch) | |
| tree | 18cd422b4fefd6c1d8d582b417cf00233c1cb598 /lib/ansible | |
| parent | f9f0472ba506869a1fbc0b5c97064fe8be85a2c2 (diff) | |
| download | ansible-13d1520f3dde7553cb0f6d4a48a712a44054cecb.tar.gz | |
Db cache fix (#29048)
* cleaner get for file-based caches
* DB-backed facts now behave like file-based ones:
we now keep a local in-memory cache to avoid race conditions on expiration during Ansible runs (see the sketch below)
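In outline, the change layers a plain dict in front of the DB-backed store: the first `get` for a key falls through to the backend, and every later `get` is served from the dict, so a key that expires in the backend mid-run cannot vanish from the play. A minimal sketch of that pattern, assuming nothing beyond the commit message (the `LocalMemoryCache` class and its `backend` argument are illustrative, not part of Ansible):

```python
class LocalMemoryCache:
    """Per-run dict kept in front of an expiring DB-backed cache (sketch only)."""

    def __init__(self, backend):
        self._db = backend    # the real plugins wrap a memcached/redis client here
        self._cache = {}      # local copy; never expires during the run

    def get(self, key):
        if key not in self._cache:
            value = self._db.get(key)
            if value is None:            # missing or already expired in the backend
                raise KeyError(key)
            self._cache[key] = value     # remember it for the rest of the run
        return self._cache[key]

    def set(self, key, value):
        self._db.set(key, value)         # write through to the backend
        self._cache[key] = value         # keep the local copy in sync
```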
Diffstat (limited to 'lib/ansible')
-rw-r--r-- | lib/ansible/plugins/cache/__init__.py | 40 |
-rw-r--r-- | lib/ansible/plugins/cache/memcached.py | 30 |
-rw-r--r-- | lib/ansible/plugins/cache/redis.py | 43 |
3 files changed, 63 insertions, 50 deletions
diff --git a/lib/ansible/plugins/cache/__init__.py b/lib/ansible/plugins/cache/__init__.py
index 16e3107e58..6537ffe223 100644
--- a/lib/ansible/plugins/cache/__init__.py
+++ b/lib/ansible/plugins/cache/__init__.py
@@ -105,29 +105,29 @@ class BaseFileCacheModule(BaseCacheModule):
         and it would be problematic if the key did expire after some long running tasks and
         user gets 'undefined' error in the same play """
 
-        if key in self._cache:
-            return self._cache.get(key)
+        if key not in self._cache:
 
-        if self.has_expired(key) or key == "":
-            raise KeyError
+            if self.has_expired(key) or key == "":
+                raise KeyError
 
-        cachefile = "%s/%s" % (self._cache_dir, key)
-        try:
-            value = self._load(cachefile)
-            self._cache[key] = value
-            return value
-        except ValueError as e:
-            display.warning("error in '%s' cache plugin while trying to read %s : %s. "
-                            "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
-            self.delete(key)
-            raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
-                               "It has been removed, so you can re-run your command now." % cachefile)
-        except (OSError, IOError) as e:
-            display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
-            raise KeyError
-        except Exception as e:
-            raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))
+            cachefile = "%s/%s" % (self._cache_dir, key)
+            try:
+                value = self._load(cachefile)
+                self._cache[key] = value
+            except ValueError as e:
+                display.warning("error in '%s' cache plugin while trying to read %s : %s. "
+                                "Most likely a corrupt file, so erasing and failing." % (self.plugin_name, cachefile, to_bytes(e)))
+                self.delete(key)
+                raise AnsibleError("The cache file %s was corrupt, or did not otherwise contain valid data. "
+                                   "It has been removed, so you can re-run your command now." % cachefile)
+            except (OSError, IOError) as e:
+                display.warning("error in '%s' cache plugin while trying to read %s : %s" % (self.plugin_name, cachefile, to_bytes(e)))
+                raise KeyError
+            except Exception as e:
+                raise AnsibleError("Error while decoding the cache file %s: %s" % (cachefile, to_bytes(e)))
+
+        return self._cache.get(key)
 
     def set(self, key, value):
diff --git a/lib/ansible/plugins/cache/memcached.py b/lib/ansible/plugins/cache/memcached.py
index 9d2bb0a86d..0b74895376 100644
--- a/lib/ansible/plugins/cache/memcached.py
+++ b/lib/ansible/plugins/cache/memcached.py
@@ -147,8 +147,9 @@ class CacheModule(BaseCacheModule):
         self._timeout = C.CACHE_PLUGIN_TIMEOUT
         self._prefix = C.CACHE_PLUGIN_PREFIX
 
-        self._cache = ProxyClientPool(connection, debug=0)
-        self._keys = CacheModuleKeys(self._cache, self._cache.get(CacheModuleKeys.PREFIX) or [])
+        self._cache = {}
+        self._db = ProxyClientPool(connection, debug=0)
+        self._keys = CacheModuleKeys(self._db, self._db.get(CacheModuleKeys.PREFIX) or [])
 
     def _make_key(self, key):
         return "{0}{1}".format(self._prefix, key)
@@ -159,17 +160,21 @@ class CacheModule(BaseCacheModule):
             self._keys.remove_by_timerange(0, expiry_age)
 
     def get(self, key):
-        value = self._cache.get(self._make_key(key))
-        # guard against the key not being removed from the keyset;
-        # this could happen in cases where the timeout value is changed
-        # between invocations
-        if value is None:
-            self.delete(key)
-            raise KeyError
-        return value
+        if key not in self._cache:
+            value = self._db.get(self._make_key(key))
+            # guard against the key not being removed from the keyset;
+            # this could happen in cases where the timeout value is changed
+            # between invocations
+            if value is None:
+                self.delete(key)
+                raise KeyError
+            self._cache[key] = value
+
+        return self._cache.get(key)
 
     def set(self, key, value):
-        self._cache.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
+        self._db.set(self._make_key(key), value, time=self._timeout, min_compress_len=1)
+        self._cache[key] = value
         self._keys.add(key)
 
     def keys(self):
@@ -181,7 +186,8 @@ class CacheModule(BaseCacheModule):
         return key in self._keys
 
     def delete(self, key):
-        self._cache.delete(self._make_key(key))
+        del self._cache[key]
+        self._db.delete(self._make_key(key))
         self._keys.discard(key)
 
     def flush(self):
diff --git a/lib/ansible/plugins/cache/redis.py b/lib/ansible/plugins/cache/redis.py
index 8e51b6b5bf..7eea4d440a 100644
--- a/lib/ansible/plugins/cache/redis.py
+++ b/lib/ansible/plugins/cache/redis.py
@@ -17,7 +17,6 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
-import sys
 import time
 import json
 
@@ -48,47 +47,55 @@ class CacheModule(BaseCacheModule):
         self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
         self._prefix = C.CACHE_PLUGIN_PREFIX
 
-        self._cache = StrictRedis(*connection)
+        self._cache = {}
+        self._db = StrictRedis(*connection)
         self._keys_set = 'ansible_cache_keys'
 
     def _make_key(self, key):
         return self._prefix + key
 
     def get(self, key):
-        value = self._cache.get(self._make_key(key))
-        # guard against the key not being removed from the zset;
-        # this could happen in cases where the timeout value is changed
-        # between invocations
-        if value is None:
-            self.delete(key)
-            raise KeyError
-        return json.loads(value)
+
+        if key not in self._cache:
+            value = self._db.get(self._make_key(key))
+            # guard against the key not being removed from the zset;
+            # this could happen in cases where the timeout value is changed
+            # between invocations
+            if value is None:
+                self.delete(key)
+                raise KeyError
+            self._cache[key] = json.loads(value)
+
+        return self._cache.get(key)
 
     def set(self, key, value):
+
         value2 = json.dumps(value)
         if self._timeout > 0:  # a timeout of 0 is handled as meaning 'never expire'
-            self._cache.setex(self._make_key(key), int(self._timeout), value2)
+            self._db.setex(self._make_key(key), int(self._timeout), value2)
         else:
-            self._cache.set(self._make_key(key), value2)
+            self._db.set(self._make_key(key), value2)
 
-        self._cache.zadd(self._keys_set, time.time(), key)
+        self._db.zadd(self._keys_set, time.time(), key)
+        self._cache[key] = value
 
     def _expire_keys(self):
         if self._timeout > 0:
            expiry_age = time.time() - self._timeout
-           self._cache.zremrangebyscore(self._keys_set, 0, expiry_age)
+           self._db.zremrangebyscore(self._keys_set, 0, expiry_age)
 
     def keys(self):
        self._expire_keys()
-       return self._cache.zrange(self._keys_set, 0, -1)
+       return self._db.zrange(self._keys_set, 0, -1)
 
     def contains(self, key):
        self._expire_keys()
-       return (self._cache.zrank(self._keys_set, key) is not None)
+       return (self._db.zrank(self._keys_set, key) is not None)
 
     def delete(self, key):
-       self._cache.delete(self._make_key(key))
-       self._cache.zrem(self._keys_set, key)
+       del self.cache[key]
+       self._db.delete(self._make_key(key))
+       self._db.zrem(self._keys_set, key)
 
     def flush(self):
        for key in self.keys():
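For illustration only, a self-contained demo of the expiration race the in-memory layer avoids; `FakeExpiringBackend` and `get_fact` are invented for this example and are not Ansible or Redis APIs:

```python
class FakeExpiringBackend:
    """Invented stand-in for a DB cache whose keys can expire mid-run."""

    def __init__(self, data):
        self._data = dict(data)

    def get(self, key):
        return self._data.get(key)       # None once the key has "expired"

    def set(self, key, value):
        self._data[key] = value

    def expire(self, key):
        self._data.pop(key, None)        # simulate the timeout firing


backend = FakeExpiringBackend({"host1": {"ansible_os_family": "Debian"}})
local = {}                               # the in-memory layer this commit adds

def get_fact(key):
    if key not in local:                 # first read: fall through to the backend
        value = backend.get(key)
        if value is None:
            raise KeyError(key)
        local[key] = value
    return local[key]                    # later reads never touch the backend

print(get_fact("host1"))                 # populates the local dict
backend.expire("host1")                  # key expires in the backend mid-run
print(get_fact("host1"))                 # still returned; no 'undefined' error
```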