author     Jenkins <jenkins@review.openstack.org>     2014-05-23 13:35:12 +0000
committer  Gerrit Code Review <review@openstack.org>  2014-05-23 13:35:12 +0000
commit     d68002f9fb714e1897662fc4e89f261a9b8aacaa (patch)
tree       614d4b054bdb510ab4c70d0d265cbccd1fd616c4
parent     966d4410a1a69e0a3af678442a1a965dae80d720 (diff)
parent     a34945fd020088148b91b06bf694c4c05130a786 (diff)
download   pycadf-d68002f9fb714e1897662fc4e89f261a9b8aacaa.tar.gz
Merge "sync oslo"0.5.1
-rw-r--r--  pycadf/openstack/common/__init__.py             15
-rw-r--r--  pycadf/openstack/common/context.py              13
-rw-r--r--  pycadf/openstack/common/excutils.py             32
-rw-r--r--  pycadf/openstack/common/fileutils.py             3
-rw-r--r--  pycadf/openstack/common/fixture/lockutils.py     2
-rw-r--r--  pycadf/openstack/common/fixture/mockpatch.py    11
-rw-r--r--  pycadf/openstack/common/fixture/moxstubout.py   13
-rw-r--r--  pycadf/openstack/common/gettextutils.py        232
-rw-r--r--  pycadf/openstack/common/jsonutils.py            26
-rw-r--r--  pycadf/openstack/common/lockutils.py           156
-rw-r--r--  pycadf/openstack/common/log.py                  76
-rw-r--r--  requirements.txt                                 1
12 files changed, 377 insertions, 203 deletions
diff --git a/pycadf/openstack/common/__init__.py b/pycadf/openstack/common/__init__.py
index 2a00f3b..d1223ea 100644
--- a/pycadf/openstack/common/__init__.py
+++ b/pycadf/openstack/common/__init__.py
@@ -1,2 +1,17 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
import six
+
+
six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
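The add_move() registration above is what lets test code keep a single import path for mox on both Python 2 and Python 3. A minimal sketch of the intended usage (the try/except guard is only for environments where neither mox nor mox3 is installed)::

    import six

    # Register the moved module once, exactly as the package __init__ does.
    six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))

    try:
        # Resolves to mox on Python 2 and to mox3.mox on Python 3.
        from six.moves import mox  # noqa: F401
    except ImportError:
        # Neither mox nor mox3 is installed in this environment.
        mox = None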
diff --git a/pycadf/openstack/common/context.py b/pycadf/openstack/common/context.py
index 182b044..3eeb445 100644
--- a/pycadf/openstack/common/context.py
+++ b/pycadf/openstack/common/context.py
@@ -25,7 +25,7 @@ import uuid
def generate_request_id():
- return 'req-%s' % str(uuid.uuid4())
+ return b'req-' + str(uuid.uuid4()).encode('ascii')
class RequestContext(object):
@@ -98,3 +98,14 @@ def get_context_from_function_and_args(function, args, kwargs):
return arg
return None
+
+
+def is_user_context(context):
+ """Indicates if the request context is a normal user."""
+ if not context:
+ return False
+ if context.is_admin:
+ return False
+ if not context.user_id or not context.project_id:
+ return False
+ return True
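The context.py changes switch request ids to byte strings and add an is_user_context() helper. A standalone sketch of both behaviours, using a hypothetical FakeContext stand-in for the real RequestContext::

    import uuid

    class FakeContext(object):
        # Hypothetical stand-in exposing only the attributes the helper reads.
        def __init__(self, user_id=None, project_id=None, is_admin=False):
            self.user_id = user_id
            self.project_id = project_id
            self.is_admin = is_admin

    def generate_request_id():
        # Mirrors the new implementation: the id is now a byte string.
        return b'req-' + str(uuid.uuid4()).encode('ascii')

    def is_user_context(context):
        """Indicates if the request context is a normal user."""
        if not context:
            return False
        if context.is_admin:
            return False
        if not context.user_id or not context.project_id:
            return False
        return True

    print(generate_request_id())                            # b'req-...'
    print(is_user_context(FakeContext('alice', 'proj-1')))  # True
    print(is_user_context(FakeContext(is_admin=True)))      # False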
diff --git a/pycadf/openstack/common/excutils.py b/pycadf/openstack/common/excutils.py
index e17a138..eb8e160 100644
--- a/pycadf/openstack/common/excutils.py
+++ b/pycadf/openstack/common/excutils.py
@@ -24,7 +24,7 @@ import traceback
import six
-from pycadf.openstack.common.gettextutils import _
+from pycadf.openstack.common.gettextutils import _LE
class save_and_reraise_exception(object):
@@ -49,9 +49,22 @@ class save_and_reraise_exception(object):
decide_if_need_reraise()
if not should_be_reraised:
ctxt.reraise = False
+
+ If another exception occurs and the reraise flag is False,
+ the saved exception will not be logged.
+
+ If the caller wants to raise a new exception during exception handling,
+ they set reraise to False initially, with the ability to set it back to
+ True if needed::
+
+ except Exception:
+ with save_and_reraise_exception(reraise=False) as ctxt:
+ [if statements to determine whether to raise a new exception]
+ # Not raising a new exception, so reraise
+ ctxt.reraise = True
"""
- def __init__(self):
- self.reraise = True
+ def __init__(self, reraise=True):
+ self.reraise = reraise
def __enter__(self):
self.type_, self.value, self.tb, = sys.exc_info()
@@ -59,10 +72,11 @@ class save_and_reraise_exception(object):
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None:
- logging.error(_('Original exception being dropped: %s'),
- traceback.format_exception(self.type_,
- self.value,
- self.tb))
+ if self.reraise:
+ logging.error(_LE('Original exception being dropped: %s'),
+ traceback.format_exception(self.type_,
+ self.value,
+ self.tb))
return False
if self.reraise:
six.reraise(self.type_, self.value, self.tb)
@@ -88,8 +102,8 @@ def forever_retry_uncaught_exceptions(infunc):
if (cur_time - last_log_time > 60 or
this_exc_message != last_exc_message):
logging.exception(
- _('Unexpected exception occurred %d time(s)... '
- 'retrying.') % exc_count)
+ _LE('Unexpected exception occurred %d time(s)... '
+ 'retrying.') % exc_count)
last_log_time = cur_time
last_exc_message = this_exc_message
exc_count = 0
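The new reraise keyword documented above lets the handler decide late whether to restore the original exception. A hedged usage sketch, assuming the synced module is importable as pycadf.openstack.common.excutils; should_swap_exception() is a hypothetical decision hook::

    from pycadf.openstack.common import excutils

    def should_swap_exception():
        # Hypothetical predicate; real callers inspect the caught exception.
        return False

    def do_work():
        raise ValueError('original failure')

    def handle():
        try:
            do_work()
        except Exception:
            with excutils.save_and_reraise_exception(reraise=False) as ctxt:
                if should_swap_exception():
                    raise RuntimeError('replacement error')
                # Nothing new was raised, so put the saved ValueError back.
                ctxt.reraise = True

    handle()  # re-raises the original ValueError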
diff --git a/pycadf/openstack/common/fileutils.py b/pycadf/openstack/common/fileutils.py
index a15b163..e2506bd 100644
--- a/pycadf/openstack/common/fileutils.py
+++ b/pycadf/openstack/common/fileutils.py
@@ -19,7 +19,6 @@ import os
import tempfile
from pycadf.openstack.common import excutils
-from pycadf.openstack.common.gettextutils import _
from pycadf.openstack.common import log as logging
LOG = logging.getLogger(__name__)
@@ -59,7 +58,7 @@ def read_cached_file(filename, force_reload=False):
cache_info = _FILE_CACHE.setdefault(filename, {})
if not cache_info or mtime > cache_info.get('mtime', 0):
- LOG.debug(_("Reloading cached file %s") % filename)
+ LOG.debug("Reloading cached file %s" % filename)
with open(filename) as fap:
cache_info['data'] = fap.read()
cache_info['mtime'] = mtime
diff --git a/pycadf/openstack/common/fixture/lockutils.py b/pycadf/openstack/common/fixture/lockutils.py
index e229c68..647305f 100644
--- a/pycadf/openstack/common/fixture/lockutils.py
+++ b/pycadf/openstack/common/fixture/lockutils.py
@@ -48,4 +48,4 @@ class LockFixture(fixtures.Fixture):
def setUp(self):
super(LockFixture, self).setUp()
self.addCleanup(self.mgr.__exit__, None, None, None)
- self.mgr.__enter__()
+ self.lock = self.mgr.__enter__()
diff --git a/pycadf/openstack/common/fixture/mockpatch.py b/pycadf/openstack/common/fixture/mockpatch.py
index a8ffeb3..4ca6bea 100644
--- a/pycadf/openstack/common/fixture/mockpatch.py
+++ b/pycadf/openstack/common/fixture/mockpatch.py
@@ -15,6 +15,17 @@
# License for the specific language governing permissions and limitations
# under the License.
+##############################################################################
+##############################################################################
+##
+## DO NOT MODIFY THIS FILE
+##
+## This file is being graduated to the oslotest library. Please make all
+## changes there, and only backport critical fixes here. - dhellmann
+##
+##############################################################################
+##############################################################################
+
import fixtures
import mock
diff --git a/pycadf/openstack/common/fixture/moxstubout.py b/pycadf/openstack/common/fixture/moxstubout.py
index d7e118e..c76c4bd 100644
--- a/pycadf/openstack/common/fixture/moxstubout.py
+++ b/pycadf/openstack/common/fixture/moxstubout.py
@@ -15,8 +15,19 @@
# License for the specific language governing permissions and limitations
# under the License.
+##############################################################################
+##############################################################################
+##
+## DO NOT MODIFY THIS FILE
+##
+## This file is being graduated to the oslotest library. Please make all
+## changes there, and only backport critical fixes here. - dhellmann
+##
+##############################################################################
+##############################################################################
+
import fixtures
-from six.moves import mox # noqa
+from six.moves import mox
class MoxStubout(fixtures.Fixture):
diff --git a/pycadf/openstack/common/gettextutils.py b/pycadf/openstack/common/gettextutils.py
index 1012c14..d137ec5 100644
--- a/pycadf/openstack/common/gettextutils.py
+++ b/pycadf/openstack/common/gettextutils.py
@@ -28,70 +28,135 @@ import gettext
import locale
from logging import handlers
import os
-import re
from babel import localedata
import six
-_localedir = os.environ.get('pycadf'.upper() + '_LOCALEDIR')
-_t = gettext.translation('pycadf', localedir=_localedir, fallback=True)
-
-# We use separate translation catalogs for each log level, so set up a
-# mapping between the log level name and the translator. The domain
-# for the log level is project_name + "-log-" + log_level so messages
-# for each level end up in their own catalog.
-_t_log_levels = dict(
- (level, gettext.translation('pycadf' + '-log-' + level,
- localedir=_localedir,
- fallback=True))
- for level in ['info', 'warning', 'error', 'critical']
-)
-
_AVAILABLE_LANGUAGES = {}
+
+# FIXME(dhellmann): Remove this when moving to oslo.i18n.
USE_LAZY = False
-def enable_lazy():
- """Convenience function for configuring _() to use lazy gettext
-
- Call this at the start of execution to enable the gettextutils._
- function to use lazy gettext functionality. This is useful if
- your project is importing _ directly instead of using the
- gettextutils.install() way of importing the _ function.
+class TranslatorFactory(object):
+ """Create translator functions
"""
- global USE_LAZY
- USE_LAZY = True
+ def __init__(self, domain, lazy=False, localedir=None):
+ """Establish a set of translation functions for the domain.
+
+ :param domain: Name of translation domain,
+ specifying a message catalog.
+ :type domain: str
+ :param lazy: Delays translation until a message is emitted.
+ Defaults to False.
+ :type lazy: Boolean
+ :param localedir: Directory with translation catalogs.
+ :type localedir: str
+ """
+ self.domain = domain
+ self.lazy = lazy
+ if localedir is None:
+ localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
+ self.localedir = localedir
-def _(msg):
- if USE_LAZY:
- return Message(msg, domain='pycadf')
- else:
- if six.PY3:
- return _t.gettext(msg)
- return _t.ugettext(msg)
+ def _make_translation_func(self, domain=None):
+ """Return a new translation function ready for use.
+ Takes into account whether or not lazy translation is being
+ done.
-def _log_translation(msg, level):
- """Build a single translation of a log message
- """
- if USE_LAZY:
- return Message(msg, domain='pycadf' + '-log-' + level)
- else:
- translator = _t_log_levels[level]
+ The domain can be specified to override the default from the
+ factory, but the localedir from the factory is always used
+ because we assume the log-level translation catalogs are
+ installed in the same directory as the main application
+ catalog.
+
+ """
+ if domain is None:
+ domain = self.domain
+ if self.lazy:
+ return functools.partial(Message, domain=domain)
+ t = gettext.translation(
+ domain,
+ localedir=self.localedir,
+ fallback=True,
+ )
if six.PY3:
- return translator.gettext(msg)
- return translator.ugettext(msg)
+ return t.gettext
+ return t.ugettext
+
+ @property
+ def primary(self):
+ "The default translation function."
+ return self._make_translation_func()
+
+ def _make_log_translation_func(self, level):
+ return self._make_translation_func(self.domain + '-log-' + level)
+
+ @property
+ def log_info(self):
+ "Translate info-level log messages."
+ return self._make_log_translation_func('info')
+
+ @property
+ def log_warning(self):
+ "Translate warning-level log messages."
+ return self._make_log_translation_func('warning')
+
+ @property
+ def log_error(self):
+ "Translate error-level log messages."
+ return self._make_log_translation_func('error')
+
+ @property
+ def log_critical(self):
+ "Translate critical-level log messages."
+ return self._make_log_translation_func('critical')
+
+
+# NOTE(dhellmann): When this module moves out of the incubator into
+# oslo.i18n, these global variables can be moved to an integration
+# module within each application.
+
+# Create the global translation functions.
+_translators = TranslatorFactory('pycadf')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
-_LI = functools.partial(_log_translation, level='info')
-_LW = functools.partial(_log_translation, level='warning')
-_LE = functools.partial(_log_translation, level='error')
-_LC = functools.partial(_log_translation, level='critical')
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
+
+# NOTE(dhellmann): End of globals that will move to the application's
+# integration module.
+
+
+def enable_lazy():
+ """Convenience function for configuring _() to use lazy gettext
+
+ Call this at the start of execution to enable the gettextutils._
+ function to use lazy gettext functionality. This is useful if
+ your project is importing _ directly instead of using the
+ gettextutils.install() way of importing the _ function.
+ """
+ # FIXME(dhellmann): This function will be removed in oslo.i18n,
+ # because the TranslatorFactory makes it superfluous.
+ global _, _LI, _LW, _LE, _LC, USE_LAZY
+ tf = TranslatorFactory('pycadf', lazy=True)
+ _ = tf.primary
+ _LI = tf.log_info
+ _LW = tf.log_warning
+ _LE = tf.log_error
+ _LC = tf.log_critical
+ USE_LAZY = True
def install(domain, lazy=False):
@@ -113,26 +178,9 @@ def install(domain, lazy=False):
any available locale.
"""
if lazy:
- # NOTE(mrodden): Lazy gettext functionality.
- #
- # The following introduces a deferred way to do translations on
- # messages in OpenStack. We override the standard _() function
- # and % (format string) operation to build Message objects that can
- # later be translated when we have more information.
- def _lazy_gettext(msg):
- """Create and return a Message object.
-
- Lazy gettext function for a given domain, it is a factory method
- for a project/module to get a lazy gettext function for its own
- translation domain (i.e. nova, glance, cinder, etc.)
-
- Message encapsulates a string so that we can translate
- it later when needed.
- """
- return Message(msg, domain=domain)
-
from six import moves
- moves.builtins.__dict__['_'] = _lazy_gettext
+ tf = TranslatorFactory(domain, lazy=True)
+ moves.builtins.__dict__['_'] = tf.primary
else:
localedir = '%s_LOCALEDIR' % domain.upper()
if six.PY3:
@@ -248,47 +296,22 @@ class Message(six.text_type):
if other is None:
params = (other,)
elif isinstance(other, dict):
- params = self._trim_dictionary_parameters(other)
- else:
- params = self._copy_param(other)
- return params
-
- def _trim_dictionary_parameters(self, dict_param):
- """Return a dict that only has matching entries in the msgid."""
- # NOTE(luisg): Here we trim down the dictionary passed as parameters
- # to avoid carrying a lot of unnecessary weight around in the message
- # object, for example if someone passes in Message() % locals() but
- # only some params are used, and additionally we prevent errors for
- # non-deepcopyable objects by unicoding() them.
-
- # Look for %(param) keys in msgid;
- # Skip %% and deal with the case where % is first character on the line
- keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', self.msgid)
-
- # If we don't find any %(param) keys but have a %s
- if not keys and re.findall('(?:[^%]|^)%[a-z]', self.msgid):
- # Apparently the full dictionary is the parameter
- params = self._copy_param(dict_param)
- else:
+ # Merge the dictionaries
+ # Copy each item in case one does not support deep copy.
params = {}
- # Save our existing parameters as defaults to protect
- # ourselves from losing values if we are called through an
- # (erroneous) chain that builds a valid Message with
- # arguments, and then does something like "msg % kwds"
- # where kwds is an empty dictionary.
- src = {}
if isinstance(self.params, dict):
- src.update(self.params)
- src.update(dict_param)
- for key in keys:
- params[key] = self._copy_param(src[key])
-
+ for key, val in self.params.items():
+ params[key] = self._copy_param(val)
+ for key, val in other.items():
+ params[key] = self._copy_param(val)
+ else:
+ params = self._copy_param(other)
return params
def _copy_param(self, param):
try:
return copy.deepcopy(param)
- except TypeError:
+ except Exception:
# Fallback to casting to unicode; this will handle the
# python code-like objects that can't be deep-copied
return six.text_type(param)
@@ -300,13 +323,14 @@ class Message(six.text_type):
def __radd__(self, other):
return self.__add__(other)
- def __str__(self):
- # NOTE(luisg): Logging in python 2.6 tries to str() log records,
- # and it expects specifically a UnicodeError in order to proceed.
- msg = _('Message objects do not support str() because they may '
- 'contain non-ascii characters. '
- 'Please use unicode() or translate() instead.')
- raise UnicodeError(msg)
+ if six.PY2:
+ def __str__(self):
+ # NOTE(luisg): Logging in python 2.6 tries to str() log records,
+ # and it expects specifically a UnicodeError in order to proceed.
+ msg = _('Message objects do not support str() because they may '
+ 'contain non-ascii characters. '
+ 'Please use unicode() or translate() instead.')
+ raise UnicodeError(msg)
def get_available_languages(domain):
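TranslatorFactory replaces the module-level catalog setup with per-domain translation functions, and the globals above show how pycadf wires them up. A hedged sketch of the same wiring for another application, assuming the synced module is importable; 'myproject' is a placeholder domain::

    from pycadf.openstack.common.gettextutils import TranslatorFactory

    _translators = TranslatorFactory(domain='myproject')

    _ = _translators.primary          # user-facing messages
    _LI = _translators.log_info       # info-level log messages
    _LW = _translators.log_warning    # warning-level log messages
    _LE = _translators.log_error      # error-level log messages
    _LC = _translators.log_critical   # critical-level log messages

    # With no catalogs installed, gettext falls back to the identity
    # translation, so this simply returns the formatted string.
    print(_LE('Unexpected error: %s') % 'boom')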
diff --git a/pycadf/openstack/common/jsonutils.py b/pycadf/openstack/common/jsonutils.py
index 72b928e..623dfc3 100644
--- a/pycadf/openstack/common/jsonutils.py
+++ b/pycadf/openstack/common/jsonutils.py
@@ -35,18 +35,20 @@ import datetime
import functools
import inspect
import itertools
-import json
-try:
- import xmlrpclib
-except ImportError:
- # NOTE(jaypipes): xmlrpclib was renamed to xmlrpc.client in Python3
- # however the function and object call signatures
- # remained the same. This whole try/except block should
- # be removed and replaced with a call to six.moves once
- # six 1.4.2 is released. See http://bit.ly/1bqrVzu
- import xmlrpc.client as xmlrpclib
+import sys
+
+if sys.version_info < (2, 7):
+ # On Python <= 2.6, json module is not C boosted, so try to use
+ # simplejson module if available
+ try:
+ import simplejson as json
+ except ImportError:
+ import json
+else:
+ import json
import six
+import six.moves.xmlrpc_client as xmlrpclib
from pycadf.openstack.common import gettextutils
from pycadf.openstack.common import importutils
@@ -168,8 +170,8 @@ def loads(s):
return json.loads(s)
-def load(s):
- return json.load(s)
+def load(fp):
+ return json.load(fp)
try:
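The jsonutils hunk replaces the xmlrpclib try/except with six.moves and prefers simplejson only on Python 2.6, where the stdlib json module lacks C speedups. A standalone sketch of that import fallback::

    import sys

    if sys.version_info < (2, 7):
        # Python 2.6: stdlib json is pure Python, so prefer simplejson
        # when it is available.
        try:
            import simplejson as json
        except ImportError:
            import json
    else:
        import json

    import six.moves.xmlrpc_client as xmlrpclib  # one name on Py2 and Py3

    print(json.dumps({'synced': True}))
    print(xmlrpclib.MAXINT)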
diff --git a/pycadf/openstack/common/lockutils.py b/pycadf/openstack/common/lockutils.py
index 3a7a4ec..7bbb1d2 100644
--- a/pycadf/openstack/common/lockutils.py
+++ b/pycadf/openstack/common/lockutils.py
@@ -15,6 +15,7 @@
import contextlib
import errno
+import fcntl
import functools
import os
import shutil
@@ -28,7 +29,7 @@ import weakref
from oslo.config import cfg
from pycadf.openstack.common import fileutils
-from pycadf.openstack.common.gettextutils import _
+from pycadf.openstack.common.gettextutils import _, _LE, _LI
from pycadf.openstack.common import log as logging
@@ -37,10 +38,10 @@ LOG = logging.getLogger(__name__)
util_opts = [
cfg.BoolOpt('disable_process_locking', default=False,
- help='Whether to disable inter-process locks'),
+ help='Enables or disables inter-process locks.'),
cfg.StrOpt('lock_path',
default=os.environ.get("PYCADF_LOCK_PATH"),
- help=('Directory to use for lock files.'))
+ help='Directory to use for lock files.')
]
@@ -52,7 +53,7 @@ def set_defaults(lock_path):
cfg.set_defaults(util_opts, lock_path=lock_path)
-class _InterProcessLock(object):
+class _FileLock(object):
"""Lock implementation which allows multiple locks, working around
issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
not require any cleanup. Since the lock is always held on a file
@@ -79,7 +80,7 @@ class _InterProcessLock(object):
if not os.path.exists(basedir):
fileutils.ensure_tree(basedir)
- LOG.info(_('Created lock path: %s'), basedir)
+ LOG.info(_LI('Created lock path: %s'), basedir)
self.lockfile = open(self.fname, 'w')
@@ -90,7 +91,7 @@ class _InterProcessLock(object):
# Also upon reading the MSDN docs for locking(), it seems
# to have a laughable 10 attempts "blocking" mechanism.
self.trylock()
- LOG.debug(_('Got file lock "%s"'), self.fname)
+ LOG.debug('Got file lock "%s"', self.fname)
return True
except IOError as e:
if e.errno in (errno.EACCES, errno.EAGAIN):
@@ -114,14 +115,17 @@ class _InterProcessLock(object):
try:
self.unlock()
self.lockfile.close()
- LOG.debug(_('Released file lock "%s"'), self.fname)
+ LOG.debug('Released file lock "%s"', self.fname)
except IOError:
- LOG.exception(_("Could not release the acquired lock `%s`"),
+ LOG.exception(_LE("Could not release the acquired lock `%s`"),
self.fname)
def __exit__(self, exc_type, exc_val, exc_tb):
self.release()
+ def exists(self):
+ return os.path.exists(self.fname)
+
def trylock(self):
raise NotImplementedError()
@@ -129,7 +133,7 @@ class _InterProcessLock(object):
raise NotImplementedError()
-class _WindowsLock(_InterProcessLock):
+class _WindowsLock(_FileLock):
def trylock(self):
msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
@@ -137,7 +141,7 @@ class _WindowsLock(_InterProcessLock):
msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
-class _PosixLock(_InterProcessLock):
+class _FcntlLock(_FileLock):
def trylock(self):
fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
@@ -145,35 +149,106 @@ class _PosixLock(_InterProcessLock):
fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
+class _PosixLock(object):
+ def __init__(self, name):
+ # Hash the name because it's not valid to have POSIX semaphore
+ # names with things like / in them. Then use base64 to encode
+ # the digest() instead taking the hexdigest() because the
+ # result is shorter and most systems can't have shm sempahore
+ # names longer than 31 characters.
+ h = hashlib.sha1()
+ h.update(name.encode('ascii'))
+ self.name = str((b'/' + base64.urlsafe_b64encode(
+ h.digest())).decode('ascii'))
+
+ def acquire(self, timeout=None):
+ self.semaphore = posix_ipc.Semaphore(self.name,
+ flags=posix_ipc.O_CREAT,
+ initial_value=1)
+ self.semaphore.acquire(timeout)
+ return self
+
+ def __enter__(self):
+ self.acquire()
+ return self
+
+ def release(self):
+ self.semaphore.release()
+ self.semaphore.close()
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.release()
+
+ def exists(self):
+ try:
+ semaphore = posix_ipc.Semaphore(self.name)
+ except posix_ipc.ExistentialError:
+ return False
+ else:
+ semaphore.close()
+ return True
+
+
if os.name == 'nt':
import msvcrt
InterProcessLock = _WindowsLock
+ FileLock = _WindowsLock
else:
- import fcntl
+ import base64
+ import hashlib
+ import posix_ipc
InterProcessLock = _PosixLock
+ FileLock = _FcntlLock
_semaphores = weakref.WeakValueDictionary()
_semaphores_lock = threading.Lock()
-def external_lock(name, lock_file_prefix=None):
- with internal_lock(name):
- LOG.debug(_('Attempting to grab external lock "%(lock)s"'),
- {'lock': name})
+def _get_lock_path(name, lock_file_prefix, lock_path=None):
+ # NOTE(mikal): the lock name cannot contain directory
+ # separators
+ name = name.replace(os.sep, '_')
+ if lock_file_prefix:
+ sep = '' if lock_file_prefix.endswith('-') else '-'
+ name = '%s%s%s' % (lock_file_prefix, sep, name)
- # NOTE(mikal): the lock name cannot contain directory
- # separators
- name = name.replace(os.sep, '_')
- if lock_file_prefix:
- sep = '' if lock_file_prefix.endswith('-') else '-'
- name = '%s%s%s' % (lock_file_prefix, sep, name)
+ local_lock_path = lock_path or CONF.lock_path
- if not CONF.lock_path:
+ if not local_lock_path:
+ # NOTE(bnemec): Create a fake lock path for posix locks so we don't
+ # unnecessarily raise the RequiredOptError below.
+ if InterProcessLock is not _PosixLock:
raise cfg.RequiredOptError('lock_path')
+ local_lock_path = 'posixlock:/'
+
+ return os.path.join(local_lock_path, name)
- lock_file_path = os.path.join(CONF.lock_path, name)
- return InterProcessLock(lock_file_path)
+def external_lock(name, lock_file_prefix=None, lock_path=None):
+ LOG.debug('Attempting to grab external lock "%(lock)s"',
+ {'lock': name})
+
+ lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)
+
+ # NOTE(bnemec): If an explicit lock_path was passed to us then it
+ # means the caller is relying on file-based locking behavior, so
+ # we can't use posix locks for those calls.
+ if lock_path:
+ return FileLock(lock_file_path)
+ return InterProcessLock(lock_file_path)
+
+
+def remove_external_lock_file(name, lock_file_prefix=None):
+ """Remove a external lock file when it's not used anymore
+ This will be helpful when we have a lot of lock files
+ """
+ with internal_lock(name):
+ lock_file_path = _get_lock_path(name, lock_file_prefix)
+ try:
+ os.remove(lock_file_path)
+ except OSError:
+ LOG.info(_LI('Failed to remove file %(file)s'),
+ {'file': lock_file_path})
def internal_lock(name):
@@ -184,12 +259,12 @@ def internal_lock(name):
sem = threading.Semaphore()
_semaphores[name] = sem
- LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name})
+ LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
return sem
@contextlib.contextmanager
-def lock(name, lock_file_prefix=None, external=False):
+def lock(name, lock_file_prefix=None, external=False, lock_path=None):
"""Context based lock
This function yields a `threading.Semaphore` instance (if we don't use
@@ -201,18 +276,21 @@ def lock(name, lock_file_prefix=None, external=False):
:param external: The external keyword argument denotes whether this lock
should work across multiple processes. This means that if two different
- workers both run a a method decorated with @synchronized('mylock',
+ workers both run a method decorated with @synchronized('mylock',
external=True), only one of them will execute at a time.
"""
- if external and not CONF.disable_process_locking:
- lock = external_lock(name, lock_file_prefix)
- else:
- lock = internal_lock(name)
- with lock:
- yield lock
-
-
-def synchronized(name, lock_file_prefix=None, external=False):
+ int_lock = internal_lock(name)
+ with int_lock:
+ if external and not CONF.disable_process_locking:
+ ext_lock = external_lock(name, lock_file_prefix, lock_path)
+ with ext_lock:
+ yield ext_lock
+ else:
+ yield int_lock
+ LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
+
+
+def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
"""Synchronization decorator.
Decorating a method like so::
@@ -240,12 +318,12 @@ def synchronized(name, lock_file_prefix=None, external=False):
@functools.wraps(f)
def inner(*args, **kwargs):
try:
- with lock(name, lock_file_prefix, external):
- LOG.debug(_('Got semaphore / lock "%(function)s"'),
+ with lock(name, lock_file_prefix, external, lock_path):
+ LOG.debug('Got semaphore / lock "%(function)s"',
{'function': f.__name__})
return f(*args, **kwargs)
finally:
- LOG.debug(_('Semaphore / lock released "%(function)s"'),
+ LOG.debug('Semaphore / lock released "%(function)s"',
{'function': f.__name__})
return inner
return wrap
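Both lock() and synchronized() now accept a lock_path argument, and an explicit path forces the file-based lock rather than the new POSIX-semaphore InterProcessLock. A hedged usage sketch, assuming the synced module is importable; the lock names and '/var/lock/myapp' are placeholders::

    from pycadf.openstack.common import lockutils

    # Decorator form: only one worker process at a time runs this function.
    @lockutils.synchronized('my-resource', lock_file_prefix='myapp',
                            external=True, lock_path='/var/lock/myapp')
    def update_resource():
        pass

    # Context-manager form with the same new keyword argument.
    def update_other_resource():
        with lockutils.lock('other-resource', lock_file_prefix='myapp',
                            external=True, lock_path='/var/lock/myapp'):
            pass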
diff --git a/pycadf/openstack/common/log.py b/pycadf/openstack/common/log.py
index 9fcbce5..90f9c03 100644
--- a/pycadf/openstack/common/log.py
+++ b/pycadf/openstack/common/log.py
@@ -15,7 +15,7 @@
# License for the specific language governing permissions and limitations
# under the License.
-"""Openstack logging handler.
+"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
@@ -59,7 +59,10 @@ _SANITIZE_PATTERNS = []
_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
r'(<%(key)s>).*?(</%(key)s>)',
r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
- r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])']
+ r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
+ r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])'
+ '.*?([\'"])',
+ r'(%(key)s\s*--?[A-z]+\s*).*?([\s])']
for key in _SANITIZE_KEYS:
for pattern in _FORMAT_PATTERNS:
@@ -84,12 +87,10 @@ logging_cli_opts = [
cfg.StrOpt('log-config-append',
metavar='PATH',
deprecated_name='log-config',
- help='The name of logging configuration file. It does not '
- 'disable existing loggers, but just appends specified '
- 'logging configuration to any other existing logging '
- 'options. Please see the Python logging module '
- 'documentation for details on logging configuration '
- 'files.'),
+ help='The name of a logging configuration file. This file '
+ 'is appended to any existing logging configuration '
+ 'files. For details about logging configuration files, '
+ 'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
default=None,
metavar='FORMAT',
@@ -103,7 +104,7 @@ logging_cli_opts = [
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
- 'Default: %(default)s'),
+ 'Default: %(default)s .'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
@@ -112,30 +113,30 @@ logging_cli_opts = [
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
- '--log-file paths'),
+ '--log-file paths.'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
- 'and then will be changed in J to honor RFC5424'),
+ 'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
default=False,
- help='(Optional) Use syslog rfc5424 format for logging. '
- 'If enabled, will add APP-NAME (RFC5424) before the '
- 'MSG part of the syslog message. The old format '
- 'without APP-NAME is deprecated in I, '
+ help='(Optional) Enables or disables syslog rfc5424 format '
+ 'for logging. If enabled, prefixes the MSG part of the '
+ 'syslog message with APP-NAME (RFC5424). The '
+ 'format without the APP-NAME is deprecated in I, '
'and will be removed in J.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
- help='Syslog facility to receive log lines')
+ help='Syslog facility to receive log lines.')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
- help='Log output to standard error')
+ help='Log output to standard error.')
]
log_opts = [
@@ -143,18 +144,18 @@ log_opts = [
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user_identity)s] '
'%(instance)s%(message)s',
- help='Format string to use for log messages with context'),
+ help='Format string to use for log messages with context.'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
- help='Format string to use for log messages without context'),
+ help='Format string to use for log messages without context.'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
- help='Data to append to log format when level is DEBUG'),
+ help='Data to append to log format when level is DEBUG.'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
- help='Prefix each line of exception output with this format'),
+ help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
default=[
'amqp=WARN',
@@ -163,28 +164,29 @@ log_opts = [
'qpid=WARN',
'sqlalchemy=WARN',
'suds=INFO',
+ 'oslo.messaging=INFO',
'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN'
],
- help='List of logger=LEVEL pairs'),
+ help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
- help='Publish error events'),
+ help='Enables or disables publication of error events.'),
cfg.BoolOpt('fatal_deprecations',
default=False,
- help='Make deprecations fatal'),
+ help='Enables or disables fatal status of deprecations.'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
- help='If an instance is passed with the log message, format '
- 'it like this'),
+ help='The format for an instance that is passed with the log '
+ 'message. '),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
- help='If an instance UUID is passed with the log message, '
- 'format it like this'),
+ help='The format for an instance UUID that is passed with the '
+ 'log message. '),
]
CONF = cfg.CONF
@@ -357,7 +359,7 @@ class ContextAdapter(BaseLoggerAdapter):
extra.update(_dictify_context(context))
instance = kwargs.pop('instance', None)
- instance_uuid = (extra.get('instance_uuid', None) or
+ instance_uuid = (extra.get('instance_uuid') or
kwargs.pop('instance_uuid', None))
instance_extra = ''
if instance:
@@ -450,7 +452,7 @@ def _load_log_config(log_config_append):
logging.config.fileConfig(log_config_append,
disable_existing_loggers=False)
except moves.configparser.Error as exc:
- raise LogConfigError(log_config_append, str(exc))
+ raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
@@ -495,10 +497,16 @@ def _find_facility_from_conf():
class RFCSysLogHandler(logging.handlers.SysLogHandler):
def __init__(self, *args, **kwargs):
self.binary_name = _get_binary_name()
- super(RFCSysLogHandler, self).__init__(*args, **kwargs)
+ # Do not use super() unless type(logging.handlers.SysLogHandler)
+ # is 'type' (Python 2.7).
+ # Use old style calls, if the type is 'classobj' (Python 2.6)
+ logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
def format(self, record):
- msg = super(RFCSysLogHandler, self).format(record)
+ # Do not use super() unless type(logging.handlers.SysLogHandler)
+ # is 'type' (Python 2.7).
+ # Use old style calls, if the type is 'classobj' (Python 2.6)
+ msg = logging.handlers.SysLogHandler.format(self, record)
msg = self.binary_name + ' ' + msg
return msg
@@ -650,11 +658,11 @@ class ContextFormatter(logging.Formatter):
# NOTE(sdague): default the fancier formatting params
# to an empty string so we don't throw an exception if
# they get used
- for key in ('instance', 'color'):
+ for key in ('instance', 'color', 'user_identity'):
if key not in record.__dict__:
record.__dict__[key] = ''
- if record.__dict__.get('request_id', None):
+ if record.__dict__.get('request_id'):
self._fmt = CONF.logging_context_format_string
else:
self._fmt = CONF.logging_default_format_string
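The extra _FORMAT_PATTERNS added near the top of log.py extend password masking to command-line style arguments. A minimal standalone sketch (not the module's own helper) of how such key-based patterns are applied: each regex keeps the key and its surrounding delimiters and replaces only the secret between them::

    import re

    SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
    FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])']

    def mask_secrets(message, secret='***'):
        for key in SANITIZE_KEYS:
            for fmt in FORMAT_PATTERNS:
                pattern = re.compile(fmt % {'key': key})
                message = pattern.sub(r'\g<1>%s\g<2>' % secret, message)
        return message

    print(mask_secrets('request body: password="s3cr3t" user="bob"'))
    # request body: password="***" user="bob"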
diff --git a/requirements.txt b/requirements.txt
index 7afb091..61188b9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,6 +3,7 @@ iso8601>=0.1.9
netaddr>=0.7.6
oslo.config>=1.2.0
oslo.messaging>=1.3.0
+posix_ipc
pytz>=2010h
six>=1.6.0
WebOb>=1.2.3
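requirements.txt gains posix_ipc because lockutils now backs InterProcessLock with POSIX semaphores on non-Windows platforms. The _PosixLock comment above explains the semaphore-name scheme; a standalone sketch of just that naming step (stdlib only, no posix_ipc needed)::

    import base64
    import hashlib

    def posix_sem_name(name):
        # Hash the lock name (it may contain '/', which is not valid inside
        # a semaphore name) and base64-encode the 20-byte SHA-1 digest: the
        # result stays under the ~31-character shm name limit that the
        # 40-character hexdigest() would exceed.
        h = hashlib.sha1()
        h.update(name.encode('ascii'))
        return (b'/' + base64.urlsafe_b64encode(h.digest())).decode('ascii')

    print(posix_sem_name('my-resource'))  # 29 characters, starts with '/'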