author     Steve Martinelli <stevemar@ca.ibm.com>  2014-10-20 10:54:17 -0400
committer  Steve Martinelli <stevemar@ca.ibm.com>  2014-10-20 13:34:01 -0400
commit     8819865131de03218fe29cb77f8da824f83010f1 (patch)
tree       fb9a7ff19b89ed6860a54b434dcc801b83cafb7f /pycadf
parent     fd916997e36c1f9a6bc773a9a6df79fce22a60b2 (diff)
download   pycadf-8819865131de03218fe29cb77f8da824f83010f1.tar.gz
Sync oslo libraries
Performed update.sh ../pycadf from oslo-incubator, synced to level
368daacaa3f4a65352f3e0c66a939a496c14ecca.

Change-Id: I1956702bea76eab3fecbf3ba04682451e5c4786a
Diffstat (limited to 'pycadf')
-rw-r--r--  pycadf/openstack/common/__init__.py           17
-rw-r--r--  pycadf/openstack/common/_i18n.py              40
-rw-r--r--  pycadf/openstack/common/context.py            15
-rw-r--r--  pycadf/openstack/common/fileutils.py          22
-rw-r--r--  pycadf/openstack/common/fixture/__init__.py   17
-rw-r--r--  pycadf/openstack/common/lockutils.py          92
-rw-r--r--  pycadf/openstack/common/log.py               202
7 files changed, 205 insertions, 200 deletions
diff --git a/pycadf/openstack/common/__init__.py b/pycadf/openstack/common/__init__.py
index d1223ea..e69de29 100644
--- a/pycadf/openstack/common/__init__.py
+++ b/pycadf/openstack/common/__init__.py
@@ -1,17 +0,0 @@
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import six
-
-
-six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
diff --git a/pycadf/openstack/common/_i18n.py b/pycadf/openstack/common/_i18n.py
new file mode 100644
index 0000000..e9993ad
--- /dev/null
+++ b/pycadf/openstack/common/_i18n.py
@@ -0,0 +1,40 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""oslo.i18n integration module.
+
+See http://docs.openstack.org/developer/oslo.i18n/usage.html
+
+"""
+
+import oslo.i18n
+
+
+# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
+# application name when this module is synced into the separate
+# repository. It is OK to have more than one translation function
+# using the same domain, since there will still only be one message
+# catalog.
+_translators = oslo.i18n.TranslatorFactory(domain='pycadf')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level.
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
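
Note: the new _i18n module wraps oslo.i18n's TranslatorFactory and exposes _ plus
the per-level log markers. A minimal consumer sketch (the function name and
message text below are illustrative, not part of the commit):

    # Hypothetical pycadf module; only the _i18n and log imports come
    # from code shown in this commit.
    from pycadf.openstack.common._i18n import _, _LI
    from pycadf.openstack.common import log as logging

    LOG = logging.getLogger(__name__)

    def process(count):
        if count < 0:
            # "_" marks user-facing strings for translation.
            raise ValueError(_("count must be non-negative"))
        # "_LI" marks an INFO-level log message for translation.
        LOG.info(_LI("processing %d items"), count)
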
diff --git a/pycadf/openstack/common/context.py b/pycadf/openstack/common/context.py
index 3eeb445..b612db7 100644
--- a/pycadf/openstack/common/context.py
+++ b/pycadf/openstack/common/context.py
@@ -77,6 +77,21 @@ class RequestContext(object):
'instance_uuid': self.instance_uuid,
'user_identity': user_idt}
+ @classmethod
+ def from_dict(cls, ctx):
+ return cls(
+ auth_token=ctx.get("auth_token"),
+ user=ctx.get("user"),
+ tenant=ctx.get("tenant"),
+ domain=ctx.get("domain"),
+ user_domain=ctx.get("user_domain"),
+ project_domain=ctx.get("project_domain"),
+ is_admin=ctx.get("is_admin", False),
+ read_only=ctx.get("read_only", False),
+ show_deleted=ctx.get("show_deleted", False),
+ request_id=ctx.get("request_id"),
+ instance_uuid=ctx.get("instance_uuid"))
+
def get_admin_context(show_deleted=False):
context = RequestContext(None,
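
Note: from_dict is the inverse of RequestContext.to_dict, so a context can be
rebuilt after being serialized (for example across RPC). A rough sketch, assuming
to_dict emits the same keys from_dict reads and the constructor accepts the
keyword arguments shown above:

    from pycadf.openstack.common.context import RequestContext

    ctx = RequestContext(user="alice", tenant="demo", is_admin=True)
    payload = ctx.to_dict()                       # plain dict, safe to serialize
    restored = RequestContext.from_dict(payload)  # same identity fields as ctx
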
diff --git a/pycadf/openstack/common/fileutils.py b/pycadf/openstack/common/fileutils.py
index e2506bd..426c570 100644
--- a/pycadf/openstack/common/fileutils.py
+++ b/pycadf/openstack/common/fileutils.py
@@ -18,7 +18,8 @@ import errno
import os
import tempfile
-from pycadf.openstack.common import excutils
+from oslo.utils import excutils
+
from pycadf.openstack.common import log as logging
LOG = logging.getLogger(__name__)
@@ -50,8 +51,8 @@ def read_cached_file(filename, force_reload=False):
"""
global _FILE_CACHE
- if force_reload and filename in _FILE_CACHE:
- del _FILE_CACHE[filename]
+ if force_reload:
+ delete_cached_file(filename)
reloaded = False
mtime = os.path.getmtime(filename)
@@ -66,6 +67,17 @@ def read_cached_file(filename, force_reload=False):
return (reloaded, cache_info['data'])
+def delete_cached_file(filename):
+ """Delete cached file if present.
+
+ :param filename: filename to delete
+ """
+ global _FILE_CACHE
+
+ if filename in _FILE_CACHE:
+ del _FILE_CACHE[filename]
+
+
def delete_if_exists(path, remove=os.unlink):
"""Delete a file, but ignore file not found error.
@@ -99,13 +111,13 @@ def remove_path_on_error(path, remove=delete_if_exists):
def file_open(*args, **kwargs):
"""Open file
- see built-in file() documentation for more details
+ see built-in open() documentation for more details
Note: The reason this is kept in a separate module is to easily
be able to provide a stub module that doesn't alter system
state at all (for unit tests)
"""
- return file(*args, **kwargs)
+ return open(*args, **kwargs)
def write_to_tempfile(content, path=None, suffix='', prefix='tmp'):
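
Note: force_reload now goes through the new delete_cached_file helper, and
file_open uses the Python-3-compatible built-in open(). A usage sketch (the path
is illustrative):

    from pycadf.openstack.common import fileutils

    # First read loads from disk; later reads return the cached copy
    # until the file's mtime changes.
    reloaded, data = fileutils.read_cached_file("/etc/pycadf/example.conf")

    # Drop the cached copy explicitly, which is also what
    # read_cached_file(..., force_reload=True) does internally now.
    fileutils.delete_cached_file("/etc/pycadf/example.conf")
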
diff --git a/pycadf/openstack/common/fixture/__init__.py b/pycadf/openstack/common/fixture/__init__.py
index e69de29..d1223ea 100644
--- a/pycadf/openstack/common/fixture/__init__.py
+++ b/pycadf/openstack/common/fixture/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+
+six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
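
Note: the six "mox" alias registration moves from the common package into the
fixture package, so test code now gets the alias by importing the fixture
package. A sketch of the intended effect (assumes mox, or mox3 on Python 3, is
installed):

    # Importing the fixture package runs the six.add_move() call above.
    import pycadf.openstack.common.fixture  # noqa
    from six.moves import mox                # resolves to mox or mox3.mox
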
diff --git a/pycadf/openstack/common/lockutils.py b/pycadf/openstack/common/lockutils.py
index 7bbb1d2..9814891 100644
--- a/pycadf/openstack/common/lockutils.py
+++ b/pycadf/openstack/common/lockutils.py
@@ -15,8 +15,8 @@
import contextlib
import errno
-import fcntl
import functools
+import logging
import os
import shutil
import subprocess
@@ -29,8 +29,7 @@ import weakref
from oslo.config import cfg
from pycadf.openstack.common import fileutils
-from pycadf.openstack.common.gettextutils import _, _LE, _LI
-from pycadf.openstack.common import log as logging
+from pycadf.openstack.common._i18n import _, _LE, _LI
LOG = logging.getLogger(__name__)
@@ -102,10 +101,8 @@ class _FileLock(object):
raise threading.ThreadError(_("Unable to acquire lock on"
" `%(filename)s` due to"
" %(exception)s") %
- {
- 'filename': self.fname,
- 'exception': e,
- })
+ {'filename': self.fname,
+ 'exception': e})
def __enter__(self):
self.acquire()
@@ -149,56 +146,12 @@ class _FcntlLock(_FileLock):
fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
-class _PosixLock(object):
- def __init__(self, name):
- # Hash the name because it's not valid to have POSIX semaphore
- # names with things like / in them. Then use base64 to encode
- # the digest() instead taking the hexdigest() because the
- # result is shorter and most systems can't have shm sempahore
- # names longer than 31 characters.
- h = hashlib.sha1()
- h.update(name.encode('ascii'))
- self.name = str((b'/' + base64.urlsafe_b64encode(
- h.digest())).decode('ascii'))
-
- def acquire(self, timeout=None):
- self.semaphore = posix_ipc.Semaphore(self.name,
- flags=posix_ipc.O_CREAT,
- initial_value=1)
- self.semaphore.acquire(timeout)
- return self
-
- def __enter__(self):
- self.acquire()
- return self
-
- def release(self):
- self.semaphore.release()
- self.semaphore.close()
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.release()
-
- def exists(self):
- try:
- semaphore = posix_ipc.Semaphore(self.name)
- except posix_ipc.ExistentialError:
- return False
- else:
- semaphore.close()
- return True
-
-
if os.name == 'nt':
import msvcrt
InterProcessLock = _WindowsLock
- FileLock = _WindowsLock
else:
- import base64
- import hashlib
- import posix_ipc
- InterProcessLock = _PosixLock
- FileLock = _FcntlLock
+ import fcntl
+ InterProcessLock = _FcntlLock
_semaphores = weakref.WeakValueDictionary()
_semaphores_lock = threading.Lock()
@@ -215,11 +168,7 @@ def _get_lock_path(name, lock_file_prefix, lock_path=None):
local_lock_path = lock_path or CONF.lock_path
if not local_lock_path:
- # NOTE(bnemec): Create a fake lock path for posix locks so we don't
- # unnecessarily raise the RequiredOptError below.
- if InterProcessLock is not _PosixLock:
- raise cfg.RequiredOptError('lock_path')
- local_lock_path = 'posixlock:/'
+ raise cfg.RequiredOptError('lock_path')
return os.path.join(local_lock_path, name)
@@ -230,16 +179,11 @@ def external_lock(name, lock_file_prefix=None, lock_path=None):
lock_file_path = _get_lock_path(name, lock_file_prefix, lock_path)
- # NOTE(bnemec): If an explicit lock_path was passed to us then it
- # means the caller is relying on file-based locking behavior, so
- # we can't use posix locks for those calls.
- if lock_path:
- return FileLock(lock_file_path)
return InterProcessLock(lock_file_path)
def remove_external_lock_file(name, lock_file_prefix=None):
- """Remove a external lock file when it's not used anymore
+ """Remove an external lock file when it's not used anymore
This will be helpful when we have a lot of lock files
"""
with internal_lock(name):
@@ -255,11 +199,12 @@ def internal_lock(name):
with _semaphores_lock:
try:
sem = _semaphores[name]
+ LOG.debug('Using existing semaphore "%s"', name)
except KeyError:
sem = threading.Semaphore()
_semaphores[name] = sem
+ LOG.debug('Created new semaphore "%s"', name)
- LOG.debug('Got semaphore "%(lock)s"', {'lock': name})
return sem
@@ -281,13 +226,16 @@ def lock(name, lock_file_prefix=None, external=False, lock_path=None):
"""
int_lock = internal_lock(name)
with int_lock:
- if external and not CONF.disable_process_locking:
- ext_lock = external_lock(name, lock_file_prefix, lock_path)
- with ext_lock:
- yield ext_lock
- else:
- yield int_lock
- LOG.debug('Released semaphore "%(lock)s"', {'lock': name})
+ LOG.debug('Acquired semaphore "%(lock)s"', {'lock': name})
+ try:
+ if external and not CONF.disable_process_locking:
+ ext_lock = external_lock(name, lock_file_prefix, lock_path)
+ with ext_lock:
+ yield ext_lock
+ else:
+ yield int_lock
+ finally:
+ LOG.debug('Releasing semaphore "%(lock)s"', {'lock': name})
def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
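
Note: with the posix_ipc backend dropped, external locks are always file based
again (fcntl on POSIX, msvcrt on Windows) and lock_path is required whenever
external=True. A usage sketch; the lock name and lock_path value are
illustrative:

    from oslo.config import cfg
    from pycadf.openstack.common import lockutils

    cfg.CONF.set_override("lock_path", "/var/lock/pycadf")

    @lockutils.synchronized("state-file", external=True)
    def update_state():
        pass  # only one process at a time runs this body

    with lockutils.lock("state-file", external=True):
        pass  # equivalent protection as a context manager
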
diff --git a/pycadf/openstack/common/log.py b/pycadf/openstack/common/log.py
index 90f9c03..2197609 100644
--- a/pycadf/openstack/common/log.py
+++ b/pycadf/openstack/common/log.py
@@ -33,42 +33,24 @@ import logging
import logging.config
import logging.handlers
import os
-import re
+import socket
import sys
import traceback
from oslo.config import cfg
+from oslo.serialization import jsonutils
+from oslo.utils import importutils
import six
from six import moves
-from pycadf.openstack.common.gettextutils import _
-from pycadf.openstack.common import importutils
-from pycadf.openstack.common import jsonutils
+_PY26 = sys.version_info[0:2] == (2, 6)
+
+from pycadf.openstack.common._i18n import _
from pycadf.openstack.common import local
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
-_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
-
-# NOTE(ldbragst): Let's build a list of regex objects using the list of
-# _SANITIZE_KEYS we already have. This way, we only have to add the new key
-# to the list of _SANITIZE_KEYS and we can generate regular expressions
-# for XML and JSON automatically.
-_SANITIZE_PATTERNS = []
-_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
- r'(<%(key)s>).*?(</%(key)s>)',
- r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
- r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
- r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])'
- '.*?([\'"])',
- r'(%(key)s\s*--?[A-z]+\s*).*?([\s])']
-
-for key in _SANITIZE_KEYS:
- for pattern in _FORMAT_PATTERNS:
- reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
- _SANITIZE_PATTERNS.append(reg_ex)
-
common_cli_opts = [
cfg.BoolOpt('debug',
@@ -92,7 +74,6 @@ logging_cli_opts = [
'files. For details about logging configuration files, '
'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
- default=None,
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
@@ -118,7 +99,7 @@ logging_cli_opts = [
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
- 'and will chang in J to honor RFC5424.'),
+ 'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
@@ -139,6 +120,14 @@ generic_log_opts = [
help='Log output to standard error.')
]
+DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
+ 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
+ 'oslo.messaging=INFO', 'iso8601=WARN',
+ 'requests.packages.urllib3.connectionpool=WARN',
+ 'urllib3.connectionpool=WARN', 'websocket=WARN',
+ "keystonemiddleware=WARN", "routes.middleware=WARN",
+ "stevedore=WARN"]
+
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
@@ -157,17 +146,7 @@ log_opts = [
'%(instance)s',
help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
- default=[
- 'amqp=WARN',
- 'amqplib=WARN',
- 'boto=WARN',
- 'qpid=WARN',
- 'sqlalchemy=WARN',
- 'suds=INFO',
- 'oslo.messaging=INFO',
- 'iso8601=WARN',
- 'requests.packages.urllib3.connectionpool=WARN'
- ],
+ default=DEFAULT_LOG_LEVELS,
help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
@@ -182,11 +161,11 @@ log_opts = [
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='The format for an instance that is passed with the log '
- 'message. '),
+ 'message.'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='The format for an instance UUID that is passed with the '
- 'log message. '),
+ 'log message.'),
]
CONF = cfg.CONF
@@ -245,45 +224,20 @@ def _get_log_file_path(binary=None):
return None
-def mask_password(message, secret="***"):
- """Replace password with 'secret' in message.
-
- :param message: The string which includes security information.
- :param secret: value with which to replace passwords.
- :returns: The unicode value of message with the password fields masked.
-
- For example:
-
- >>> mask_password("'adminPass' : 'aaaaa'")
- "'adminPass' : '***'"
- >>> mask_password("'admin_pass' : 'aaaaa'")
- "'admin_pass' : '***'"
- >>> mask_password('"password" : "aaaaa"')
- '"password" : "***"'
- >>> mask_password("'original_password' : 'aaaaa'")
- "'original_password' : '***'"
- >>> mask_password("u'original_password' : u'aaaaa'")
- "u'original_password' : u'***'"
- """
- message = six.text_type(message)
-
- # NOTE(ldbragst): Check to see if anything in message contains any key
- # specified in _SANITIZE_KEYS, if not then just return the message since
- # we don't have to mask any passwords.
- if not any(key in message for key in _SANITIZE_KEYS):
- return message
-
- secret = r'\g<1>' + secret + r'\g<2>'
- for pattern in _SANITIZE_PATTERNS:
- message = re.sub(pattern, secret, message)
- return message
-
-
class BaseLoggerAdapter(logging.LoggerAdapter):
def audit(self, msg, *args, **kwargs):
self.log(logging.AUDIT, msg, *args, **kwargs)
+ def isEnabledFor(self, level):
+ if _PY26:
+ # This method was added in python 2.7 (and it does the exact
+ # same logic, so we need to do the exact same logic so that
+ # python 2.6 has this capability as well).
+ return self.logger.isEnabledFor(level)
+ else:
+ return super(BaseLoggerAdapter, self).isEnabledFor(level)
+
class LazyAdapter(BaseLoggerAdapter):
def __init__(self, name='unknown', version='unknown'):
@@ -296,6 +250,11 @@ class LazyAdapter(BaseLoggerAdapter):
def logger(self):
if not self._logger:
self._logger = getLogger(self.name, self.version)
+ if six.PY3:
+ # In Python 3, the code fails because the 'manager' attribute
+ # cannot be found when using a LoggerAdapter as the
+ # underlying logger. Work around this issue.
+ self._logger.manager = self._logger.logger.manager
return self._logger
@@ -341,11 +300,10 @@ class ContextAdapter(BaseLoggerAdapter):
self.warn(stdmsg, *args, **kwargs)
def process(self, msg, kwargs):
- # NOTE(mrodden): catch any Message/other object and
- # coerce to unicode before they can get
- # to the python logging and possibly
- # cause string encoding trouble
- if not isinstance(msg, six.string_types):
+ # NOTE(jecarey): If msg is not unicode, coerce it into unicode
+ # before it can get to the python logging and
+ # possibly cause string encoding trouble
+ if not isinstance(msg, six.text_type):
msg = six.text_type(msg)
if 'extra' not in kwargs:
@@ -425,9 +383,7 @@ class JSONFormatter(logging.Formatter):
def _create_logging_excepthook(product_name):
def logging_excepthook(exc_type, value, tb):
- extra = {}
- if CONF.verbose or CONF.debug:
- extra['exc_info'] = (exc_type, value, tb)
+ extra = {'exc_info': (exc_type, value, tb)}
getLogger(product_name).critical(
"".join(traceback.format_exception_only(exc_type, value)),
**extra)
@@ -451,7 +407,7 @@ def _load_log_config(log_config_append):
try:
logging.config.fileConfig(log_config_append,
disable_existing_loggers=False)
- except moves.configparser.Error as exc:
+ except (moves.configparser.Error, KeyError) as exc:
raise LogConfigError(log_config_append, six.text_type(exc))
@@ -464,10 +420,20 @@ def setup(product_name, version='unknown'):
sys.excepthook = _create_logging_excepthook(product_name)
-def set_defaults(logging_context_format_string):
- cfg.set_defaults(log_opts,
- logging_context_format_string=
- logging_context_format_string)
+def set_defaults(logging_context_format_string=None,
+ default_log_levels=None):
+ # Just in case the caller is not setting the
+ # default_log_level. This is insurance because
+ # we introduced the default_log_level parameter
+ # later in a backwards in-compatible change
+ if default_log_levels is not None:
+ cfg.set_defaults(
+ log_opts,
+ default_log_levels=default_log_levels)
+ if logging_context_format_string is not None:
+ cfg.set_defaults(
+ log_opts,
+ logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
@@ -516,18 +482,6 @@ def _setup_logging_from_conf(project, version):
for handler in log_root.handlers:
log_root.removeHandler(handler)
- if CONF.use_syslog:
- facility = _find_facility_from_conf()
- # TODO(bogdando) use the format provided by RFCSysLogHandler
- # after existing syslog format deprecation in J
- if CONF.use_syslog_rfc_format:
- syslog = RFCSysLogHandler(address='/dev/log',
- facility=facility)
- else:
- syslog = logging.handlers.SysLogHandler(address='/dev/log',
- facility=facility)
- log_root.addHandler(syslog)
-
logpath = _get_log_file_path()
if logpath:
filelog = logging.handlers.WatchedFileHandler(logpath)
@@ -544,9 +498,14 @@ def _setup_logging_from_conf(project, version):
log_root.addHandler(streamlog)
if CONF.publish_errors:
- handler = importutils.import_object(
- "pycadf.openstack.common.log_handler.PublishErrorsHandler",
- logging.ERROR)
+ try:
+ handler = importutils.import_object(
+ "pycadf.openstack.common.log_handler.PublishErrorsHandler",
+ logging.ERROR)
+ except ImportError:
+ handler = importutils.import_object(
+ "oslo.messaging.notify.log_handler.PublishErrorsHandler",
+ logging.ERROR)
log_root.addHandler(handler)
datefmt = CONF.log_date_format
@@ -572,9 +531,29 @@ def _setup_logging_from_conf(project, version):
for pair in CONF.default_log_levels:
mod, _sep, level_name = pair.partition('=')
- level = logging.getLevelName(level_name)
logger = logging.getLogger(mod)
- logger.setLevel(level)
+ # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
+ # to integer code.
+ if sys.version_info < (2, 7):
+ level = logging.getLevelName(level_name)
+ logger.setLevel(level)
+ else:
+ logger.setLevel(level_name)
+
+ if CONF.use_syslog:
+ try:
+ facility = _find_facility_from_conf()
+ # TODO(bogdando) use the format provided by RFCSysLogHandler
+ # after existing syslog format deprecation in J
+ if CONF.use_syslog_rfc_format:
+ syslog = RFCSysLogHandler(facility=facility)
+ else:
+ syslog = logging.handlers.SysLogHandler(facility=facility)
+ log_root.addHandler(syslog)
+ except socket.error:
+ log_root.error('Unable to add syslog handler. Verify that syslog '
+ 'is running.')
+
_loggers = {}
@@ -644,6 +623,12 @@ class ContextFormatter(logging.Formatter):
def format(self, record):
"""Uses contextstring if request_id is set, otherwise default."""
+ # NOTE(jecarey): If msg is not unicode, coerce it into unicode
+ # before it can get to the python logging and
+ # possibly cause string encoding trouble
+ if not isinstance(record.msg, six.text_type):
+ record.msg = six.text_type(record.msg)
+
# store project info
record.project = self.project
record.version = self.version
@@ -663,14 +648,19 @@ class ContextFormatter(logging.Formatter):
record.__dict__[key] = ''
if record.__dict__.get('request_id'):
- self._fmt = CONF.logging_context_format_string
+ fmt = CONF.logging_context_format_string
else:
- self._fmt = CONF.logging_default_format_string
+ fmt = CONF.logging_default_format_string
if (record.levelno == logging.DEBUG and
CONF.logging_debug_format_suffix):
- self._fmt += " " + CONF.logging_debug_format_suffix
+ fmt += " " + CONF.logging_debug_format_suffix
+ if sys.version_info < (3, 2):
+ self._fmt = fmt
+ else:
+ self._style = logging.PercentStyle(fmt)
+ self._fmt = self._style._fmt
# Cache this on the record, Logger will respect our formatted copy
if record.exc_info:
record.exc_text = self.formatException(record.exc_info, record)
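
Note: set_defaults now also accepts default_log_levels, so an application can
extend the module-level DEFAULT_LOG_LEVELS list before calling setup(). A
sketch; the extra "mylib=WARN" pair is illustrative:

    from pycadf.openstack.common import log as logging

    # Extend the shipped defaults instead of replacing them wholesale.
    logging.set_defaults(
        default_log_levels=logging.DEFAULT_LOG_LEVELS + ["mylib=WARN"])

    logging.setup("pycadf")
    LOG = logging.getLogger(__name__)
    LOG.info("logging configured")
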