diff options
author | Gordon Chung <chungg@ca.ibm.com> | 2013-08-07 16:36:33 -0400 |
---|---|---|
committer | Gordon Chung <chungg@ca.ibm.com> | 2013-08-07 16:36:33 -0400 |
commit | 959e7fc797f368b9809ca2cde704ef0c32de0e70 (patch) | |
tree | c23fbe2b19c74f7ed20c4c9fbc867e54fa0d57a9 | |
parent | 7bf0f20abe7e721f28d9638c32e05e3e40d95356 (diff) | |
download | pycadf-0.1.tar.gz |
- need to update attribute validation to test against basestring
instead of str
- drop use of openstack logger
Change-Id: I2167cf8e468c0e932c476238120d768e5d4d443f
Fixes: bug #1209387
-rw-r--r-- | openstack-common.conf | 3 | ||||
-rw-r--r-- | pycadf/attachment.py | 6 | ||||
-rw-r--r-- | pycadf/audit/api.py | 11 | ||||
-rw-r--r-- | pycadf/event.py | 3 | ||||
-rw-r--r-- | pycadf/geolocation.py | 23 | ||||
-rw-r--r-- | pycadf/identifier.py | 2 | ||||
-rw-r--r-- | pycadf/metric.py | 4 | ||||
-rw-r--r-- | pycadf/openstack/common/gettextutils.py | 2 | ||||
-rw-r--r-- | pycadf/openstack/common/local.py | 47 | ||||
-rw-r--r-- | pycadf/openstack/common/log.py | 559 | ||||
-rw-r--r-- | pycadf/path.py | 2 | ||||
-rw-r--r-- | pycadf/reason.py | 20 | ||||
-rw-r--r-- | pycadf/reporterstep.py | 2 | ||||
-rw-r--r-- | pycadf/resource.py | 6 | ||||
-rw-r--r-- | pycadf/tag.py | 2 | ||||
-rw-r--r-- | pycadf/timestamp.py | 9 |
16 files changed, 54 insertions, 647 deletions
diff --git a/openstack-common.conf b/openstack-common.conf index c503cff..692b9cb 100644 --- a/openstack-common.conf +++ b/openstack-common.conf @@ -1,5 +1,6 @@ [DEFAULT] module=config.generator +module=gettextutils +module=importutils module=jsonutils -module=log base=pycadf diff --git a/pycadf/attachment.py b/pycadf/attachment.py index 77cf425..bfd9b95 100644 --- a/pycadf/attachment.py +++ b/pycadf/attachment.py @@ -33,10 +33,12 @@ class Attachment(cadftype.CADFAbstractType): # the set of approved attachment types in order to # limit and validate them. typeURI = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_TYPEURI, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) content = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_CONTENT) name = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_NAME, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) def __init__(self, typeURI=None, content=None, name=None): # Attachment.typeURI diff --git a/pycadf/audit/api.py b/pycadf/audit/api.py index 3f252e4..71e11c2 100644 --- a/pycadf/audit/api.py +++ b/pycadf/audit/api.py @@ -24,7 +24,6 @@ import urlparse from pycadf import cadftaxonomy as taxonomy from pycadf import cadftype from pycadf import eventfactory as factory -from pycadf.openstack.common import log as logging from pycadf import reason from pycadf import reporterstep from pycadf import resource @@ -32,8 +31,6 @@ from pycadf import tag from pycadf import timestamp cfg.CONF.import_opt('api_audit_map', 'pycadf.audit', group='audit') - -LOG = logging.getLogger(__name__) CONF = cfg.CONF @@ -65,6 +62,10 @@ class ClientResource(resource.Resource): self.status = status +class PycadfAuditApiConfigError(Exception): + """Error raised when pyCADF fails to configure correctly.""" + + class OpenStackAuditApi(object): _API_PATHS = [] @@ -80,7 +81,6 @@ class OpenStackAuditApi(object): cfg_file = CONF.audit.api_audit_map if not os.path.exists(CONF.audit.api_audit_map): cfg_file = 
cfg.CONF.find_file(CONF.audit.api_audit_map) - LOG.debug("API path config file: %s", cfg_file) if cfg_file: try: @@ -104,7 +104,8 @@ class OpenStackAuditApi(object): except ConfigParser.NoSectionError: pass except ConfigParser.ParsingError as err: - LOG.error('Error parsing audit map file: %s' % err) + raise PycadfAuditApiConfigError( + 'Error parsing audit map file: %s' % err) def _get_action(self, req): """Take a given Request, parse url path to calculate action type. diff --git a/pycadf/event.py b/pycadf/event.py index 2bfb7e8..916b4c3 100644 --- a/pycadf/event.py +++ b/pycadf/event.py @@ -92,7 +92,8 @@ class Event(cadftype.CADFAbstractType): EVENT_KEYNAME_REASON, lambda x: isinstance(x, reason.Reason) and x.is_valid()) severity = cadftype.ValidatorDescriptor(EVENT_KEYNAME_SEVERITY, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) def __init__(self, eventType=cadftype.EVENTTYPE_ACTIVITY, id=identifier.generate_uuid(), diff --git a/pycadf/geolocation.py b/pycadf/geolocation.py index 86222ea..e2ffd82 100644 --- a/pycadf/geolocation.py +++ b/pycadf/geolocation.py @@ -52,19 +52,26 @@ class Geolocation(cadftype.CADFAbstractType): # TODO(mrutkows): we may want to do more validation to make # sure numeric range represented by string is valid latitude = cadftype.ValidatorDescriptor(GEO_KEYNAME_LATITUDE, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) longitude = cadftype.ValidatorDescriptor(GEO_KEYNAME_LONGITUDE, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) elevation = cadftype.ValidatorDescriptor(GEO_KEYNAME_ELEVATION, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) accuracy = cadftype.ValidatorDescriptor(GEO_KEYNAME_ACCURACY, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) city = cadftype.ValidatorDescriptor(GEO_KEYNAME_CITY, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) state = 
cadftype.ValidatorDescriptor(GEO_KEYNAME_STATE, - lambda x: isinstance(x, str)) - regionICANN = cadftype.ValidatorDescriptor(GEO_KEYNAME_REGIONICANN, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, + basestring)) + regionICANN = cadftype.ValidatorDescriptor( + GEO_KEYNAME_REGIONICANN, + lambda x: isinstance(x, basestring)) def __init__(self, id=None, latitude=None, longitude=None, elevation=None, accuracy=None, city=None, state=None, diff --git a/pycadf/identifier.py b/pycadf/identifier.py index bf7684d..69354a2 100644 --- a/pycadf/identifier.py +++ b/pycadf/identifier.py @@ -37,6 +37,6 @@ def generate_uuid(): # TODO(mrutkows): validate any cadf:Identifier (type) record against # CADF schema. This would include schema validation as an optional parm. def is_valid(value): - if not isinstance(value, str): + if not isinstance(value, basestring): raise TypeError return True diff --git a/pycadf/metric.py b/pycadf/metric.py index 34376eb..262c89a 100644 --- a/pycadf/metric.py +++ b/pycadf/metric.py @@ -40,9 +40,9 @@ class Metric(cadftype.CADFAbstractType): metricId = cadftype.ValidatorDescriptor(METRIC_KEYNAME_METRICID, lambda x: identifier.is_valid(x)) unit = cadftype.ValidatorDescriptor(METRIC_KEYNAME_UNIT, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, basestring)) name = cadftype.ValidatorDescriptor(METRIC_KEYNAME_NAME, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, basestring)) def __init__(self, metricId=identifier.generate_uuid(), unit=None, name=None): diff --git a/pycadf/openstack/common/gettextutils.py b/pycadf/openstack/common/gettextutils.py index f2a0044..9f5524b 100644 --- a/pycadf/openstack/common/gettextutils.py +++ b/pycadf/openstack/common/gettextutils.py @@ -137,7 +137,7 @@ class Message(UserString.UserString, object): # look for %(blah) fields in string; # ignore %% and deal with the # case where % is first character on the line - keys = re.findall('(?:[^%]|^)%\((\w*)\)[a-z]', full_msg) + keys = 
re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg) # if we don't find any %(blah) blocks but have a %s if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg): diff --git a/pycadf/openstack/common/local.py b/pycadf/openstack/common/local.py deleted file mode 100644 index e82f17d..0000000 --- a/pycadf/openstack/common/local.py +++ /dev/null @@ -1,47 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Local storage of variables using weak references""" - -import threading -import weakref - - -class WeakLocal(threading.local): - def __getattribute__(self, attr): - rval = super(WeakLocal, self).__getattribute__(attr) - if rval: - # NOTE(mikal): this bit is confusing. What is stored is a weak - # reference, not the value itself. We therefore need to lookup - # the weak reference and return the inner value here. - rval = rval() - return rval - - def __setattr__(self, attr, value): - value = weakref.ref(value) - return super(WeakLocal, self).__setattr__(attr, value) - - -# NOTE(mikal): the name "store" should be deprecated in the future -store = WeakLocal() - -# A "weak" store uses weak references and allows an object to fall out of scope -# when it falls out of scope in the code that uses the thread local storage. A -# "strong" store will hold a reference to the object so that it never falls out -# of scope. 
-weak_store = WeakLocal() -strong_store = threading.local() diff --git a/pycadf/openstack/common/log.py b/pycadf/openstack/common/log.py deleted file mode 100644 index 216c918..0000000 --- a/pycadf/openstack/common/log.py +++ /dev/null @@ -1,559 +0,0 @@ -# vim: tabstop=4 shiftwidth=4 softtabstop=4 - -# Copyright 2011 OpenStack Foundation. -# Copyright 2010 United States Government as represented by the -# Administrator of the National Aeronautics and Space Administration. -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -"""Openstack logging handler. - -This module adds to logging functionality by adding the option to specify -a context object when calling the various log methods. If the context object -is not specified, default formatting is used. Additionally, an instance uuid -may be passed as part of the log message, which is intended to make it easier -for admins to find messages related to a specific instance. - -It also allows setting of formatting information through conf. 
- -""" - -import inspect -import itertools -import logging -import logging.config -import logging.handlers -import os -import sys -import traceback - -from oslo.config import cfg -from six import moves - -from pycadf.openstack.common.gettextutils import _ # noqa -from pycadf.openstack.common import importutils -from pycadf.openstack.common import jsonutils -from pycadf.openstack.common import local - - -_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" - -common_cli_opts = [ - cfg.BoolOpt('debug', - short='d', - default=False, - help='Print debugging output (set logging level to ' - 'DEBUG instead of default WARNING level).'), - cfg.BoolOpt('verbose', - short='v', - default=False, - help='Print more verbose output (set logging level to ' - 'INFO instead of default WARNING level).'), -] - -logging_cli_opts = [ - cfg.StrOpt('log-config', - metavar='PATH', - help='If this option is specified, the logging configuration ' - 'file specified is used and overrides any other logging ' - 'options specified. Please see the Python logging module ' - 'documentation for details on logging configuration ' - 'files.'), - cfg.StrOpt('log-format', - default=None, - metavar='FORMAT', - help='DEPRECATED. ' - 'A logging.Formatter log message format string which may ' - 'use any of the available logging.LogRecord attributes. ' - 'This option is deprecated. Please use ' - 'logging_context_format_string and ' - 'logging_default_format_string instead.'), - cfg.StrOpt('log-date-format', - default=_DEFAULT_LOG_DATE_FORMAT, - metavar='DATE_FORMAT', - help='Format string for %%(asctime)s in log records. ' - 'Default: %(default)s'), - cfg.StrOpt('log-file', - metavar='PATH', - deprecated_name='logfile', - help='(Optional) Name of log file to output to. 
' - 'If no default is set, logging will go to stdout.'), - cfg.StrOpt('log-dir', - deprecated_name='logdir', - help='(Optional) The base directory used for relative ' - '--log-file paths'), - cfg.BoolOpt('use-syslog', - default=False, - help='Use syslog for logging.'), - cfg.StrOpt('syslog-log-facility', - default='LOG_USER', - help='syslog facility to receive log lines') -] - -generic_log_opts = [ - cfg.BoolOpt('use_stderr', - default=True, - help='Log output to standard error') -] - -log_opts = [ - cfg.StrOpt('logging_context_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [%(request_id)s %(user)s %(tenant)s] ' - '%(instance)s%(message)s', - help='format string to use for log messages with context'), - cfg.StrOpt('logging_default_format_string', - default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s ' - '%(name)s [-] %(instance)s%(message)s', - help='format string to use for log messages without context'), - cfg.StrOpt('logging_debug_format_suffix', - default='%(funcName)s %(pathname)s:%(lineno)d', - help='data to append to log format when level is DEBUG'), - cfg.StrOpt('logging_exception_prefix', - default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s ' - '%(instance)s', - help='prefix each line of exception output with this format'), - cfg.ListOpt('default_log_levels', - default=[ - 'amqplib=WARN', - 'sqlalchemy=WARN', - 'boto=WARN', - 'suds=INFO', - 'keystone=INFO', - 'eventlet.wsgi.server=WARN' - ], - help='list of logger=LEVEL pairs'), - cfg.BoolOpt('publish_errors', - default=False, - help='publish error events'), - cfg.BoolOpt('fatal_deprecations', - default=False, - help='make deprecations fatal'), - - # NOTE(mikal): there are two options here because sometimes we are handed - # a full instance (and could include more information), and other times we - # are just handed a UUID for the instance. 
- cfg.StrOpt('instance_format', - default='[instance: %(uuid)s] ', - help='If an instance is passed with the log message, format ' - 'it like this'), - cfg.StrOpt('instance_uuid_format', - default='[instance: %(uuid)s] ', - help='If an instance UUID is passed with the log message, ' - 'format it like this'), -] - -CONF = cfg.CONF -CONF.register_cli_opts(common_cli_opts) -CONF.register_cli_opts(logging_cli_opts) -CONF.register_opts(generic_log_opts) -CONF.register_opts(log_opts) - -# our new audit level -# NOTE(jkoelker) Since we synthesized an audit level, make the logging -# module aware of it so it acts like other levels. -logging.AUDIT = logging.INFO + 1 -logging.addLevelName(logging.AUDIT, 'AUDIT') - - -try: - NullHandler = logging.NullHandler -except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7 - class NullHandler(logging.Handler): - def handle(self, record): - pass - - def emit(self, record): - pass - - def createLock(self): - self.lock = None - - -def _dictify_context(context): - if context is None: - return None - if not isinstance(context, dict) and getattr(context, 'to_dict', None): - context = context.to_dict() - return context - - -def _get_binary_name(): - return os.path.basename(inspect.stack()[-1][1]) - - -def _get_log_file_path(binary=None): - logfile = CONF.log_file - logdir = CONF.log_dir - - if logfile and not logdir: - return logfile - - if logfile and logdir: - return os.path.join(logdir, logfile) - - if logdir: - binary = binary or _get_binary_name() - return '%s.log' % (os.path.join(logdir, binary),) - - -class BaseLoggerAdapter(logging.LoggerAdapter): - - def audit(self, msg, *args, **kwargs): - self.log(logging.AUDIT, msg, *args, **kwargs) - - -class LazyAdapter(BaseLoggerAdapter): - def __init__(self, name='unknown', version='unknown'): - self._logger = None - self.extra = {} - self.name = name - self.version = version - - @property - def logger(self): - if not self._logger: - self._logger = getLogger(self.name, 
self.version) - return self._logger - - -class ContextAdapter(BaseLoggerAdapter): - warn = logging.LoggerAdapter.warning - - def __init__(self, logger, project_name, version_string): - self.logger = logger - self.project = project_name - self.version = version_string - - @property - def handlers(self): - return self.logger.handlers - - def deprecated(self, msg, *args, **kwargs): - stdmsg = _("Deprecated: %s") % msg - if CONF.fatal_deprecations: - self.critical(stdmsg, *args, **kwargs) - raise DeprecatedConfig(msg=stdmsg) - else: - self.warn(stdmsg, *args, **kwargs) - - def process(self, msg, kwargs): - if 'extra' not in kwargs: - kwargs['extra'] = {} - extra = kwargs['extra'] - - context = kwargs.pop('context', None) - if not context: - context = getattr(local.store, 'context', None) - if context: - extra.update(_dictify_context(context)) - - instance = kwargs.pop('instance', None) - instance_extra = '' - if instance: - instance_extra = CONF.instance_format % instance - else: - instance_uuid = kwargs.pop('instance_uuid', None) - if instance_uuid: - instance_extra = (CONF.instance_uuid_format - % {'uuid': instance_uuid}) - extra.update({'instance': instance_extra}) - - extra.update({"project": self.project}) - extra.update({"version": self.version}) - extra['extra'] = extra.copy() - return msg, kwargs - - -class JSONFormatter(logging.Formatter): - def __init__(self, fmt=None, datefmt=None): - # NOTE(jkoelker) we ignore the fmt argument, but its still there - # since logging.config.fileConfig passes it. 
- self.datefmt = datefmt - - def formatException(self, ei, strip_newlines=True): - lines = traceback.format_exception(*ei) - if strip_newlines: - lines = [itertools.ifilter( - lambda x: x, - line.rstrip().splitlines()) for line in lines] - lines = list(itertools.chain(*lines)) - return lines - - def format(self, record): - message = {'message': record.getMessage(), - 'asctime': self.formatTime(record, self.datefmt), - 'name': record.name, - 'msg': record.msg, - 'args': record.args, - 'levelname': record.levelname, - 'levelno': record.levelno, - 'pathname': record.pathname, - 'filename': record.filename, - 'module': record.module, - 'lineno': record.lineno, - 'funcname': record.funcName, - 'created': record.created, - 'msecs': record.msecs, - 'relative_created': record.relativeCreated, - 'thread': record.thread, - 'thread_name': record.threadName, - 'process_name': record.processName, - 'process': record.process, - 'traceback': None} - - if hasattr(record, 'extra'): - message['extra'] = record.extra - - if record.exc_info: - message['traceback'] = self.formatException(record.exc_info) - - return jsonutils.dumps(message) - - -def _create_logging_excepthook(product_name): - def logging_excepthook(type, value, tb): - extra = {} - if CONF.verbose: - extra['exc_info'] = (type, value, tb) - getLogger(product_name).critical(str(value), **extra) - return logging_excepthook - - -class LogConfigError(Exception): - - message = _('Error loading logging config %(log_config)s: %(err_msg)s') - - def __init__(self, log_config, err_msg): - self.log_config = log_config - self.err_msg = err_msg - - def __str__(self): - return self.message % dict(log_config=self.log_config, - err_msg=self.err_msg) - - -def _load_log_config(log_config): - try: - logging.config.fileConfig(log_config) - except moves.configparser.Error as exc: - raise LogConfigError(log_config, str(exc)) - - -def setup(product_name): - """Setup logging.""" - if CONF.log_config: - _load_log_config(CONF.log_config) - else: - 
_setup_logging_from_conf() - sys.excepthook = _create_logging_excepthook(product_name) - - -def set_defaults(logging_context_format_string): - cfg.set_defaults(log_opts, - logging_context_format_string= - logging_context_format_string) - - -def _find_facility_from_conf(): - facility_names = logging.handlers.SysLogHandler.facility_names - facility = getattr(logging.handlers.SysLogHandler, - CONF.syslog_log_facility, - None) - - if facility is None and CONF.syslog_log_facility in facility_names: - facility = facility_names.get(CONF.syslog_log_facility) - - if facility is None: - valid_facilities = facility_names.keys() - consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON', - 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS', - 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP', - 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3', - 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7'] - valid_facilities.extend(consts) - raise TypeError(_('syslog facility must be one of: %s') % - ', '.join("'%s'" % fac - for fac in valid_facilities)) - - return facility - - -def _setup_logging_from_conf(): - log_root = getLogger(None).logger - for handler in log_root.handlers: - log_root.removeHandler(handler) - - if CONF.use_syslog: - facility = _find_facility_from_conf() - syslog = logging.handlers.SysLogHandler(address='/dev/log', - facility=facility) - log_root.addHandler(syslog) - - logpath = _get_log_file_path() - if logpath: - filelog = logging.handlers.WatchedFileHandler(logpath) - log_root.addHandler(filelog) - - if CONF.use_stderr: - streamlog = ColorHandler() - log_root.addHandler(streamlog) - - elif not CONF.log_file: - # pass sys.stdout as a positional argument - # python2.6 calls the argument strm, in 2.7 it's stream - streamlog = logging.StreamHandler(sys.stdout) - log_root.addHandler(streamlog) - - if CONF.publish_errors: - handler = importutils.import_object( - "pycadf.openstack.common.log_handler.PublishErrorsHandler", - logging.ERROR) - 
log_root.addHandler(handler) - - datefmt = CONF.log_date_format - for handler in log_root.handlers: - # NOTE(alaski): CONF.log_format overrides everything currently. This - # should be deprecated in favor of context aware formatting. - if CONF.log_format: - handler.setFormatter(logging.Formatter(fmt=CONF.log_format, - datefmt=datefmt)) - log_root.info('Deprecated: log_format is now deprecated and will ' - 'be removed in the next release') - else: - handler.setFormatter(ContextFormatter(datefmt=datefmt)) - - if CONF.debug: - log_root.setLevel(logging.DEBUG) - elif CONF.verbose: - log_root.setLevel(logging.INFO) - else: - log_root.setLevel(logging.WARNING) - - for pair in CONF.default_log_levels: - mod, _sep, level_name = pair.partition('=') - level = logging.getLevelName(level_name) - logger = logging.getLogger(mod) - logger.setLevel(level) - -_loggers = {} - - -def getLogger(name='unknown', version='unknown'): - if name not in _loggers: - _loggers[name] = ContextAdapter(logging.getLogger(name), - name, - version) - return _loggers[name] - - -def getLazyLogger(name='unknown', version='unknown'): - """Returns lazy logger. - - Creates a pass-through logger that does not create the real logger - until it is really needed and delegates all calls to the real logger - once it is created. - """ - return LazyAdapter(name, version) - - -class WritableLogger(object): - """A thin wrapper that responds to `write` and logs.""" - - def __init__(self, logger, level=logging.INFO): - self.logger = logger - self.level = level - - def write(self, msg): - self.logger.log(self.level, msg) - - -class ContextFormatter(logging.Formatter): - """A context.RequestContext aware formatter configured through flags. - - The flags used to set format strings are: logging_context_format_string - and logging_default_format_string. You can also specify - logging_debug_format_suffix to append extra formatting if the log level is - debug. 
- - For information about what variables are available for the formatter see: - http://docs.python.org/library/logging.html#formatter - - """ - - def format(self, record): - """Uses contextstring if request_id is set, otherwise default.""" - # NOTE(sdague): default the fancier formating params - # to an empty string so we don't throw an exception if - # they get used - for key in ('instance', 'color'): - if key not in record.__dict__: - record.__dict__[key] = '' - - if record.__dict__.get('request_id', None): - self._fmt = CONF.logging_context_format_string - else: - self._fmt = CONF.logging_default_format_string - - if (record.levelno == logging.DEBUG and - CONF.logging_debug_format_suffix): - self._fmt += " " + CONF.logging_debug_format_suffix - - # Cache this on the record, Logger will respect our formated copy - if record.exc_info: - record.exc_text = self.formatException(record.exc_info, record) - return logging.Formatter.format(self, record) - - def formatException(self, exc_info, record=None): - """Format exception output with CONF.logging_exception_prefix.""" - if not record: - return logging.Formatter.formatException(self, exc_info) - - stringbuffer = moves.StringIO() - traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], - None, stringbuffer) - lines = stringbuffer.getvalue().split('\n') - stringbuffer.close() - - if CONF.logging_exception_prefix.find('%(asctime)') != -1: - record.asctime = self.formatTime(record, self.datefmt) - - formatted_lines = [] - for line in lines: - pl = CONF.logging_exception_prefix % record.__dict__ - fl = '%s%s' % (pl, line) - formatted_lines.append(fl) - return '\n'.join(formatted_lines) - - -class ColorHandler(logging.StreamHandler): - LEVEL_COLORS = { - logging.DEBUG: '\033[00;32m', # GREEN - logging.INFO: '\033[00;36m', # CYAN - logging.AUDIT: '\033[01;36m', # BOLD CYAN - logging.WARN: '\033[01;33m', # BOLD YELLOW - logging.ERROR: '\033[01;31m', # BOLD RED - logging.CRITICAL: '\033[01;31m', # BOLD RED - } - - 
def format(self, record): - record.color = self.LEVEL_COLORS[record.levelno] - return logging.StreamHandler.format(self, record) - - -class DeprecatedConfig(Exception): - message = _("Fatal call to deprecated config: %(msg)s") - - def __init__(self, msg): - super(Exception, self).__init__(self.message % dict(msg=msg)) diff --git a/pycadf/path.py b/pycadf/path.py index d9934ff..439d72f 100644 --- a/pycadf/path.py +++ b/pycadf/path.py @@ -32,7 +32,7 @@ class Path(cadftype.CADFAbstractType): # TODO(mrutkows): validate any cadf:Path (type) record against CADF schema @staticmethod def is_valid(value): - if not isinstance(value, str): + if not isinstance(value, basestring): raise TypeError return True diff --git a/pycadf/reason.py b/pycadf/reason.py index 57f559f..cf41f8c 100644 --- a/pycadf/reason.py +++ b/pycadf/reason.py @@ -33,14 +33,18 @@ REASON_KEYNAMES = [REASON_KEYNAME_REASONTYPE, class Reason(cadftype.CADFAbstractType): - reasonType = cadftype.ValidatorDescriptor(REASON_KEYNAME_REASONTYPE, - lambda x: isinstance(x, str)) - reasonCode = cadftype.ValidatorDescriptor(REASON_KEYNAME_REASONCODE, - lambda x: isinstance(x, str)) - policyType = cadftype.ValidatorDescriptor(REASON_KEYNAME_POLICYTYPE, - lambda x: isinstance(x, str)) - policyId = cadftype.ValidatorDescriptor(REASON_KEYNAME_POLICYID, - lambda x: isinstance(x, str)) + reasonType = cadftype.ValidatorDescriptor( + REASON_KEYNAME_REASONTYPE, + lambda x: isinstance(x, basestring)) + reasonCode = cadftype.ValidatorDescriptor( + REASON_KEYNAME_REASONCODE, + lambda x: isinstance(x, basestring)) + policyType = cadftype.ValidatorDescriptor( + REASON_KEYNAME_POLICYTYPE, + lambda x: isinstance(x, basestring)) + policyId = cadftype.ValidatorDescriptor( + REASON_KEYNAME_POLICYID, + lambda x: isinstance(x, basestring)) def __init__(self, reasonType=None, reasonCode=None, policyType=None, policyId=None): diff --git a/pycadf/reporterstep.py b/pycadf/reporterstep.py index 81d70a7..29cab96 100644 --- a/pycadf/reporterstep.py 
+++ b/pycadf/reporterstep.py @@ -43,7 +43,7 @@ class Reporterstep(cadftype.CADFAbstractType): reporter = cadftype.ValidatorDescriptor( REPORTERSTEP_KEYNAME_REPORTER, (lambda x: isinstance(x, resource.Resource) or - (isinstance(x, str) and + (isinstance(x, basestring) and (x == 'initiator' or x == 'target')))) reporterId = cadftype.ValidatorDescriptor( REPORTERSTEP_KEYNAME_REPORTERID, lambda x: identifier.is_valid(x)) diff --git a/pycadf/resource.py b/pycadf/resource.py index 3b2a42d..0d30a42 100644 --- a/pycadf/resource.py +++ b/pycadf/resource.py @@ -50,16 +50,16 @@ class Resource(cadftype.CADFAbstractType): id = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_ID, lambda x: identifier.is_valid(x)) name = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_NAME, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, basestring)) domain = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_DOMAIN, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, basestring)) # TODO(mrutkows): validate the "ref" attribute is indeed a URI (format), # If it is a URL, we do not need to validate it is accessible/working, # for audit purposes this could have been a valid URL at some point # in the past or a URL that is only valid within some domain (e.g. 
a # private cloud) ref = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_REF, - lambda x: isinstance(x, str)) + lambda x: isinstance(x, basestring)) geolocation = cadftype.ValidatorDescriptor( RESOURCE_KEYNAME_GEO, lambda x: isinstance(x, geolocation.Geolocation)) diff --git a/pycadf/tag.py b/pycadf/tag.py index 2539978..fbe7532 100644 --- a/pycadf/tag.py +++ b/pycadf/tag.py @@ -29,6 +29,6 @@ def generate_name_value_tag(name, value): # TODO(mrutkows): validate any Tag's name?value= format def is_valid(value): - if not isinstance(value, str): + if not isinstance(value, basestring): raise TypeError return True diff --git a/pycadf/timestamp.py b/pycadf/timestamp.py index 15d87d0..0079c1b 100644 --- a/pycadf/timestamp.py +++ b/pycadf/timestamp.py @@ -19,9 +19,6 @@ import datetime import pytz -from pycadf.openstack.common import log as logging - -LOG = logging.getLogger(__name__) TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z" @@ -30,15 +27,15 @@ def get_utc_now(timezone=None): if timezone is not None: try: utc_datetime = utc_datetime.astimezone(pytz.timezone(timezone)) - except Exception as e: - LOG.error('Unknown timezone: %s' % e) + except Exception: + utc_datetime.strftime(TIME_FORMAT) return utc_datetime.strftime(TIME_FORMAT) # TODO(mrutkows): validate any cadf:Timestamp (type) record against # CADF schema def is_valid(value): - if not isinstance(value, str): + if not isinstance(value, basestring): raise ValueError('Timestamp should be a String') return True |