diff options
Diffstat (limited to 'ceilometer/publisher/gnocchi.py')
-rw-r--r-- | ceilometer/publisher/gnocchi.py | 38 |
1 file changed, 30 insertions, 8 deletions
diff --git a/ceilometer/publisher/gnocchi.py b/ceilometer/publisher/gnocchi.py index 530013f9..19f186c7 100644 --- a/ceilometer/publisher/gnocchi.py +++ b/ceilometer/publisher/gnocchi.py @@ -24,6 +24,7 @@ import uuid from gnocchiclient import exceptions as gnocchi_exc from keystoneauth1 import exceptions as ka_exceptions +import oslo_cache from oslo_log import log from oslo_utils import timeutils from stevedore import extension @@ -214,7 +215,6 @@ class GnocchiPublisher(publisher.ConfigPublisherBase): self.cache = None try: - import oslo_cache oslo_cache.configure(conf) # NOTE(cdent): The default cache backend is a real but # noop backend. We don't want to use that here because @@ -225,8 +225,6 @@ class GnocchiPublisher(publisher.ConfigPublisherBase): self.cache = oslo_cache.configure_cache_region( conf, cache_region) self.cache.key_mangler = cache_key_mangler - except ImportError: - pass except oslo_cache.exception.ConfigurationError as exc: LOG.warning('unable to configure oslo_cache: %s', exc) @@ -327,7 +325,18 @@ class GnocchiPublisher(publisher.ConfigPublisherBase): # NOTE(sileht): skip sample generated by gnocchi itself data = [s for s in data if not self._is_gnocchi_activity(s)] - data.sort(key=operator.attrgetter('resource_id')) + + def value_to_sort(object_to_sort): + value = object_to_sort.resource_id + if not value: + LOG.debug("Resource ID was not defined for sample data [%s]. " + "Therefore, we will use an empty string as the " + "resource ID.", object_to_sort) + value = '' + + return value + + data.sort(key=value_to_sort) resource_grouped_samples = itertools.groupby( data, key=operator.attrgetter('resource_id')) @@ -371,9 +380,13 @@ class GnocchiPublisher(publisher.ConfigPublisherBase): try: self.batch_measures(measures, gnocchi_data) except gnocchi_exc.ClientException as e: - LOG.error(str(e)) + LOG.error("Gnocchi client exception while pushing measures [%s] " + "for gnocchi data [%s]: [%s].", measures, gnocchi_data, + str(e)) except Exception as e: - LOG.error(str(e), exc_info=True) + LOG.error("Unexpected exception while pushing measures [%s] for " + "gnocchi data [%s]: [%s].", measures, gnocchi_data, + str(e), exc_info=True) for info in gnocchi_data.values(): resource = info["resource"] @@ -385,9 +398,15 @@ class GnocchiPublisher(publisher.ConfigPublisherBase): self._if_not_cached(resource_type, resource['id'], resource_extra) except gnocchi_exc.ClientException as e: - LOG.error(str(e)) + LOG.error("Gnocchi client exception updating resource type " + "[%s] with ID [%s] for resource data [%s]: [%s].", + resource_type, resource.get('id'), resource_extra, + str(e)) except Exception as e: - LOG.error(str(e), exc_info=True) + LOG.error("Unexpected exception updating resource type [%s] " + "with ID [%s] for resource data [%s]: [%s].", + resource_type, resource.get('id'), resource_extra, + str(e), exc_info=True) @staticmethod def _extract_resources_from_error(e, resource_infos): @@ -402,6 +421,9 @@ class GnocchiPublisher(publisher.ConfigPublisherBase): # NOTE(sileht): We don't care about error here, we want # resources metadata always been updated try: + LOG.debug("Executing batch resource metrics measures for resource " + "[%s] and measures [%s].", resource_infos, measures) + self._gnocchi.metric.batch_resources_metrics_measures( measures, create_metrics=True) except gnocchi_exc.BadRequest as e: |