-rw-r--r--  setup.py                            3
-rw-r--r--  src/server/__init__.py             18
-rw-r--r--  src/server/newrelic/__init__.py     0
-rw-r--r--  src/server/newrelic/interface.py  189
-rw-r--r--  src/server/newrelic/main.py        62
-rw-r--r--  src/server/newrelic/sampler.py    601
-rw-r--r--  src/server/wsgi_interp.c            8
-rw-r--r--  src/server/wsgi_metrics.c          14
-rw-r--r--  src/server/wsgi_metrics.h           4
-rw-r--r--  tests/environ.wsgi                 26
-rw-r--r--  tox.ini                             4
11 files changed, 49 insertions(+), 880 deletions(-)
diff --git a/setup.py b/setup.py
index 309ad3c..872f437 100644
--- a/setup.py
+++ b/setup.py
@@ -179,7 +179,7 @@ setup(name = 'mod_wsgi',
],
packages = ['mod_wsgi', 'mod_wsgi.server', 'mod_wsgi.server.management',
'mod_wsgi.server.management.commands', 'mod_wsgi.docs',
- 'mod_wsgi.images', 'mod_wsgi.server.newrelic'],
+ 'mod_wsgi.images'],
package_dir = {'mod_wsgi': 'src', 'mod_wsgi.docs': 'docs/_build/html',
'mod_wsgi.images': 'images'},
package_data = {'mod_wsgi.docs': _documentation(),
@@ -187,4 +187,5 @@ setup(name = 'mod_wsgi',
ext_modules = [extension],
entry_points = { 'console_scripts':
['mod_wsgi-express = mod_wsgi.server:main'],},
+ install_requires=['mod_wsgi-metrics >= 1.0.0'],
)
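
setup.py now declares the external dependency instead of shipping the plugin
code itself, so installing mod_wsgi pulls in the metrics package
automatically. To get the same result by hand (command is illustrative, the
version pin is per the diff above):

    pip install 'mod_wsgi-metrics >= 1.0.0'
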
diff --git a/src/server/__init__.py b/src/server/__init__.py
index aa08b3c..c64ef4f 100644
--- a/src/server/__init__.py
+++ b/src/server/__init__.py
@@ -651,9 +651,23 @@ def generate_wsgi_handler_script(options):
print(WSGI_DEFAULT_SCRIPT % options, file=fp)
SERVER_METRICS_SCRIPT = """
-from mod_wsgi.server.newrelic.main import start
-start('%(host)s:%(port)s')
+import logging
+
+logging.basicConfig(level=logging.INFO,
+    format='%%(name)s (pid=%%(process)d, level=%%(levelname)s): %%(message)s')
+
+_logger = logging.getLogger(__name__)
+
+try:
+ from mod_wsgi.metrics.newrelic import Agent
+
+ agent = Agent()
+ agent.start()
+
+except ImportError:
+ _logger.fatal('The module mod_wsgi.metrics.newrelic is not available. '
+ 'The New Relic platform plugin has been disabled. Install the '
+ '"mod_wsgi-metrics" package.')
"""
def generate_server_metrics_script(options):
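
The body of generate_server_metrics_script() is not shown in this hunk. A
minimal sketch of what it presumably does, mirroring the
generate_wsgi_handler_script() pattern above (the target file name and the
'server_root' option are assumptions, not taken from the diff):

    def generate_server_metrics_script(options):
        # Hypothetical path; the real name is not visible in this hunk.
        path = os.path.join(options['server_root'], 'server-metrics.py')
        with open(path, 'w') as fp:
            # The %(...)s interpolation here is why the logging format
            # string above doubles its percent signs as %%.
            print(SERVER_METRICS_SCRIPT % options, file=fp)
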
diff --git a/src/server/newrelic/__init__.py b/src/server/newrelic/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/server/newrelic/__init__.py
+++ /dev/null
diff --git a/src/server/newrelic/interface.py b/src/server/newrelic/interface.py
deleted file mode 100644
index a9db15f..0000000
--- a/src/server/newrelic/interface.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import zlib
-import sys
-import socket
-import os
-import types
-import json
-import logging
-
-try:
- import http.client as httplib
-except ImportError:
- import httplib
-
-_logger = logging.getLogger(__name__)
-
-# Python 3 compatibility helpers.
-
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-
-if PY3:
- def b(s):
- return s.encode('latin-1')
-else:
- def b(s):
- return s
-
-# Helpers for json encoding and decoding.
-
-def json_encode(obj, **kwargs):
- _kwargs = {}
-
- if type(b'') is type(''):
- _kwargs['encoding'] = 'latin-1'
-
- def _encode(o):
- if isinstance(o, bytes):
- return o.decode('latin-1')
- elif isinstance(o, types.GeneratorType):
- return list(o)
- elif hasattr(o, '__iter__'):
- return list(iter(o))
- raise TypeError(repr(o) + ' is not JSON serializable')
-
- _kwargs['default'] = _encode
- _kwargs['separators'] = (',', ':')
-
- _kwargs.update(kwargs)
-
- return json.dumps(obj, **_kwargs)
-
-def json_decode(s, **kwargs):
- return json.loads(s, **kwargs)
-
-# Platform plugin interface.
-
-class Interface(object):
-
- class NetworkInterfaceException(Exception): pass
- class DiscardDataForRequest(NetworkInterfaceException): pass
- class RetryDataForRequest(NetworkInterfaceException): pass
- class ServerIsUnavailable(RetryDataForRequest): pass
-
- USER_AGENT = 'ModWsgi-PythonPlugin/%s (Python %s %s)' % (
- '1.0.0', sys.version.split()[0], sys.platform)
-
- HOST = 'platform-api.newrelic.com'
- URL = '/platform/v1/metrics'
-
- def __init__(self, license_key):
- self.license_key = license_key
-
- def send_request(self, payload=()):
- headers = {}
- config = {}
-
- license_key = self.license_key
-
- if not self.license_key:
- license_key = 'INVALID LICENSE KEY'
-
- headers['User-Agent'] = self.USER_AGENT
- headers['Content-Encoding'] = 'identity'
- headers['X-License-Key'] = license_key
-
- try:
- data = json_encode(payload)
-
- except Exception as exc:
- _logger.exception('Error encoding data for JSON payload '
- 'with payload of %r.', payload)
-
- raise Interface.DiscardDataForRequest(str(exc))
-
- if len(data) > 64*1024:
- headers['Content-Encoding'] = 'deflate'
- level = (len(data) < 2000000) and 1 or 9
- data = zlib.compress(b(data), level)
-
- try:
- connection = httplib.HTTPSConnection(self.HOST, timeout=30.0)
- connection.request('POST', self.URL, data, headers)
- response = connection.getresponse()
- content = response.read()
-
- except httplib.HTTPException as exc:
- raise Interface.RetryDataForRequest(str(exc))
-
- finally:
- connection.close()
-
- if response.status != 200:
- _logger.debug('Received a non 200 HTTP response from the data '
- 'collector where headers=%r, status=%r and content=%r.',
- headers, response.status, content)
-
- if response.status == 400:
- if headers['Content-Encoding'] == 'deflate':
- data = zlib.decompress(data)
-
- _logger.error('Data collector is indicating that a bad '
- 'request has been submitted for headers of %r and '
- 'payload of %r with response of %r.', headers, data,
- content)
-
- raise Interface.DiscardDataForRequest()
-
- elif response.status == 403:
- _logger.error('Data collector is indicating that the license '
- 'key %r is not valid.', license_key)
-
- raise Interface.DiscardDataForRequest()
-
- elif response.status == 413:
- _logger.warning('Data collector is indicating that a request '
- 'was received where the request content size was over '
- 'the maximum allowed size limit. The length of the '
- 'request content was %d.', len(data))
-
- raise Interface.DiscardDataForRequest()
-
- elif response.status in (503, 504):
- _logger.warning('Data collector is unavailable.')
-
- raise Interface.ServerIsUnavailable()
-
- elif response.status != 200:
- _logger.warning('An unexpected HTTP response was received '
- 'from the data collector of %r. The payload for '
-                    'the request was %r.', response.status, payload)
-
- raise Interface.DiscardDataForRequest()
-
- try:
- if PY3:
- content = content.decode('UTF-8')
-
- result = json_decode(content)
-
- except Exception as exc:
- _logger.exception('Error decoding data for JSON payload '
- 'with payload of %r.', content)
-
- raise Interface.DiscardDataForRequest(str(exc))
-
- if 'status' in result:
- return result['status']
-
- error_message = result['error']
-
- raise Interface.DiscardDataForRequest(error_message)
-
- def send_metrics(self, name, guid, version, duration, metrics):
- agent = {}
- agent['host'] = socket.gethostname()
- agent['pid'] = os.getpid()
-        agent['version'] = version or '0.0.0'
-
- component = {}
- component['name'] = name
- component['guid'] = guid
- component['duration'] = duration
- component['metrics'] = metrics
-
- payload = {}
- payload['agent'] = agent
- payload['components'] = [component]
-
- return self.send_request(payload)
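
For reference, send_metrics() assembled its payload in the shape expected by
the New Relic platform API before handing it to send_request(). A sketch of
that structure, reconstructed from the deleted code (all concrete values are
illustrative):

    payload = {
        'agent': {
            'host': 'web1.example.com',    # socket.gethostname()
            'pid': 1234,                   # os.getpid()
            'version': '1.0.0',
        },
        'components': [{
            'name': 'localhost:8000',      # the host:port name passed in
            'guid': 'au.com.dscpl.wsgi.mod_wsgi',
            'duration': 60,                # reporting period in seconds
            'metrics': {
                # Each metric name maps to a Sample dict.
                'Component/Requests/Throughput[|requests]': {
                    'count': 10, 'total': 10.0, 'min': 0.0,
                    'max': 0.0, 'sum_of_squares': 0.0,
                },
            },
        }],
    }

Payloads over 64KB were deflate-compressed before being POSTed to
platform-api.newrelic.com.
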
diff --git a/src/server/newrelic/main.py b/src/server/newrelic/main.py
deleted file mode 100644
index afa8972..0000000
--- a/src/server/newrelic/main.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import os
-import logging
-
-try:
- from ConfigParser import RawConfigParser, NoOptionError
-except ImportError:
- from configparser import RawConfigParser, NoOptionError
-
-from .interface import Interface
-from .sampler import Sampler
-
-import apache
-
-LOG_LEVEL = {
- 'CRITICAL': logging.CRITICAL,
- 'ERROR': logging.ERROR,
- 'WARNING': logging.WARNING,
- 'INFO': logging.INFO,
- 'DEBUG': logging.DEBUG,
-}
-
-LOG_FORMAT = '%(asctime)s (%(process)d/%(threadName)s) ' \
- '%(name)s %(levelname)s - %(message)s'
-
-def start(name):
- if apache.server_metrics() is None:
- return
-
- config_object = RawConfigParser()
-
- config_file = os.environ.get('NEW_RELIC_CONFIG_FILE')
-
- if config_file:
- config_object.read([config_file])
-
- def option(name, section='newrelic', type=None, **kwargs):
- try:
- getter = 'get%s' % (type or '')
- return getattr(config_object, getter)(section, name)
- except NoOptionError:
- if 'default' in kwargs:
- return kwargs['default']
- else:
- raise
-
- log_level = os.environ.get('NEW_RELIC_LOG_LEVEL', 'INFO').upper()
- log_level = option('log_level', default=log_level).upper()
-
- if log_level in LOG_LEVEL:
- log_level = LOG_LEVEL[log_level]
- else:
- log_level = logging.INFO
-
- logging.basicConfig(level=log_level, format=LOG_FORMAT)
-
- license_key = os.environ.get('NEW_RELIC_LICENSE_KEY')
- license_key = option('license_key', default=license_key)
-
- interface = Interface(license_key)
- sampler = Sampler(interface, name)
-
- sampler.start()
diff --git a/src/server/newrelic/sampler.py b/src/server/newrelic/sampler.py
deleted file mode 100644
index 73eab83..0000000
--- a/src/server/newrelic/sampler.py
+++ /dev/null
@@ -1,601 +0,0 @@
-import threading
-import atexit
-import os
-import sys
-import json
-import socket
-import time
-import math
-
-try:
- import Queue as queue
-except ImportError:
- import queue
-
-import apache
-
-SERVER_READY = '_'
-SERVER_STARTING = 'S'
-SERVER_BUSY_READ = 'R'
-SERVER_BUSY_WRITE = 'W'
-SERVER_BUSY_KEEPALIVE = 'K'
-SERVER_BUSY_LOG = 'L'
-SERVER_BUSY_DNS = 'D'
-SERVER_CLOSING = 'C'
-SERVER_GRACEFUL = 'G'
-SERVER_IDLE_KILL = 'I'
-SERVER_DEAD = '.'
-
-STATUS_FLAGS = {
- SERVER_READY: 'Ready',
- SERVER_STARTING: 'Starting',
- SERVER_BUSY_READ: 'Read',
- SERVER_BUSY_WRITE: 'Write',
-    SERVER_BUSY_KEEPALIVE: 'Keepalive',
- SERVER_BUSY_LOG: 'Logging',
- SERVER_BUSY_DNS: 'DNS lookup',
- SERVER_CLOSING: 'Closing',
- SERVER_GRACEFUL: 'Graceful',
- SERVER_IDLE_KILL: 'Dying',
- SERVER_DEAD: 'Dead'
-}
-
-class Sample(dict):
-
- def __init__(self, count=0, total=0.0, min=0.0, max=0.0,
- sum_of_squares=0.0):
- self.count = count
- self.total = total
- self.min = min
- self.max = max
- self.sum_of_squares = sum_of_squares
-
- def __setattr__(self, name, value):
- self[name] = value
-
- def __getattr__(self, name):
- return self[name]
-
- def merge_stats(self, other):
- self.total += other.total
- self.min = self.count and min(self.min, other.min) or other.min
- self.max = max(self.max, other.max)
- self.sum_of_squares += other.sum_of_squares
- self.count += other.count
-
- def merge_value(self, value):
- self.total += value
- self.min = self.count and min(self.min, value) or value
- self.max = max(self.max, value)
- self.sum_of_squares += value ** 2
- self.count += 1
-
-class Samples(object):
-
- def __init__(self):
- self.samples = {}
-
- def __iter__(self):
- return iter(self.samples.items())
-
- def sample_name(self, name):
- return 'Component/' + name
-
-    def _assign_value(self, name, value):
-        if isinstance(value, Sample):
-            sample = value
-            self.samples[name] = sample
-        else:
-            sample = Sample()
-            self.samples[name] = sample
-            sample.merge_value(value)
-
-        return sample
-
-    def assign_value(self, name, value):
-        name = self.sample_name(name)
-
-        return self._assign_value(name, value)
-
- def _merge_value(self, name, value):
- sample = self.samples.get(name)
-
- if sample is None:
- sample = Sample()
- self.samples[name] = sample
-
- if isinstance(value, Sample):
- sample.merge_stats(value)
- else:
- sample.merge_value(value)
-
- return sample
-
- def merge_value(self, name, value):
- name = self.sample_name(name)
-
- return self._merge_value(name, value)
-
- def fetch_sample(self, name):
- name = self.sample_name(name)
-
- sample = self.samples.get(name)
-
- if sample is None:
- sample = Sample()
- self.samples[name] = sample
-
- return sample
-
- def merge_samples(self, samples):
- for name, sample in samples:
- self._merge_value(name, sample)
-
- def assign_samples(self, samples):
- for name, sample in samples:
- self._assign_value(name, sample)
-
- def clear_samples(self):
- self.samples.clear()
-
-class Sampler(object):
-
- guid = 'au.com.dscpl.wsgi.mod_wsgi'
- version = '1.0.0'
-
- def __init__(self, interface, name):
- self.interface = interface
- self.name = name
-
- self.running = False
- self.lock = threading.Lock()
-
- self.period_start = 0
- self.access_count = 0
- self.bytes_served = 0
-
- self.request_samples = []
-
- self.metric_data = Samples()
-
- self.report_queue = queue.Queue()
-
- self.report_thread = threading.Thread(target=self.report_main_loop)
- self.report_thread.setDaemon(True)
-
- self.report_start = 0
- self.report_metrics = Samples()
-
- self.monitor_queue = queue.Queue()
-
- self.monitor_thread = threading.Thread(target=self.monitor_main_loop)
- self.monitor_thread.setDaemon(True)
-
- self.monitor_count = 0
-
- def upload_report(self, start, end, metrics):
- try:
- self.interface.send_metrics(self.name, self.guid, self.version,
- end-start, metrics.samples)
-
- except self.interface.RetryDataForRequest:
- return True
-
- except Exception:
- pass
-
- return False
-
- def generate_request_metrics(self, harvest_data):
- metrics = Samples()
-
- # Chart as 'Throughput'.
-
- metrics.merge_value('Requests/Throughput[|requests]',
- Sample(count=harvest_data['access_count'],
- total=harvest_data['access_count']))
-
- # Calculate from the set of sampled requests the average
- # and percentile metrics.
-
- requests = harvest_data['request_samples']
-
- if requests:
- for request in requests:
- # Chart as 'Average'.
-
- metrics.merge_value('Requests/Response Time[seconds|request]',
- request['duration'])
-
- requests.sort(key=lambda e: e['duration'])
-
- total = sum([x['duration'] for x in requests])
-
- # Chart as 'Average'.
-
- metrics.merge_value('Requests/Percentiles/Average[seconds]',
- total/len(requests))
-
- idx50 = int(0.50 * len(requests))
- metrics.merge_value('Requests/Percentiles/Median[seconds]',
- requests[idx50]['duration'])
-
- idx95 = int(0.95 * len(requests))
- metrics.merge_value('Requests/Percentiles/95%[seconds]',
- requests[idx95]['duration'])
-
- idx99 = int(0.99 * len(requests))
- metrics.merge_value('Requests/Percentiles/99%[seconds]',
- requests[idx99]['duration'])
-
- # Chart as 'Rate'.
-
- metrics.merge_value('Requests/Bytes Served[bytes]',
- harvest_data['bytes_served'])
-
- return metrics
-
- def generate_process_metrics(self, harvest_data):
- metrics = Samples()
-
- # Chart as 'Count'. Round to Integer.
-
- metrics.merge_value('Processes/Instances[|processes]',
- Sample(count=math.ceil(float(
- harvest_data['processes_running']) /
- harvest_data['sample_count'])))
-
- metrics.merge_value('Processes/Lifecycle/Starting[|processes]',
- Sample(count=harvest_data['processes_started']))
-
- metrics.merge_value('Processes/Lifecycle/Stopping[|processes]',
- Sample(count=harvest_data['processes_stopped']))
-
- metrics.merge_value('Workers/Availability/Idle[|workers]',
- Sample(count=math.ceil(float(
- harvest_data['idle_workers']) /
- harvest_data['sample_count'])))
- metrics.merge_value('Workers/Availability/Busy[|workers]',
- Sample(count=math.ceil(float(
- harvest_data['busy_workers']) /
- harvest_data['sample_count'])))
-
- # Chart as 'Percentage'.
-
- metrics.merge_value('Workers/Utilization[server]',
- (float(harvest_data['busy_workers']) /
- harvest_data['sample_count']) / (
- harvest_data['server_limit']*harvest_data['thread_limit']))
-
- total = 0
- for value in harvest_data['worker_status'].values():
- value = float(value)/harvest_data['sample_count']
- total += value
-
- if total:
- for key, value in harvest_data['worker_status'].items():
- if key != SERVER_DEAD and value != 0:
- label = STATUS_FLAGS.get(key, 'Unknown')
-
- # Chart as 'Average'. Round to Integer.
-
- value = float(value)/harvest_data['sample_count']
-
- metrics.merge_value('Workers/Status/%s[workers]' %
- label, (value/total)*total)
-
- return metrics
-
- def report_main_loop(self):
- # We need a set of cached metrics for the case where
- # we fail in uploading the metric data and need to
- # retain it for the next attempt to upload data.
-
- retries = 0
- retained_start = 0
- retained = Samples()
-
- # We simply wait to be passed the metric data to be
- # reported for the current sample period.
-
- while True:
- harvest_data = self.report_queue.get()
-
-            # If harvest_data is None then we are being told to
-            # exit as the process is being shut down. Otherwise
-            # we should be passed the cumulative metric data
-            # and the set of sampled requests.
-
- if harvest_data is None:
- return
-
- start = harvest_data['period_start']
- end = harvest_data['period_end']
-
- metrics = harvest_data['metrics']
-
- # Add metric to track how many Apache server instances
- # are reporting for each sample period.
-
- # Chart as 'Count'. Round to Integer.
-
- metrics.merge_value('Server/Instances[|servers]', 0)
-
- # Generate percentiles metrics for request samples.
-
- metrics.merge_samples(self.generate_request_metrics(harvest_data))
- metrics.merge_samples(self.generate_process_metrics(harvest_data))
-
- # If we had metrics from a previous reporting period
- # because we couldn't upload the metric data, we need
- # to merge the data from the current reporting period
- # with that for the previous period.
-
- if retained.samples:
- start = retained_start
- retained.merge_samples(metrics)
- metrics = retained
-
- # Now attempt to upload the metric data.
-
- retry = self.upload_report(start, end, metrics)
-
-            # If a failure occurred but the failure type was such that
-            # we could try again to upload the data, then retain the
-            # metrics. If we have too many failed attempts though, we
-            # give up.
-
- if retry:
- retries += 1
-
- if retries == 5:
- retries = 0
-
- else:
- retained = metrics
-
- else:
- retries = 0
-
- if retries == 0:
- retained_start = 0
- retained.clear_samples()
-
- else:
- retained_start = start
- retained = metrics
-
- def generate_scoreboard(self, sample_start=None):
- busy_workers = 0
- idle_workers = 0
- access_count = 0
- bytes_served = 0
-
- active_processes = 0
-
- scoreboard = apache.server_metrics()
-
- if sample_start is None:
- sample_start = scoreboard['current_time']
-
- scoreboard['request_samples'] = request_samples = []
-
- for process in scoreboard['processes']:
- process['active_workers'] = 0
-
- for worker in process['workers']:
- status = worker['status']
-
- if not process['quiescing'] and process['pid']:
- if (status == SERVER_READY and process['generation'] ==
- scoreboard['running_generation']):
-
- process['active_workers'] += 1
- idle_workers += 1
-
- elif status not in (SERVER_DEAD, SERVER_STARTING,
- SERVER_IDLE_KILL):
-
- process['active_workers'] += 1
- busy_workers += 1
-
- count = worker['access_count']
-
- if count or status not in (SERVER_READY, SERVER_DEAD):
- access_count += count
- bytes_served += worker['bytes_served']
-
- current_time = scoreboard['current_time']
-
- start_time = worker['start_time']
- stop_time = worker['stop_time']
-
- if (stop_time > start_time and sample_start < stop_time
- and stop_time <= current_time):
-
- duration = stop_time - start_time
- thread_num = worker['thread_num']
-
- request_samples.append(dict(start_time=start_time,
- duration=duration, thread_num=thread_num))
-
- if process['active_workers']:
- active_processes += 1
-
- scoreboard['busy_workers'] = busy_workers
- scoreboard['idle_workers'] = idle_workers
- scoreboard['access_count'] = access_count
- scoreboard['bytes_served'] = bytes_served
-
- scoreboard['active_processes'] = active_processes
-
- return scoreboard
-
- def record_process_statistics(self, scoreboard, harvest_data):
- current_active_processes = scoreboard['active_processes']
- previous_active_processes = harvest_data['active_processes']
-
- harvest_data['active_processes'] = current_active_processes
- harvest_data['processes_running'] += current_active_processes
-
- if current_active_processes > previous_active_processes:
- harvest_data['processes_started'] += (current_active_processes -
- previous_active_processes)
-
- elif current_active_processes < previous_active_processes:
- harvest_data['processes_stopped'] += (previous_active_processes -
- current_active_processes)
-
- harvest_data['idle_workers'] += scoreboard['idle_workers']
- harvest_data['busy_workers'] += scoreboard['busy_workers']
-
- for process in scoreboard['processes']:
- for worker in process['workers']:
- harvest_data['worker_status'][worker['status']] += 1
-
- def monitor_main_loop(self):
- scoreboard = self.generate_scoreboard()
-
- harvest_start = scoreboard['current_time']
- sample_start = harvest_start
- sample_duration = 0.0
-
- access_count = scoreboard['access_count']
- bytes_served = scoreboard['bytes_served']
-
- harvest_data = {}
-
- harvest_data['sample_count'] = 0
- harvest_data['period_start'] = harvest_start
-
- harvest_data['metrics'] = Samples()
-
- harvest_data['request_samples'] = []
-
- harvest_data['active_processes'] = 0
-
- harvest_data['processes_running'] = 0
- harvest_data['processes_started'] = 0
- harvest_data['processes_stopped'] = 0
-
- harvest_data['idle_workers'] = 0
- harvest_data['busy_workers'] = 0
-
- harvest_data['server_limit'] = scoreboard['server_limit']
- harvest_data['thread_limit'] = scoreboard['thread_limit']
-
- harvest_data['worker_status'] = {}
-
- for status in STATUS_FLAGS.keys():
- harvest_data['worker_status'][status] = 0
-
- harvest_data['access_count'] = 0
- harvest_data['bytes_served'] = 0
-
- # Chart as 'Count'. Round to Integer.
-
- harvest_data['metrics'].merge_value('Server/Restarts[|servers]', 0)
-
- start = time.time()
- end = start + 60.0
-
- while True:
- try:
- # We want to collect metrics on a regular second
- # interval so we need to align the timeout value.
-
- now = time.time()
- start += 1.0
- timeout = start - now
-
- return self.monitor_queue.get(timeout=timeout)
-
- except queue.Empty:
- pass
-
- harvest_data['sample_count'] += 1
-
- scoreboard = self.generate_scoreboard(sample_start)
-
- harvest_end = scoreboard['current_time']
- sample_end = harvest_end
-
- sample_duration = sample_end - sample_start
-
- self.record_process_statistics(scoreboard, harvest_data)
-
- harvest_data['request_samples'].extend(
- scoreboard['request_samples'])
-
- access_count_delta = scoreboard['access_count']
- access_count_delta -= access_count
- access_count = scoreboard['access_count']
-
- harvest_data['access_count'] += access_count_delta
-
- bytes_served_delta = scoreboard['bytes_served']
- bytes_served_delta -= bytes_served
- bytes_served = scoreboard['bytes_served']
-
- harvest_data['bytes_served'] += bytes_served_delta
-
- now = time.time()
-
- if now >= end:
- harvest_data['period_end'] = harvest_end
-
- self.report_queue.put(harvest_data)
-
- harvest_start = harvest_end
- end += 60.0
-
- _harvest_data = {}
-
- _harvest_data['sample_count'] = 0
- _harvest_data['period_start'] = harvest_start
-
- _harvest_data['metrics'] = Samples()
-
- _harvest_data['request_samples'] = []
-
- _harvest_data['active_processes'] = (
- harvest_data['active_processes'])
-
- _harvest_data['processes_running'] = 0
- _harvest_data['processes_started'] = 0
- _harvest_data['processes_stopped'] = 0
-
- _harvest_data['idle_workers'] = 0
- _harvest_data['busy_workers'] = 0
-
- _harvest_data['server_limit'] = scoreboard['server_limit']
- _harvest_data['thread_limit'] = scoreboard['thread_limit']
-
- _harvest_data['worker_status'] = {}
-
- for status in STATUS_FLAGS.keys():
- _harvest_data['worker_status'][status] = 0
-
- _harvest_data['access_count'] = 0
- _harvest_data['bytes_served'] = 0
-
- harvest_data = _harvest_data
-
- sample_start = sample_end
-
- def terminate(self):
- try:
- self.report_queue.put(None)
- self.monitor_queue.put(None)
- except Exception:
- pass
-
- self.monitor_thread.join()
- self.report_thread.join()
-
- def start(self):
- with self.lock:
- if not self.running:
- self.running = True
- atexit.register(self.terminate)
- self.monitor_thread.start()
- self.report_thread.start()
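
To make the statistics concrete, a small worked example of how
Sample.merge_value() accumulated per-request durations, and how the sorted
samples were then indexed for the percentile metrics (values illustrative):

    s = Sample()
    for duration in (0.2, 0.5, 0.3):
        s.merge_value(duration)

    # s.count == 3, s.total == 1.0, s.min == 0.2, s.max == 0.5
    # s.sum_of_squares == 0.04 + 0.25 + 0.09 == 0.38

    durations = sorted((0.2, 0.5, 0.3))
    median = durations[int(0.50 * len(durations))]    # durations[1] == 0.3

Note that with few samples int(0.95 * n) and int(0.99 * n) land on the same
element, so the 95% and 99% percentile metrics only begin to differ once
enough requests are sampled within a harvest period.
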
diff --git a/src/server/wsgi_interp.c b/src/server/wsgi_interp.c
index 7dd460c..f74eb9e 100644
--- a/src/server/wsgi_interp.c
+++ b/src/server/wsgi_interp.c
@@ -1070,7 +1070,10 @@ InterpreterObject *newInterpreterObject(const char *name)
#endif
PyModule_AddObject(module, "server_metrics", PyCFunction_New(
- &wsgi_process_server_metrics_method[0], NULL));
+ &wsgi_server_metrics_method[0], NULL));
+
+ PyModule_AddObject(module, "process_metrics", PyCFunction_New(
+ &wsgi_process_metrics_method[0], NULL));
/* Done with the 'mod_wsgi' module. */
@@ -1189,9 +1192,6 @@ InterpreterObject *newInterpreterObject(const char *name)
#endif
PyModule_AddObject(module, "build_date", object);
- PyModule_AddObject(module, "server_metrics", PyCFunction_New(
- &wsgi_apache_server_metrics_method[0], NULL));
-
/* Done with the 'apache' module. */
Py_DECREF(module);
diff --git a/src/server/wsgi_metrics.c b/src/server/wsgi_metrics.c
index 0f1c0b3..cee2031 100644
--- a/src/server/wsgi_metrics.c
+++ b/src/server/wsgi_metrics.c
@@ -80,7 +80,7 @@ double wsgi_end_request(void)
return wsgi_utilization_time(-1);
}
-static PyObject *wsgi_process_server_metrics(void)
+static PyObject *wsgi_process_metrics(void)
{
PyObject *result = NULL;
@@ -104,14 +104,14 @@ static PyObject *wsgi_process_server_metrics(void)
result = PyDict_New();
object = PyFloat_FromDouble(wsgi_utilization_time(0));
- PyDict_SetItemString(result, "thread_utilization", object);
+ PyDict_SetItemString(result, "utilization", object);
Py_DECREF(object);
return result;
}
-PyMethodDef wsgi_process_server_metrics_method[] = {
- { "server_metrics", (PyCFunction)wsgi_process_server_metrics,
+PyMethodDef wsgi_process_metrics_method[] = {
+ { "process_metrics", (PyCFunction)wsgi_process_metrics,
METH_NOARGS, 0 },
{ NULL },
};
@@ -146,7 +146,7 @@ static PyObject *wsgi_status_flags[SERVER_NUM_STATUS];
#define WSGI_CREATE_STATUS_FLAG(name, val) \
wsgi_status_flags[name] = wsgi_PyString_InternFromString(val)
-static PyObject *wsgi_apache_server_metrics(void)
+static PyObject *wsgi_server_metrics(void)
{
PyObject *scoreboard_dict = NULL;
@@ -400,8 +400,8 @@ static PyObject *wsgi_apache_server_metrics(void)
/* ------------------------------------------------------------------------- */
-PyMethodDef wsgi_apache_server_metrics_method[] = {
- { "server_metrics", (PyCFunction)wsgi_apache_server_metrics,
+PyMethodDef wsgi_server_metrics_method[] = {
+ { "server_metrics", (PyCFunction)wsgi_server_metrics,
METH_NOARGS, 0 },
{ NULL },
};
diff --git a/src/server/wsgi_metrics.h b/src/server/wsgi_metrics.h
index 6f2008d..2143806 100644
--- a/src/server/wsgi_metrics.h
+++ b/src/server/wsgi_metrics.h
@@ -31,12 +31,12 @@ extern int wsgi_dump_stack_traces;
extern apr_thread_mutex_t* wsgi_monitor_lock;
-extern PyMethodDef wsgi_process_server_metrics_method[];
+extern PyMethodDef wsgi_process_metrics_method[];
extern double wsgi_start_request(void);
extern double wsgi_end_request(void);
-extern PyMethodDef wsgi_apache_server_metrics_method[];
+extern PyMethodDef wsgi_server_metrics_method[];
/* ------------------------------------------------------------------------- */
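
The net effect of the C changes is a rename and a move: per-process metrics
are now published as mod_wsgi.process_metrics() (with the old
thread_utilization key shortened to utilization), while the Apache
scoreboard snapshot is exposed only as mod_wsgi.server_metrics(), no longer
via the apache module. A minimal sketch of the resulting Python-side usage
(matching the updated environ.wsgi test below):

    import mod_wsgi

    print(mod_wsgi.process_metrics()['utilization'])

    metrics = mod_wsgi.server_metrics()
    if metrics:
        print(len(metrics['processes']))
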
diff --git a/tests/environ.wsgi b/tests/environ.wsgi
index e31bd83..6ee1723 100644
--- a/tests/environ.wsgi
+++ b/tests/environ.wsgi
@@ -24,6 +24,10 @@ def application(environ, start_response):
print('GID: %s' % os.getgid(), file=output)
print(file=output)
+ print('apache.version: %r' % (apache.version,), file=output)
+ print('mod_wsgi.version: %r' % (mod_wsgi.version,), file=output)
+ print(file=output)
+
print('mod_wsgi.process_group: %s' % mod_wsgi.process_group,
file=output)
print('mod_wsgi.application_group: %s' % mod_wsgi.application_group,
@@ -34,10 +38,21 @@ def application(environ, start_response):
file=output)
print('mod_wsgi.threads_per_process: %s' % mod_wsgi.threads_per_process,
file=output)
+ print('mod_wsgi.process_metrics: %s' % mod_wsgi.process_metrics(),
+ file=output)
print('mod_wsgi.server_metrics: %s' % mod_wsgi.server_metrics(),
file=output)
print(file=output)
+ metrics = mod_wsgi.server_metrics()
+
+ if metrics:
+ for process in metrics['processes']:
+ for worker in process['workers']:
+ print(worker['status'], file=output, end='')
+ print(file=output)
+ print(file=output)
+
print('apache.description: %s' % apache.description, file=output)
print('apache.build_date: %s' % apache.build_date, file=output)
print('apache.mpm_name: %s' % apache.mpm_name, file=output)
@@ -45,19 +60,8 @@ def application(environ, start_response):
file=output)
print('apache.threads_per_process: %s' % apache.threads_per_process,
file=output)
- print('apache.server_metrics: %s' % apache.server_metrics(),
- file=output)
print(file=output)
- scoreboard = apache.server_metrics()
-
- if scoreboard:
- for process in scoreboard['processes']:
- for worker in process['workers']:
- print(worker['status'], file=output, end='')
- print(file=output)
- print(file=output)
-
print('PATH: %s' % sys.path, file=output)
print(file=output)
diff --git a/tox.ini b/tox.ini
index 6cbcac4..e1cf3c3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,4 +2,6 @@
envlist = py26,py27,py33
[testenv]
-deps = newrelic
+deps =
+ newrelic
+ mod_wsgi-metrics