author     Jason R. Coombs <jaraco@jaraco.com>  2012-12-26 13:43:16 -0500
committer  Jason R. Coombs <jaraco@jaraco.com>  2012-12-26 13:43:16 -0500
commit     48c7cb66a71efbb6f0d95ccc8038ea6971fbac45 (patch)
tree       80f04a7d29b00756f16cb88b24ba5ec9be7a0175
parent     21a591d64a2c7f180ffcea1a8e4236a20a964657 (diff)
parent     d1094379b70094507c8303546fad9eed1b31432c (diff)
download   cherrypy-48c7cb66a71efbb6f0d95ccc8038ea6971fbac45.tar.gz
Merge 3.2.3
-rw-r--r--  cherrypy/__init__.py  4
-rw-r--r--  cherrypy/_cpdispatch.py  4
-rw-r--r--  cherrypy/_cpreqbody.py  2
-rw-r--r--  cherrypy/_cptools.py  4
-rw-r--r--  cherrypy/lib/auth_digest.py  30
-rw-r--r--  cherrypy/lib/caching.py  2
-rw-r--r--  cherrypy/lib/httputil.py  2
-rw-r--r--  cherrypy/lib/reprconf.py  140
-rw-r--r--  cherrypy/lib/sessions.py  240
-rw-r--r--  cherrypy/process/plugins.py  2
-rw-r--r--  cherrypy/process/wspbus.py  6
-rw-r--r--  cherrypy/test/test_config_server.py  2
-rw-r--r--  cherrypy/test/test_conn.py  2
-rw-r--r--  cherrypy/test/test_core.py  2
-rw-r--r--  cherrypy/test/test_http.py  2
-rw-r--r--  cherrypy/test/test_mime.py  2
-rw-r--r--  cherrypy/test/test_objectmapping.py  4
-rw-r--r--  cherrypy/test/test_proxy.py  2
-rw-r--r--  cherrypy/test/test_request_obj.py  10
-rw-r--r--  cherrypy/test/test_xmlrpc.py  2
-rw-r--r--  cherrypy/wsgiserver/wsgiserver2.py  530
-rw-r--r--  cherrypy/wsgiserver/wsgiserver3.py  526
-rw-r--r--  setup.cfg  9
-rw-r--r--  sphinx/source/concepts/config.rst  2
-rw-r--r--  sphinx/source/concepts/dispatching.rst  26
-rw-r--r--  sphinx/source/refman/process/plugins/dropprivileges.rst  2
26 files changed, 789 insertions, 770 deletions
diff --git a/cherrypy/__init__.py b/cherrypy/__init__.py
index 395c1972..92fe62ce 100644
--- a/cherrypy/__init__.py
+++ b/cherrypy/__init__.py
@@ -54,7 +54,7 @@ with customized or extended components. The core API's are:
* WSGI API
These API's are described in the CherryPy specification:
-http://www.cherrypy.org/wiki/CherryPySpec
+https://bitbucket.org/cherrypy/cherrypy/wiki/CherryPySpec
"""
__version__ = "3.2.3"
@@ -295,7 +295,7 @@ class _GlobalLogManager(_cplogging.LogManager):
def __call__(self, *args, **kwargs):
"""Log the given message to the app.log or global log as appropriate."""
- # Do NOT use try/except here. See http://www.cherrypy.org/ticket/945
+ # Do NOT use try/except here. See https://bitbucket.org/cherrypy/cherrypy/issue/945
if hasattr(request, 'app') and hasattr(request.app, 'log'):
log = request.app.log
else:
diff --git a/cherrypy/_cpdispatch.py b/cherrypy/_cpdispatch.py
index e92d9306..a6774cd1 100644
--- a/cherrypy/_cpdispatch.py
+++ b/cherrypy/_cpdispatch.py
@@ -379,7 +379,7 @@ class Dispatcher(object):
conf = getattr(defhandler, "_cp_config", {})
object_trail.insert(i+1, ["default", defhandler, conf, segleft])
request.config = set_conf()
- # See http://www.cherrypy.org/ticket/613
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/613
request.is_index = path.endswith("/")
return defhandler, fullpath[fullpath_len - segleft:-1]
@@ -625,7 +625,7 @@ def VirtualHost(next_dispatcher=Dispatcher(), use_x_forwarded_host=True, **domai
result = next_dispatcher(path_info)
- # Touch up staticdir config. See http://www.cherrypy.org/ticket/614.
+ # Touch up staticdir config. See https://bitbucket.org/cherrypy/cherrypy/issue/614.
section = request.config.get('tools.staticdir.section')
if section:
section = section[len(prefix):]
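[Editor's note, not part of the commit: a minimal sketch of how the VirtualHost dispatcher patched above is typically wired up. The host names and mount prefixes are hypothetical, and apps are assumed to be mounted at those prefixes elsewhere.]

import cherrypy

# Map the request's Host (or X-Forwarded-Host) header to a URL prefix which
# VirtualHost prepends before handing off to the normal Dispatcher.
conf = {'/': {
    'request.dispatch': cherrypy.dispatch.VirtualHost(
        **{'www.example.com': '/app',
           'blog.example.com': '/blog'}),
}}

The staticdir touch-up in the hunk above exists because that prepended prefix shifts the effective tools.staticdir.section path.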
diff --git a/cherrypy/_cpreqbody.py b/cherrypy/_cpreqbody.py
index 9ee8d846..04ad7e38 100644
--- a/cherrypy/_cpreqbody.py
+++ b/cherrypy/_cpreqbody.py
@@ -895,7 +895,7 @@ class RequestBody(Entity):
"""The buffer size used when reading the socket."""
# Don't parse the request body at all if the client didn't provide
- # a Content-Type header. See http://www.cherrypy.org/ticket/790
+ # a Content-Type header. See https://bitbucket.org/cherrypy/cherrypy/issue/790
default_content_type = ''
"""This defines a default ``Content-Type`` to use if no Content-Type header
is given. The empty string is used for RequestBody, which results in the
diff --git a/cherrypy/_cptools.py b/cherrypy/_cptools.py
index 2f24e65f..97faec2f 100644
--- a/cherrypy/_cptools.py
+++ b/cherrypy/_cptools.py
@@ -258,7 +258,7 @@ class SessionTool(Tool):
When 'early', the session will be locked before reading the request
body. This is off by default for safety reasons; for example,
a large upload would block the session, denying an AJAX
- progress meter (see http://www.cherrypy.org/ticket/630).
+ progress meter (see https://bitbucket.org/cherrypy/cherrypy/issue/630).
When 'explicit' (or any other value), you need to call
cherrypy.session.acquire_lock() yourself before using
@@ -364,7 +364,7 @@ class XMLRPCController(object):
body = subhandler(*(vpath + rpcparams), **params)
else:
- # http://www.cherrypy.org/ticket/533
+ # https://bitbucket.org/cherrypy/cherrypy/issue/533
# if a method is not found, an xmlrpclib.Fault should be returned
# raising an exception here will do that; see
# cherrypy.lib.xmlrpcutil.on_error
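[Editor's note, not part of the commit: the SessionTool docstring above describes its locking modes. A hedged sketch of explicit locking follows; the tools.sessions.locking config key is an assumption based on that docstring, while acquire_lock()/release_lock() are named in it directly.]

import cherrypy

class App(object):
    def upload(self):
        # With explicit locking, a long-running handler locks the session
        # only around the critical section instead of for the whole request.
        cherrypy.session.acquire_lock()
        try:
            cherrypy.session['status'] = 'upload in progress'
        finally:
            cherrypy.session.release_lock()
        return "ok"
    upload.exposed = True

conf = {'/': {
    'tools.sessions.on': True,
    'tools.sessions.locking': 'explicit',  # assumed key; see docstring above
}}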
diff --git a/cherrypy/lib/auth_digest.py b/cherrypy/lib/auth_digest.py
index 67578e00..4bdaccd6 100644
--- a/cherrypy/lib/auth_digest.py
+++ b/cherrypy/lib/auth_digest.py
@@ -67,7 +67,7 @@ def get_ha1_dict(user_ha1_dict):
argument to digest_auth().
"""
def get_ha1(realm, username):
- return user_ha1_dict.get(user)
+ return user_ha1_dict.get(username)
return get_ha1
@@ -107,10 +107,10 @@ def synthesize_nonce(s, key, timestamp=None):
key
A secret string known only to the server.
-
+
timestamp
An integer seconds-since-the-epoch timestamp
-
+
"""
if timestamp is None:
timestamp = int(time.time())
@@ -190,10 +190,10 @@ class HttpDigestAuthorization (object):
s
A string related to the resource, such as the hostname of the server.
-
+
key
A secret string known only to the server.
-
+
Both s and key must be the same values which were used to synthesize the nonce
we are trying to validate.
"""
@@ -256,7 +256,7 @@ class HttpDigestAuthorization (object):
4.3. This refers to the entity the user agent sent in the request which
has the Authorization header. Typically GET requests don't have an entity,
and POST requests do.
-
+
"""
ha2 = self.HA2(entity_body)
# Request-Digest -- RFC 2617 3.2.2.1
@@ -302,16 +302,16 @@ def www_authenticate(realm, key, algorithm='MD5', nonce=None, qop=qop_auth, stal
def digest_auth(realm, get_ha1, key, debug=False):
"""A CherryPy tool which hooks at before_handler to perform
HTTP Digest Access Authentication, as specified in :rfc:`2617`.
-
+
If the request has an 'authorization' header with a 'Digest' scheme, this
tool authenticates the credentials supplied in that header. If
the request has no 'authorization' header, or if it does but the scheme is
not "Digest", or if authentication fails, the tool sends a 401 response with
a 'WWW-Authenticate' Digest header.
-
+
realm
A string containing the authentication realm.
-
+
get_ha1
A callable which looks up a username in a credentials store
and returns the HA1 string, which is defined in the RFC to be
@@ -320,13 +320,13 @@ def digest_auth(realm, get_ha1, key, debug=False):
where username is obtained from the request's 'authorization' header.
If username is not found in the credentials store, get_ha1() returns
None.
-
+
key
A secret string known only to the server, used in the synthesis of nonces.
-
+
"""
request = cherrypy.serving.request
-
+
auth_header = request.headers.get('authorization')
nonce_is_stale = False
if auth_header is not None:
@@ -334,10 +334,10 @@ def digest_auth(realm, get_ha1, key, debug=False):
auth = HttpDigestAuthorization(auth_header, request.method, debug=debug)
except ValueError:
raise cherrypy.HTTPError(400, "The Authorization header could not be parsed.")
-
+
if debug:
TRACE(str(auth))
-
+
if auth.validate_nonce(realm, key):
ha1 = get_ha1(realm, auth.username)
if ha1 is not None:
@@ -355,7 +355,7 @@ def digest_auth(realm, get_ha1, key, debug=False):
if debug:
TRACE("authentication of %s successful" % auth.username)
return
-
+
# Respond with 401 status and a WWW-Authenticate header
header = www_authenticate(realm, key, stale=nonce_is_stale)
if debug:
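[Editor's note, not part of the commit: the substantive change in auth_digest.py is that the get_ha1_dict closure now looks up username instead of the undefined user; the rest is trailing-whitespace cleanup. Below is a hedged sketch of enabling the tool. The realm, key, and HA1 value are placeholders, and the tools.auth_digest.* keys assume the tool registration used by CherryPy 3.2.]

import cherrypy
from cherrypy.lib import auth_digest

# HA1 is md5("username:realm:password"), precomputed and stored server-side;
# get_ha1_dict (fixed above) returns a lookup over such a dict by username.
users_ha1 = {'jdoe': '0123456789abcdef0123456789abcdef'}  # placeholder hash

conf = {'/protected': {
    'tools.auth_digest.on': True,
    'tools.auth_digest.realm': 'localhost',
    'tools.auth_digest.get_ha1': auth_digest.get_ha1_dict(users_ha1),
    'tools.auth_digest.key': 'a-secret-known-only-to-the-server',
}}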
diff --git a/cherrypy/lib/caching.py b/cherrypy/lib/caching.py
index fd6a2c98..42626fc5 100644
--- a/cherrypy/lib/caching.py
+++ b/cherrypy/lib/caching.py
@@ -348,7 +348,7 @@ def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs):
request.cacheable = True
return False
- # Copy the response headers. See http://www.cherrypy.org/ticket/721.
+ # Copy the response headers. See https://bitbucket.org/cherrypy/cherrypy/issue/721.
response.headers = rh = httputil.HeaderMap()
for k in h:
dict.__setitem__(rh, k, dict.__getitem__(h, k))
diff --git a/cherrypy/lib/httputil.py b/cherrypy/lib/httputil.py
index 51342820..5d5cffbf 100644
--- a/cherrypy/lib/httputil.py
+++ b/cherrypy/lib/httputil.py
@@ -12,7 +12,7 @@ from cherrypy._cpcompat import BaseHTTPRequestHandler, HTTPDate, ntob, ntou, rev
from cherrypy._cpcompat import basestring, bytestr, iteritems, nativestr, unicodestr, unquote_qs
response_codes = BaseHTTPRequestHandler.responses.copy()
-# From http://www.cherrypy.org/ticket/361
+# From https://bitbucket.org/cherrypy/cherrypy/issue/361
response_codes[500] = ('Internal Server Error',
'The server encountered an unexpected condition '
'which prevented it from fulfilling the request.')
diff --git a/cherrypy/lib/reprconf.py b/cherrypy/lib/reprconf.py
index 83ca78f2..3ec47e7f 100644
--- a/cherrypy/lib/reprconf.py
+++ b/cherrypy/lib/reprconf.py
@@ -55,25 +55,25 @@ def as_dict(config):
class NamespaceSet(dict):
"""A dict of config namespace names and handlers.
-
+
Each config entry should begin with a namespace name; the corresponding
namespace handler will be called once for each config entry in that
namespace, and will be passed two arguments: the config key (with the
namespace removed) and the config value.
-
+
Namespace handlers may be any Python callable; they may also be
Python 2.5-style 'context managers', in which case their __enter__
method should return a callable to be used as the handler.
See cherrypy.tools (the Toolbox class) for an example.
"""
-
+
def __call__(self, config):
"""Iterate through config and pass it to each namespace handler.
-
+
config
A flat dict, where keys use dots to separate
namespaces, and values are arbitrary.
-
+
The first name in each config key is used to look up the corresponding
namespace handler. For example, a config entry of {'tools.gzip.on': v}
will call the 'tools' namespace handler with the args: ('gzip.on', v)
@@ -85,7 +85,7 @@ class NamespaceSet(dict):
ns, name = k.split(".", 1)
bucket = ns_confs.setdefault(ns, {})
bucket[name] = config[k]
-
+
# I chose __enter__ and __exit__ so someday this could be
# rewritten using Python 2.5's 'with' statement:
# for ns, handler in self.iteritems():
@@ -116,11 +116,11 @@ class NamespaceSet(dict):
else:
for k, v in ns_confs.get(ns, {}).items():
handler(k, v)
-
+
def __repr__(self):
return "%s.%s(%s)" % (self.__module__, self.__class__.__name__,
dict.__repr__(self))
-
+
def __copy__(self):
newobj = self.__class__()
newobj.update(self)
@@ -130,26 +130,26 @@ class NamespaceSet(dict):
class Config(dict):
"""A dict-like set of configuration data, with defaults and namespaces.
-
+
May take a file, filename, or dict.
"""
-
+
defaults = {}
environments = {}
namespaces = NamespaceSet()
-
+
def __init__(self, file=None, **kwargs):
self.reset()
if file is not None:
self.update(file)
if kwargs:
self.update(kwargs)
-
+
def reset(self):
"""Reset self to default values."""
self.clear()
dict.update(self, self.defaults)
-
+
def update(self, config):
"""Update self from a dict, file or filename."""
if isinstance(config, basestring):
@@ -161,7 +161,7 @@ class Config(dict):
else:
config = config.copy()
self._apply(config)
-
+
def _apply(self, config):
"""Update self from a dict."""
which_env = config.get('environment')
@@ -170,23 +170,23 @@ class Config(dict):
for k in env:
if k not in config:
config[k] = env[k]
-
+
dict.update(self, config)
self.namespaces(config)
-
+
def __setitem__(self, k, v):
dict.__setitem__(self, k, v)
self.namespaces({k: v})
class Parser(ConfigParser):
- """Sub-class of ConfigParser that keeps the case of options and that
+ """Sub-class of ConfigParser that keeps the case of options and that
raises an exception if the file cannot be read.
"""
-
+
def optionxform(self, optionstr):
return optionstr
-
+
def read(self, filenames):
if isinstance(filenames, basestring):
filenames = [filenames]
@@ -200,7 +200,7 @@ class Parser(ConfigParser):
self._read(fp, filename)
finally:
fp.close()
-
+
def as_dict(self, raw=False, vars=None):
"""Convert an INI file to a dictionary"""
# Load INI file into a dict
@@ -220,7 +220,7 @@ class Parser(ConfigParser):
raise ValueError(msg, x.__class__.__name__, x.args)
result[section][option] = value
return result
-
+
def dict_from_file(self, file):
if hasattr(file, 'read'):
self.readfp(file)
@@ -233,14 +233,14 @@ class Parser(ConfigParser):
class _Builder2:
-
+
def build(self, o):
m = getattr(self, 'build_' + o.__class__.__name__, None)
if m is None:
raise TypeError("unrepr does not recognize %s" %
repr(o.__class__.__name__))
return m(o)
-
+
def astnode(self, s):
"""Return a Python2 ast Node compiled from a string."""
try:
@@ -249,16 +249,16 @@ class _Builder2:
# Fallback to eval when compiler package is not available,
# e.g. IronPython 1.0.
return eval(s)
-
+
p = compiler.parse("__tempvalue__ = " + s)
return p.getChildren()[1].getChildren()[0].getChildren()[1]
-
+
def build_Subscript(self, o):
expr, flags, subs = o.getChildren()
expr = self.build(expr)
subs = self.build(subs)
return expr[subs]
-
+
def build_CallFunc(self, o):
children = map(self.build, o.getChildren())
callee = children.pop(0)
@@ -266,23 +266,23 @@ class _Builder2:
starargs = children.pop() or ()
args = tuple(children) + tuple(starargs)
return callee(*args, **kwargs)
-
+
def build_List(self, o):
return map(self.build, o.getChildren())
-
+
def build_Const(self, o):
return o.value
-
+
def build_Dict(self, o):
d = {}
i = iter(map(self.build, o.getChildren()))
for el in i:
d[el] = i.next()
return d
-
+
def build_Tuple(self, o):
return tuple(self.build_List(o))
-
+
def build_Name(self, o):
name = o.name
if name == 'None':
@@ -291,21 +291,21 @@ class _Builder2:
return True
if name == 'False':
return False
-
+
# See if the Name is a package or module. If it is, import it.
try:
return modules(name)
except ImportError:
pass
-
+
# See if the Name is in builtins.
try:
return getattr(builtins, name)
except AttributeError:
pass
-
+
raise TypeError("unrepr could not resolve the name %s" % repr(name))
-
+
def build_Add(self, o):
left, right = map(self.build, o.getChildren())
return left + right
@@ -313,30 +313,30 @@ class _Builder2:
def build_Mul(self, o):
left, right = map(self.build, o.getChildren())
return left * right
-
+
def build_Getattr(self, o):
parent = self.build(o.expr)
return getattr(parent, o.attrname)
-
+
def build_NoneType(self, o):
return None
-
+
def build_UnarySub(self, o):
return -self.build(o.getChildren()[0])
-
+
def build_UnaryAdd(self, o):
return self.build(o.getChildren()[0])
class _Builder3:
-
+
def build(self, o):
m = getattr(self, 'build_' + o.__class__.__name__, None)
if m is None:
raise TypeError("unrepr does not recognize %s" %
repr(o.__class__.__name__))
return m(o)
-
+
def astnode(self, s):
"""Return a Python3 ast Node compiled from a string."""
try:
@@ -351,46 +351,46 @@ class _Builder3:
def build_Subscript(self, o):
return self.build(o.value)[self.build(o.slice)]
-
+
def build_Index(self, o):
return self.build(o.value)
-
+
def build_Call(self, o):
callee = self.build(o.func)
-
+
if o.args is None:
args = ()
- else:
- args = tuple([self.build(a) for a in o.args])
-
+ else:
+ args = tuple([self.build(a) for a in o.args])
+
if o.starargs is None:
starargs = ()
else:
starargs = self.build(o.starargs)
-
+
if o.kwargs is None:
kwargs = {}
else:
kwargs = self.build(o.kwargs)
-
+
return callee(*(args + starargs), **kwargs)
-
+
def build_List(self, o):
return list(map(self.build, o.elts))
-
+
def build_Str(self, o):
return o.s
-
+
def build_Num(self, o):
return o.n
-
+
def build_Dict(self, o):
return dict([(self.build(k), self.build(v))
for k, v in zip(o.keys, o.values)])
-
+
def build_Tuple(self, o):
return tuple(self.build_List(o))
-
+
def build_Name(self, o):
name = o.id
if name == 'None':
@@ -399,28 +399,28 @@ class _Builder3:
return True
if name == 'False':
return False
-
+
# See if the Name is a package or module. If it is, import it.
try:
return modules(name)
except ImportError:
pass
-
+
# See if the Name is in builtins.
try:
import builtins
return getattr(builtins, name)
except AttributeError:
pass
-
+
raise TypeError("unrepr could not resolve the name %s" % repr(name))
-
+
def build_UnaryOp(self, o):
op, operand = map(self.build, [o.op, o.operand])
return op(operand)
-
+
def build_BinOp(self, o):
- left, op, right = map(self.build, [o.left, o.op, o.right])
+ left, op, right = map(self.build, [o.left, o.op, o.right])
return op(left, right)
def build_Add(self, o):
@@ -428,7 +428,7 @@ class _Builder3:
def build_Mult(self, o):
return _operator.mul
-
+
def build_USub(self, o):
return _operator.neg
@@ -454,23 +454,17 @@ def unrepr(s):
def modules(modulePath):
"""Load a module and retrieve a reference to that module."""
- try:
- mod = sys.modules[modulePath]
- if mod is None:
- raise KeyError()
- except KeyError:
- __import__(modulePath)
- mod = sys.modules[modulePath]
- return mod
+ __import__(modulePath)
+ return sys.modules[modulePath]
def attributes(full_attribute_name):
"""Load a module and retrieve an attribute of that module."""
-
+
# Parse out the path, module, and attribute
last_dot = full_attribute_name.rfind(".")
attr_name = full_attribute_name[last_dot + 1:]
mod_path = full_attribute_name[:last_dot]
-
+
mod = modules(mod_path)
# Let an AttributeError propagate outward.
try:
@@ -478,7 +472,7 @@ def attributes(full_attribute_name):
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
-
+
# Return a reference to the attribute.
return attr
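[Editor's note, not part of the commit: most of the reprconf.py hunk is whitespace cleanup; the functional change is modules(), which now simply imports and returns the module. For context, a small hedged sketch of the helpers this file provides; return values in comments are the expected results.]

from cherrypy.lib.reprconf import unrepr, modules, attributes

unrepr("[1, 2, 'three']")   # -> [1, 2, 'three'] (restricted literal eval)
unrepr("1024 * 8")          # -> 8192 (BinOp handled by the builders above)
modules('os.path')          # -> the os.path module, imported on demand
attributes('os.path.join')  # -> the join function resolved from that module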
diff --git a/cherrypy/lib/sessions.py b/cherrypy/lib/sessions.py
index 9763f120..d45118ca 100644
--- a/cherrypy/lib/sessions.py
+++ b/cherrypy/lib/sessions.py
@@ -101,12 +101,12 @@ missing = object()
class Session(object):
"""A CherryPy dict-like Session object (one per request)."""
-
+
_id = None
-
+
id_observers = None
"A list of callbacks to which to pass new id's."
-
+
def _get_id(self):
return self._id
def _set_id(self, value):
@@ -114,46 +114,46 @@ class Session(object):
for o in self.id_observers:
o(value)
id = property(_get_id, _set_id, doc="The current session ID.")
-
+
timeout = 60
"Number of minutes after which to delete session data."
-
+
locked = False
"""
If True, this session instance has exclusive read/write access
to session data."""
-
+
loaded = False
"""
If True, data has been retrieved from storage. This should happen
automatically on the first attempt to access session data."""
-
+
clean_thread = None
"Class-level Monitor which calls self.clean_up."
-
+
clean_freq = 5
"The poll rate for expired session cleanup in minutes."
-
+
originalid = None
"The session id passed by the client. May be missing or unsafe."
-
+
missing = False
"True if the session requested by the client did not exist."
-
+
regenerated = False
"""
True if the application called session.regenerate(). This is not set by
internal calls to regenerate the session id."""
-
+
debug=False
-
+
def __init__(self, id=None, **kwargs):
self.id_observers = []
self._data = {}
-
+
for k, v in kwargs.items():
setattr(self, k, v)
-
+
self.originalid = id
self.missing = False
if id is None:
@@ -167,7 +167,7 @@ class Session(object):
cherrypy.log('Expired or malicious session %r; '
'making a new one' % id, 'TOOLS.SESSIONS')
# Expired or malicious session. Make a new one.
- # See http://www.cherrypy.org/ticket/709.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/709.
self.id = None
self.missing = True
self._regenerate()
@@ -184,33 +184,33 @@ class Session(object):
"""Replace the current session (with a new id)."""
self.regenerated = True
self._regenerate()
-
+
def _regenerate(self):
if self.id is not None:
self.delete()
-
+
old_session_was_locked = self.locked
if old_session_was_locked:
self.release_lock()
-
+
self.id = None
while self.id is None:
self.id = self.generate_id()
# Assert that the generated id is not already stored.
if self._exists():
self.id = None
-
+
if old_session_was_locked:
self.acquire_lock()
-
+
def clean_up(self):
"""Clean up expired sessions."""
pass
-
+
def generate_id(self):
"""Return a new session id."""
return random20()
-
+
def save(self):
"""Save session data."""
try:
@@ -223,12 +223,12 @@ class Session(object):
cherrypy.log('Saving with expiry %s' % expiration_time,
'TOOLS.SESSIONS')
self._save(expiration_time)
-
+
finally:
if self.locked:
# Always release the lock if the user didn't release it
self.release_lock()
-
+
def load(self):
"""Copy stored session data into this session instance."""
data = self._load()
@@ -240,7 +240,7 @@ class Session(object):
else:
self._data = data[0]
self.loaded = True
-
+
# Stick the clean_thread in the class, not the instance.
# The instances are created and destroyed per-request.
cls = self.__class__
@@ -253,23 +253,23 @@ class Session(object):
t.subscribe()
cls.clean_thread = t
t.start()
-
+
def delete(self):
"""Delete stored session data."""
self._delete()
-
+
def __getitem__(self, key):
if not self.loaded: self.load()
return self._data[key]
-
+
def __setitem__(self, key, value):
if not self.loaded: self.load()
self._data[key] = value
-
+
def __delitem__(self, key):
if not self.loaded: self.load()
del self._data[key]
-
+
def pop(self, key, default=missing):
"""Remove the specified key and return the corresponding value.
If key is not found, default is returned if given,
@@ -280,47 +280,47 @@ class Session(object):
return self._data.pop(key)
else:
return self._data.pop(key, default)
-
+
def __contains__(self, key):
if not self.loaded: self.load()
return key in self._data
-
+
if hasattr({}, 'has_key'):
def has_key(self, key):
"""D.has_key(k) -> True if D has a key k, else False."""
if not self.loaded: self.load()
return key in self._data
-
+
def get(self, key, default=None):
"""D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None."""
if not self.loaded: self.load()
return self._data.get(key, default)
-
+
def update(self, d):
"""D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]."""
if not self.loaded: self.load()
self._data.update(d)
-
+
def setdefault(self, key, default=None):
"""D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D."""
if not self.loaded: self.load()
return self._data.setdefault(key, default)
-
+
def clear(self):
"""D.clear() -> None. Remove all items from D."""
if not self.loaded: self.load()
self._data.clear()
-
+
def keys(self):
"""D.keys() -> list of D's keys."""
if not self.loaded: self.load()
return self._data.keys()
-
+
def items(self):
"""D.items() -> list of D's (key, value) pairs, as 2-tuples."""
if not self.loaded: self.load()
return self._data.items()
-
+
def values(self):
"""D.values() -> list of D's values."""
if not self.loaded: self.load()
@@ -328,11 +328,11 @@ class Session(object):
class RamSession(Session):
-
+
# Class-level objects. Don't rebind these!
cache = {}
locks = {}
-
+
def clean_up(self):
"""Clean up expired sessions."""
now = self.now()
@@ -346,34 +346,34 @@ class RamSession(Session):
del self.locks[id]
except KeyError:
pass
-
+
# added to remove obsolete lock objects
for id in list(self.locks):
if id not in self.cache:
self.locks.pop(id, None)
-
+
def _exists(self):
return self.id in self.cache
-
+
def _load(self):
return self.cache.get(self.id)
-
+
def _save(self, expiration_time):
self.cache[self.id] = (self._data, expiration_time)
-
+
def _delete(self):
self.cache.pop(self.id, None)
-
+
def acquire_lock(self):
"""Acquire an exclusive lock on the currently-loaded session data."""
self.locked = True
self.locks.setdefault(self.id, threading.RLock()).acquire()
-
+
def release_lock(self):
"""Release the lock on the currently-loaded session data."""
self.locks[self.id].release()
self.locked = False
-
+
def __len__(self):
"""Return the number of active sessions."""
return len(self.cache)
@@ -381,35 +381,35 @@ class RamSession(Session):
class FileSession(Session):
"""Implementation of the File backend for sessions
-
+
storage_path
The folder where session data will be saved. Each session
will be saved as pickle.dump(data, expiration_time) in its own file;
the filename will be self.SESSION_PREFIX + self.id.
-
+
"""
-
+
SESSION_PREFIX = 'session-'
LOCK_SUFFIX = '.lock'
pickle_protocol = pickle.HIGHEST_PROTOCOL
-
+
def __init__(self, id=None, **kwargs):
# The 'storage_path' arg is required for file-based sessions.
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
Session.__init__(self, id=id, **kwargs)
-
+
def setup(cls, **kwargs):
"""Set up the storage system for file-based sessions.
-
+
This should only be called once per process; this will be done
automatically when using sessions.init (as the built-in Tool does).
"""
# The 'storage_path' arg is required for file-based sessions.
kwargs['storage_path'] = os.path.abspath(kwargs['storage_path'])
-
+
for k, v in kwargs.items():
setattr(cls, k, v)
-
+
# Warn if any lock files exist at startup.
lockfiles = [fname for fname in os.listdir(cls.storage_path)
if (fname.startswith(cls.SESSION_PREFIX)
@@ -421,17 +421,17 @@ class FileSession(Session):
"manually delete the lockfiles found at %r."
% (len(lockfiles), plural, cls.storage_path))
setup = classmethod(setup)
-
+
def _get_file_path(self):
f = os.path.join(self.storage_path, self.SESSION_PREFIX + self.id)
if not os.path.abspath(f).startswith(self.storage_path):
raise cherrypy.HTTPError(400, "Invalid session id in cookie.")
return f
-
+
def _exists(self):
path = self._get_file_path()
return os.path.exists(path)
-
+
def _load(self, path=None):
if path is None:
path = self._get_file_path()
@@ -443,20 +443,20 @@ class FileSession(Session):
f.close()
except (IOError, EOFError):
return None
-
+
def _save(self, expiration_time):
f = open(self._get_file_path(), "wb")
try:
pickle.dump((self._data, expiration_time), f, self.pickle_protocol)
finally:
f.close()
-
+
def _delete(self):
try:
os.unlink(self._get_file_path())
except OSError:
pass
-
+
def acquire_lock(self, path=None):
"""Acquire an exclusive lock on the currently-loaded session data."""
if path is None:
@@ -468,17 +468,17 @@ class FileSession(Session):
except OSError:
time.sleep(0.1)
else:
- os.close(lockfd)
+ os.close(lockfd)
break
self.locked = True
-
+
def release_lock(self, path=None):
"""Release the lock on the currently-loaded session data."""
if path is None:
path = self._get_file_path()
os.unlink(path + self.LOCK_SUFFIX)
self.locked = False
-
+
def clean_up(self):
"""Clean up expired sessions."""
now = self.now()
@@ -500,7 +500,7 @@ class FileSession(Session):
os.unlink(path)
finally:
self.release_lock(path)
-
+
def __len__(self):
"""Return the number of active sessions."""
return len([fname for fname in os.listdir(self.storage_path)
@@ -517,40 +517,40 @@ class PostgresqlSession(Session):
data text,
expiration_time timestamp
)
-
+
You must provide your own get_db function.
"""
-
+
pickle_protocol = pickle.HIGHEST_PROTOCOL
-
+
def __init__(self, id=None, **kwargs):
Session.__init__(self, id, **kwargs)
self.cursor = self.db.cursor()
-
+
def setup(cls, **kwargs):
"""Set up the storage system for Postgres-based sessions.
-
+
This should only be called once per process; this will be done
automatically when using sessions.init (as the built-in Tool does).
"""
for k, v in kwargs.items():
setattr(cls, k, v)
-
+
self.db = self.get_db()
setup = classmethod(setup)
-
+
def __del__(self):
if self.cursor:
self.cursor.close()
self.db.commit()
-
+
def _exists(self):
# Select session data from table
self.cursor.execute('select data, expiration_time from session '
'where id=%s', (self.id,))
rows = self.cursor.fetchall()
return bool(rows)
-
+
def _load(self):
# Select session data from table
self.cursor.execute('select data, expiration_time from session '
@@ -558,34 +558,34 @@ class PostgresqlSession(Session):
rows = self.cursor.fetchall()
if not rows:
return None
-
+
pickled_data, expiration_time = rows[0]
data = pickle.loads(pickled_data)
return data, expiration_time
-
+
def _save(self, expiration_time):
pickled_data = pickle.dumps(self._data, self.pickle_protocol)
self.cursor.execute('update session set data = %s, '
'expiration_time = %s where id = %s',
(pickled_data, expiration_time, self.id))
-
+
def _delete(self):
self.cursor.execute('delete from session where id=%s', (self.id,))
-
+
def acquire_lock(self):
"""Acquire an exclusive lock on the currently-loaded session data."""
# We use the "for update" clause to lock the row
self.locked = True
self.cursor.execute('select id from session where id=%s for update',
(self.id,))
-
+
def release_lock(self):
"""Release the lock on the currently-loaded session data."""
# We just close the cursor and that will remove the lock
# introduced by the "for update" clause
self.cursor.close()
self.locked = False
-
+
def clean_up(self):
"""Clean up expired sessions."""
self.cursor.execute('delete from session where expiration_time < %s',
@@ -593,29 +593,29 @@ class PostgresqlSession(Session):
class MemcachedSession(Session):
-
+
# The most popular memcached client for Python isn't thread-safe.
# Wrap all .get and .set operations in a single lock.
mc_lock = threading.RLock()
-
+
# This is a seperate set of locks per session id.
locks = {}
-
+
servers = ['127.0.0.1:11211']
-
+
def setup(cls, **kwargs):
"""Set up the storage system for memcached-based sessions.
-
+
This should only be called once per process; this will be done
automatically when using sessions.init (as the built-in Tool does).
"""
for k, v in kwargs.items():
setattr(cls, k, v)
-
+
import memcache
cls.cache = memcache.Client(cls.servers)
setup = classmethod(setup)
-
+
def _get_id(self):
return self._id
def _set_id(self, value):
@@ -628,21 +628,21 @@ class MemcachedSession(Session):
for o in self.id_observers:
o(value)
id = property(_get_id, _set_id, doc="The current session ID.")
-
+
def _exists(self):
self.mc_lock.acquire()
try:
return bool(self.cache.get(self.id))
finally:
self.mc_lock.release()
-
+
def _load(self):
self.mc_lock.acquire()
try:
return self.cache.get(self.id)
finally:
self.mc_lock.release()
-
+
def _save(self, expiration_time):
# Send the expiration time as "Unix time" (seconds since 1/1/1970)
td = int(time.mktime(expiration_time.timetuple()))
@@ -652,20 +652,20 @@ class MemcachedSession(Session):
raise AssertionError("Session data for id %r not set." % self.id)
finally:
self.mc_lock.release()
-
+
def _delete(self):
self.cache.delete(self.id)
-
+
def acquire_lock(self):
"""Acquire an exclusive lock on the currently-loaded session data."""
self.locked = True
self.locks.setdefault(self.id, threading.RLock()).acquire()
-
+
def release_lock(self):
"""Release the lock on the currently-loaded session data."""
self.locks[self.id].release()
self.locked = False
-
+
def __len__(self):
"""Return the number of active sessions."""
raise NotImplementedError
@@ -675,17 +675,17 @@ class MemcachedSession(Session):
def save():
"""Save any changed session data."""
-
+
if not hasattr(cherrypy.serving, "session"):
return
request = cherrypy.serving.request
response = cherrypy.serving.response
-
+
# Guard against running twice
if hasattr(request, "_sessionsaved"):
return
request._sessionsaved = True
-
+
if response.stream:
# If the body is being streamed, we have to save the data
# *after* the response has been written out
@@ -712,59 +712,59 @@ def init(storage_type='ram', path=None, path_header=None, name='session_id',
timeout=60, domain=None, secure=False, clean_freq=5,
persistent=True, httponly=False, debug=False, **kwargs):
"""Initialize session object (using cookies).
-
+
storage_type
One of 'ram', 'file', 'postgresql', 'memcached'. This will be
used to look up the corresponding class in cherrypy.lib.sessions
globals. For example, 'file' will use the FileSession class.
-
+
path
The 'path' value to stick in the response cookie metadata.
-
+
path_header
If 'path' is None (the default), then the response
cookie 'path' will be pulled from request.headers[path_header].
-
+
name
The name of the cookie.
-
+
timeout
The expiration timeout (in minutes) for the stored session data.
If 'persistent' is True (the default), this is also the timeout
for the cookie.
-
+
domain
The cookie domain.
-
+
secure
If False (the default) the cookie 'secure' value will not
be set. If True, the cookie 'secure' value will be set (to 1).
-
+
clean_freq (minutes)
The poll rate for expired session cleanup.
-
+
persistent
If True (the default), the 'timeout' argument will be used
to expire the cookie. If False, the cookie will not have an expiry,
and the cookie will be a "session cookie" which expires when the
browser is closed.
-
+
httponly
If False (the default) the cookie 'httponly' value will not be set.
If True, the cookie 'httponly' value will be set (to 1).
-
+
Any additional kwargs will be bound to the new Session instance,
and may be specific to the storage type. See the subclass of Session
you're using for more information.
"""
-
+
request = cherrypy.serving.request
-
+
# Guard against running twice
if hasattr(request, "_session_init_flag"):
return
request._session_init_flag = True
-
+
# Check if request came with a session ID
id = None
if name in request.cookie:
@@ -772,14 +772,14 @@ def init(storage_type='ram', path=None, path_header=None, name='session_id',
if debug:
cherrypy.log('ID obtained from request.cookie: %r' % id,
'TOOLS.SESSIONS')
-
+
# Find the storage class and call setup (first time only).
storage_class = storage_type.title() + 'Session'
storage_class = globals()[storage_class]
if not hasattr(cherrypy, "session"):
if hasattr(storage_class, "setup"):
storage_class.setup(**kwargs)
-
+
# Create and attach a new Session instance to cherrypy.serving.
# It will possess a reference to (and lock, and lazily load)
# the requested session data.
@@ -791,11 +791,11 @@ def init(storage_type='ram', path=None, path_header=None, name='session_id',
"""Update the cookie every time the session id changes."""
cherrypy.serving.response.cookie[name] = id
sess.id_observers.append(update_cookie)
-
+
# Create cherrypy.session which will proxy to cherrypy.serving.session
if not hasattr(cherrypy, "session"):
cherrypy.session = cherrypy._ThreadLocalProxy('session')
-
+
if persistent:
cookie_timeout = timeout
else:
@@ -810,7 +810,7 @@ def init(storage_type='ram', path=None, path_header=None, name='session_id',
def set_response_cookie(path=None, path_header=None, name='session_id',
timeout=60, domain=None, secure=False, httponly=False):
"""Set a response cookie for the client.
-
+
path
the 'path' value to stick in the response cookie metadata.
@@ -843,7 +843,7 @@ def set_response_cookie(path=None, path_header=None, name='session_id',
cookie[name] = cherrypy.serving.session.id
cookie[name]['path'] = (path or cherrypy.serving.request.headers.get(path_header)
or '/')
-
+
# We'd like to use the "max-age" param as indicated in
# http://www.faqs.org/rfcs/rfc2109.html but IE doesn't
# save it to disk and the session is lost if people close
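[Editor's note, not part of the commit: the sessions.py hunk is again mostly whitespace plus the updated ticket link. Since init() above documents the tool's parameters, here is a hedged configuration sketch for file-backed sessions; the storage path is a placeholder and the keys mirror init()'s arguments through the standard tools.sessions namespace.]

import cherrypy

conf = {'/': {
    'tools.sessions.on': True,
    'tools.sessions.storage_type': 'file',              # selects FileSession
    'tools.sessions.storage_path': '/tmp/cp_sessions',  # placeholder path
    'tools.sessions.timeout': 60,                       # minutes, as documented
    'tools.sessions.httponly': True,
}}

class Root(object):
    def count(self):
        # Session data acts like a dict; it is loaded lazily on first access.
        cherrypy.session['hits'] = cherrypy.session.get('hits', 0) + 1
        return str(cherrypy.session['hits'])
    count.exposed = True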
diff --git a/cherrypy/process/plugins.py b/cherrypy/process/plugins.py
index 7b27dd3d..677597a2 100644
--- a/cherrypy/process/plugins.py
+++ b/cherrypy/process/plugins.py
@@ -195,7 +195,7 @@ except ImportError:
class DropPrivileges(SimplePlugin):
"""Drop privileges. uid/gid arguments not available on Windows.
- Special thanks to Gavin Baker: http://antonym.org/node/100.
+ Special thanks to Gavin Baker: http://antonym.org/2005/12/dropping-privileges-in-python.html
"""
def __init__(self, bus, umask=None, uid=None, gid=None):
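[Editor's note, not part of the commit: only the attribution link changes in DropPrivileges. For reference, a hedged usage sketch; the uid/gid values are placeholders and, per the docstring, are unavailable on Windows.]

import cherrypy
from cherrypy.process.plugins import DropPrivileges

# Subscribe the plugin so privileges are dropped when the bus starts, after
# any privileged setup (such as binding a low port) has already happened.
DropPrivileges(cherrypy.engine, umask=0o022, uid=1001, gid=1001).subscribe()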
diff --git a/cherrypy/process/wspbus.py b/cherrypy/process/wspbus.py
index 3ef0217c..38288ee6 100644
--- a/cherrypy/process/wspbus.py
+++ b/cherrypy/process/wspbus.py
@@ -84,7 +84,7 @@ class ChannelFailures(Exception):
def __init__(self, *args, **kwargs):
# Don't use 'super' here; Exceptions are old-style in Py2.4
- # See http://www.cherrypy.org/ticket/959
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/959
Exception.__init__(self, *args, **kwargs)
self._exceptions = list()
@@ -311,10 +311,10 @@ class Bus(object):
raise
# Waiting for ALL child threads to finish is necessary on OS X.
- # See http://www.cherrypy.org/ticket/581.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/581.
# It's also good to let them all shut down before allowing
# the main thread to call atexit handlers.
- # See http://www.cherrypy.org/ticket/751.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/751.
self.log("Waiting for child threads to terminate...")
for t in threading.enumerate():
if t != threading.currentThread() and t.isAlive():
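[Editor's note, not part of the commit: the wspbus.py hunks concern shutdown ordering, with the bus waiting for all child threads before atexit handlers run. A hedged sketch of the publish/subscribe pattern behind this, with a hypothetical 'stop' listener.]

import cherrypy

def on_stop():
    # Runs when the bus publishes on the 'stop' channel, as part of the
    # shutdown sequence shown in the diff above.
    cherrypy.log("shutting down; flushing pending work")

cherrypy.engine.subscribe('stop', on_stop)
cherrypy.engine.start()
cherrypy.engine.block()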
diff --git a/cherrypy/test/test_config_server.py b/cherrypy/test/test_config_server.py
index 98a5b711..489904fa 100644
--- a/cherrypy/test/test_config_server.py
+++ b/cherrypy/test/test_config_server.py
@@ -90,7 +90,7 @@ class ServerConfigTests(helper.CPWebCase):
self.getPage("/", headers=[('From', "x" * 500)])
self.assertStatus(413)
- # Test for http://www.cherrypy.org/ticket/421
+ # Test for https://bitbucket.org/cherrypy/cherrypy/issue/421
# (Incorrect border condition in readline of SizeCheckWrapper).
# This hangs in rev 891 and earlier.
lines256 = "x" * 248
diff --git a/cherrypy/test/test_conn.py b/cherrypy/test/test_conn.py
index 1346f593..8f2512b7 100644
--- a/cherrypy/test/test_conn.py
+++ b/cherrypy/test/test_conn.py
@@ -182,7 +182,7 @@ class ConnectionCloseTests(helper.CPWebCase):
# Make another request on the same connection, which should error.
self.assertRaises(NotConnected, self.getPage, "/")
- # Try HEAD. See http://www.cherrypy.org/ticket/864.
+ # Try HEAD. See https://bitbucket.org/cherrypy/cherrypy/issue/864.
self.getPage("/stream", method='HEAD')
self.assertStatus('200 OK')
self.assertBody('')
diff --git a/cherrypy/test/test_core.py b/cherrypy/test/test_core.py
index 75a5310a..2c0a2552 100644
--- a/cherrypy/test/test_core.py
+++ b/cherrypy/test/test_core.py
@@ -398,7 +398,7 @@ class CoreRequestHandlingTest(helper.CPWebCase):
self.assertStatus(('302 Found', '303 See Other'))
# check injection protection
- # See http://www.cherrypy.org/ticket/1003
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/1003
self.getPage("/redirect/custom?code=303&url=/foobar/%0d%0aSet-Cookie:%20somecookie=someval")
self.assertStatus(303)
loc = self.assertHeader('Location')
diff --git a/cherrypy/test/test_http.py b/cherrypy/test/test_http.py
index 334065dd..ff3797fd 100644
--- a/cherrypy/test/test_http.py
+++ b/cherrypy/test/test_http.py
@@ -161,7 +161,7 @@ class HTTPTests(helper.CPWebCase):
c = HTTPConnection('%s:%s' % (self.interface(), self.PORT))
c.putrequest('GET', '/')
c.putheader('Content-Type', 'text/plain')
- # See http://www.cherrypy.org/ticket/941
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/941
c._output(ntob('Re, 1.2.3.4#015#012'))
c.endheaders()
diff --git a/cherrypy/test/test_mime.py b/cherrypy/test/test_mime.py
index 478b7760..03cd078e 100644
--- a/cherrypy/test/test_mime.py
+++ b/cherrypy/test/test_mime.py
@@ -69,7 +69,7 @@ This is the <strong>HTML</strong> version
'bar',
'--X',
# Test a param with more than one value.
- # See http://www.cherrypy.org/ticket/1028
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/1028
'Content-Disposition: form-data; name="baz"',
'',
'111',
diff --git a/cherrypy/test/test_objectmapping.py b/cherrypy/test/test_objectmapping.py
index 96910d51..86a0df33 100644
--- a/cherrypy/test/test_objectmapping.py
+++ b/cherrypy/test/test_objectmapping.py
@@ -268,7 +268,7 @@ class ObjectMappingTest(helper.CPWebCase):
self.assertStatus("404 Not Found")
# Make sure /foobar maps to Root.foobar and not to the app
- # mounted at /foo. See http://www.cherrypy.org/ticket/573
+ # mounted at /foo. See https://bitbucket.org/cherrypy/cherrypy/issue/573
self.getPage("/foobar")
self.assertBody("bar")
@@ -319,7 +319,7 @@ class ObjectMappingTest(helper.CPWebCase):
self.assertBody("default for dir1, param is:('dir2', '5', '3', 'sir')")
# test that extra positional args raises an 404 Not Found
- # See http://www.cherrypy.org/ticket/733.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/733.
self.getPage("/dir1/dir2/script_name/extra/stuff")
self.assertStatus(404)
diff --git a/cherrypy/test/test_proxy.py b/cherrypy/test/test_proxy.py
index 280a6b0b..a620115a 100644
--- a/cherrypy/test/test_proxy.py
+++ b/cherrypy/test/test_proxy.py
@@ -122,7 +122,7 @@ class ProxyTest(helper.CPWebCase):
self.getPage(sn + "/pageurl")
self.assertBody(expected)
- # Test trailing slash (see http://www.cherrypy.org/ticket/562).
+ # Test trailing slash (see https://bitbucket.org/cherrypy/cherrypy/issue/562).
self.getPage("/xhost/", headers=[('X-Host', 'www.example.test')])
self.assertHeader('Location', "%s://www.example.test/xhost"
% self.scheme)
diff --git a/cherrypy/test/test_request_obj.py b/cherrypy/test/test_request_obj.py
index 26df353a..3f1fc03c 100644
--- a/cherrypy/test/test_request_obj.py
+++ b/cherrypy/test/test_request_obj.py
@@ -178,7 +178,7 @@ class RequestObjectTests(helper.CPWebCase):
return cherrypy.request.headers[headername]
def doubledheaders(self):
- # From http://www.cherrypy.org/ticket/165:
+ # From https://bitbucket.org/cherrypy/cherrypy/issue/165:
# "header field names should not be case sensitive sayes the rfc.
# if i set a headerfield in complete lowercase i end up with two
# header fields, one in lowercase, the other in mixed-case."
@@ -575,7 +575,7 @@ class RequestObjectTests(helper.CPWebCase):
"en-gb;q=0.8\n"
"en;q=0.7")
- # Test malformed header parsing. See http://www.cherrypy.org/ticket/763.
+ # Test malformed header parsing. See https://bitbucket.org/cherrypy/cherrypy/issue/763.
self.getPage("/headerelements/get_elements?headername=Content-Type",
# Note the illegal trailing ";"
headers=[('Content-Type', 'text/html; charset=utf-8;')])
@@ -584,7 +584,7 @@ class RequestObjectTests(helper.CPWebCase):
def test_repeated_headers(self):
# Test that two request headers are collapsed into one.
- # See http://www.cherrypy.org/ticket/542.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/542.
self.getPage("/headers/Accept-Charset",
headers=[("Accept-Charset", "iso-8859-5"),
("Accept-Charset", "unicode-1-1;q=0.8")])
@@ -667,7 +667,7 @@ class RequestObjectTests(helper.CPWebCase):
self.assertBody(b)
# Request a PUT method with a file body but no Content-Type.
- # See http://www.cherrypy.org/ticket/790.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/790.
b = ntob("one thing on top of another")
self.persistent = True
try:
@@ -686,7 +686,7 @@ class RequestObjectTests(helper.CPWebCase):
self.persistent = False
# Request a PUT method with no body whatsoever (not an empty one).
- # See http://www.cherrypy.org/ticket/650.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/650.
# Provide a C-T or webtest will provide one (and a C-L) for us.
h = [("Content-Type", "text/plain")]
self.getPage("/method/reachable", headers=h, method="PUT")
diff --git a/cherrypy/test/test_xmlrpc.py b/cherrypy/test/test_xmlrpc.py
index 9d34ed52..90f28e9c 100644
--- a/cherrypy/test/test_xmlrpc.py
+++ b/cherrypy/test/test_xmlrpc.py
@@ -156,7 +156,7 @@ class XmlRpcTest(helper.CPWebCase):
else:
self.fail("Expected xmlrpclib.Fault")
- # http://www.cherrypy.org/ticket/533
+ # https://bitbucket.org/cherrypy/cherrypy/issue/533
# if a method is not found, an xmlrpclib.Fault should be raised
try:
proxy.non_method()
diff --git a/cherrypy/wsgiserver/wsgiserver2.py b/cherrypy/wsgiserver/wsgiserver2.py
index 25cd39d5..278743b0 100644
--- a/cherrypy/wsgiserver/wsgiserver2.py
+++ b/cherrypy/wsgiserver/wsgiserver2.py
@@ -4,24 +4,24 @@ Simplest example on how to use this module directly
(without using CherryPy's application machinery)::
from cherrypy import wsgiserver
-
+
def my_crazy_app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type','text/plain')]
start_response(status, response_headers)
return ['Hello world!']
-
+
server = wsgiserver.CherryPyWSGIServer(
('0.0.0.0', 8070), my_crazy_app,
server_name='www.cherrypy.example')
server.start()
-
-The CherryPy WSGI server can serve as many WSGI applications
+
+The CherryPy WSGI server can serve as many WSGI applications
as you want in one instance by using a WSGIPathInfoDispatcher::
-
+
d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app})
server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d)
-
+
Want SSL support? Just set server.ssl_adapter to an SSLAdapter instance.
This won't call the CherryPy engine (application side) at all, only the
@@ -148,7 +148,7 @@ import errno
def plat_specific_errors(*errnames):
"""Return error numbers for all errors in errnames on this platform.
-
+
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names.
@@ -192,31 +192,31 @@ if not hasattr(logging, 'statistics'): logging.statistics = {}
def read_headers(rfile, hdict=None):
"""Read headers from the given stream into the given header dict.
-
+
If hdict is None, a new header dict is created. Returns the populated
header dict.
-
+
Headers which are repeated are folded together using a comma if their
specification so dictates.
-
+
This function raises ValueError when the read bytes violate the HTTP spec.
You should probably return "400 Bad Request" if this happens.
"""
if hdict is None:
hdict = {}
-
+
while True:
line = rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
-
+
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
-
+
if line[0] in (SPACE, TAB):
# It's a continuation line.
v = line.strip()
@@ -229,13 +229,13 @@ def read_headers(rfile, hdict=None):
k = k.strip().title()
v = v.strip()
hname = k
-
+
if k in comma_separated_headers:
existing = hdict.get(hname)
if existing:
v = ", ".join((existing, v))
hdict[hname] = v
-
+
return hdict
@@ -244,29 +244,29 @@ class MaxSizeExceeded(Exception):
class SizeCheckWrapper(object):
"""Wraps a file-like object, raising MaxSizeExceeded if too large."""
-
+
def __init__(self, rfile, maxlen):
self.rfile = rfile
self.maxlen = maxlen
self.bytes_read = 0
-
+
def _check_length(self):
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded()
-
+
def read(self, size=None):
data = self.rfile.read(size)
self.bytes_read += len(data)
self._check_length()
return data
-
+
def readline(self, size=None):
if size is not None:
data = self.rfile.readline(size)
self.bytes_read += len(data)
self._check_length()
return data
-
+
# User didn't specify a size ...
# We read the line in chunks to make sure it's not a 100MB line !
res = []
@@ -275,10 +275,10 @@ class SizeCheckWrapper(object):
self.bytes_read += len(data)
self._check_length()
res.append(data)
- # See http://www.cherrypy.org/ticket/421
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/421
if len(data) < 256 or data[-1:] == "\n":
return EMPTY.join(res)
-
+
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
@@ -291,19 +291,19 @@ class SizeCheckWrapper(object):
break
line = self.readline()
return lines
-
+
def close(self):
self.rfile.close()
-
+
def __iter__(self):
return self
-
+
def __next__(self):
data = next(self.rfile)
self.bytes_read += len(data)
self._check_length()
return data
-
+
def next(self):
data = self.rfile.next()
self.bytes_read += len(data)
@@ -313,11 +313,11 @@ class SizeCheckWrapper(object):
class KnownLengthRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted."""
-
+
def __init__(self, rfile, content_length):
self.rfile = rfile
self.remaining = content_length
-
+
def read(self, size=None):
if self.remaining == 0:
return ''
@@ -325,11 +325,11 @@ class KnownLengthRFile(object):
size = self.remaining
else:
size = min(size, self.remaining)
-
+
data = self.rfile.read(size)
self.remaining -= len(data)
return data
-
+
def readline(self, size=None):
if self.remaining == 0:
return ''
@@ -337,11 +337,11 @@ class KnownLengthRFile(object):
size = self.remaining
else:
size = min(size, self.remaining)
-
+
data = self.rfile.readline(size)
self.remaining -= len(data)
return data
-
+
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
@@ -354,13 +354,13 @@ class KnownLengthRFile(object):
break
line = self.readline(sizehint)
return lines
-
+
def close(self):
self.rfile.close()
-
+
def __iter__(self):
return self
-
+
def __next__(self):
data = next(self.rfile)
self.remaining -= len(data)
@@ -369,12 +369,12 @@ class KnownLengthRFile(object):
class ChunkedRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted.
-
+
This class is intended to provide a conforming wsgi.input value for
request entities that have been encoded with the 'chunked' transfer
encoding.
"""
-
+
def __init__(self, rfile, maxlen, bufsize=8192):
self.rfile = rfile
self.maxlen = maxlen
@@ -382,75 +382,75 @@ class ChunkedRFile(object):
self.buffer = EMPTY
self.bufsize = bufsize
self.closed = False
-
+
def _fetch(self):
if self.closed:
return
-
+
line = self.rfile.readline()
self.bytes_read += len(line)
-
+
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded("Request Entity Too Large", self.maxlen)
-
+
line = line.strip().split(SEMICOLON, 1)
-
+
try:
chunk_size = line.pop(0)
chunk_size = int(chunk_size, 16)
except ValueError:
raise ValueError("Bad chunked transfer size: " + repr(chunk_size))
-
+
if chunk_size <= 0:
self.closed = True
return
-
+
## if line: chunk_extension = line[0]
-
+
if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
raise IOError("Request Entity Too Large")
-
+
chunk = self.rfile.read(chunk_size)
self.bytes_read += len(chunk)
self.buffer += chunk
-
+
crlf = self.rfile.read(2)
if crlf != CRLF:
raise ValueError(
"Bad chunked transfer coding (expected '\\r\\n', "
"got " + repr(crlf) + ")")
-
+
def read(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
-
+
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
-
+
if size:
remaining = size - len(data)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
else:
data += self.buffer
-
+
def readline(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
-
+
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
-
+
newline_pos = self.buffer.find(LF)
if size:
if newline_pos == -1:
@@ -467,7 +467,7 @@ class ChunkedRFile(object):
else:
data += self.buffer[:newline_pos]
self.buffer = self.buffer[newline_pos:]
-
+
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
@@ -480,33 +480,33 @@ class ChunkedRFile(object):
break
line = self.readline(sizehint)
return lines
-
+
def read_trailer_lines(self):
if not self.closed:
raise ValueError(
"Cannot read trailers until the request body has been read.")
-
+
while True:
line = self.rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
-
+
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise IOError("Request Entity Too Large")
-
+
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
-
+
yield line
-
+
def close(self):
self.rfile.close()
-
+
def __iter__(self):
# Shamelessly stolen from StringIO
total = 0
@@ -521,41 +521,41 @@ class ChunkedRFile(object):
class HTTPRequest(object):
"""An HTTP Request (and response).
-
+
A single HTTP connection may consist of multiple request/response pairs.
"""
-
+
server = None
"""The HTTPServer object which is receiving this request."""
-
+
conn = None
"""The HTTPConnection object on which this request connected."""
-
+
inheaders = {}
"""A dict of request headers."""
-
+
outheaders = []
"""A list of header tuples to write in the response."""
-
+
ready = False
"""When True, the request has been parsed and is ready to begin generating
the response. When False, signals the calling Connection that the response
should not be generated and the connection should close."""
-
+
close_connection = False
"""Signals the calling Connection that the request should close. This does
not imply an error! The client and/or server may each request that the
connection be closed."""
-
+
chunked_write = False
"""If True, output will be encoded with the "chunked" transfer-coding.
-
+
This value is set automatically inside send_headers."""
-
+
def __init__(self, server, conn):
self.server= server
self.conn = conn
-
+
self.ready = False
self.started_request = False
self.scheme = ntob("http")
@@ -564,14 +564,14 @@ class HTTPRequest(object):
# Use the lowest-common protocol in case read_request_line errors.
self.response_protocol = 'HTTP/1.0'
self.inheaders = {}
-
+
self.status = ""
self.outheaders = []
self.sent_headers = False
self.close_connection = self.__class__.close_connection
self.chunked_read = False
self.chunked_write = self.__class__.chunked_write
-
+
def parse_request(self):
"""Parse the next HTTP request start-line and message-headers."""
self.rfile = SizeCheckWrapper(self.conn.rfile,
@@ -586,7 +586,7 @@ class HTTPRequest(object):
else:
if not success:
return
-
+
try:
success = self.read_request_headers()
except MaxSizeExceeded:
@@ -597,9 +597,9 @@ class HTTPRequest(object):
else:
if not success:
return
-
+
self.ready = True
-
+
def read_request_line(self):
# HTTP/1.1 connections are persistent by default. If a client
# requests a page, then idles (leaves the connection open),
@@ -609,13 +609,13 @@ class HTTPRequest(object):
# (although your TCP stack might suffer for it: cf Apache's history
# with FIN_WAIT_2).
request_line = self.rfile.readline()
-
+
# Set started_request to True so communicate() knows to send 408
# from here on out.
self.started_request = True
if not request_line:
return False
-
+
if request_line == CRLF:
# RFC 2616 sec 4.1: "...if the server is reading the protocol
# stream at the beginning of a message and receives a CRLF
@@ -624,35 +624,35 @@ class HTTPRequest(object):
request_line = self.rfile.readline()
if not request_line:
return False
-
+
if not request_line.endswith(CRLF):
self.simple_response("400 Bad Request", "HTTP requires CRLF terminators")
return False
-
+
try:
method, uri, req_protocol = request_line.strip().split(SPACE, 2)
rp = int(req_protocol[5]), int(req_protocol[7])
except (ValueError, IndexError):
self.simple_response("400 Bad Request", "Malformed Request-Line")
return False
-
+
self.uri = uri
self.method = method
-
+
# uri may be an abs_path (including "http://host.domain.tld");
scheme, authority, path = self.parse_request_uri(uri)
if NUMBER_SIGN in path:
self.simple_response("400 Bad Request",
"Illegal #fragment in Request-URI.")
return False
-
+
if scheme:
self.scheme = scheme
-
+
qs = EMPTY
if QUESTION_MARK in path:
path, qs = path.split(QUESTION_MARK, 1)
-
+
# Unquote the path+params (e.g. "/this%20path" -> "/this path").
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
#
@@ -668,11 +668,11 @@ class HTTPRequest(object):
return False
path = "%2F".join(atoms)
self.path = path
-
+
# Note that, like wsgiref and most other HTTP servers,
# we "% HEX HEX"-unquote the path but not the query string.
self.qs = qs
-
+
# Compare request and server HTTP protocol versions, in case our
# server does not support the requested protocol. Limit our output
# to min(req, server). We want the following output:
@@ -686,7 +686,7 @@ class HTTPRequest(object):
# the client only understands 1.0. RFC 2616 10.5.6 says we should
# only return 505 if the _major_ version is different.
sp = int(self.server.protocol[5]), int(self.server.protocol[7])
-
+
if sp[0] != rp[0]:
self.simple_response("505 HTTP Version Not Supported")
return False
@@ -698,7 +698,7 @@ class HTTPRequest(object):
def read_request_headers(self):
"""Read self.rfile into self.inheaders. Return success."""
-
+
# then all the http headers
try:
read_headers(self.rfile, self.inheaders)
@@ -706,14 +706,14 @@ class HTTPRequest(object):
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
-
+
mrbs = self.server.max_request_body_size
if mrbs and int(self.inheaders.get("Content-Length", 0)) > mrbs:
self.simple_response("413 Request Entity Too Large",
"The entity sent with the request exceeds the maximum "
"allowed bytes.")
return False
-
+
# Persistent connection support
if self.response_protocol == "HTTP/1.1":
# Both server and client are HTTP/1.1
@@ -723,16 +723,16 @@ class HTTPRequest(object):
# Either the server or client (or both) are HTTP/1.0
if self.inheaders.get("Connection", "") != "Keep-Alive":
self.close_connection = True
-
+
# Transfer-Encoding support
te = None
if self.response_protocol == "HTTP/1.1":
te = self.inheaders.get("Transfer-Encoding")
if te:
te = [x.strip().lower() for x in te.split(",") if x.strip()]
-
+
self.chunked_read = False
-
+
if te:
for enc in te:
if enc == "chunked":
@@ -743,7 +743,7 @@ class HTTPRequest(object):
self.simple_response("501 Unimplemented")
self.close_connection = True
return False
-
+
# From PEP 333:
# "Servers and gateways that implement HTTP 1.1 must provide
# transparent support for HTTP 1.1's "expect/continue" mechanism.
@@ -763,7 +763,7 @@ class HTTPRequest(object):
# but it seems like it would be a big slowdown for such a rare case.
if self.inheaders.get("Expect", "") == "100-continue":
# Don't use simple_response here, because it emits headers
- # we don't want. See http://www.cherrypy.org/ticket/951
+ # we don't want. See https://bitbucket.org/cherrypy/cherrypy/issue/951
msg = self.server.protocol + " 100 Continue\r\n\r\n"
try:
self.conn.wfile.sendall(msg)
@@ -772,22 +772,22 @@ class HTTPRequest(object):
if x.args[0] not in socket_errors_to_ignore:
raise
return True
-
+
def parse_request_uri(self, uri):
"""Parse a Request-URI into (scheme, authority, path).
-
+
Note that Request-URI's must be one of::
-
+
Request-URI = "*" | absoluteURI | abs_path | authority
-
+
Therefore, a Request-URI which starts with a double forward-slash
cannot be a "net_path"::
-
+
net_path = "//" authority [ abs_path ]
-
+
Instead, it must be interpreted as an "abs_path" with an empty first
path segment::
-
+
abs_path = "/" path_segments
path_segments = segment *( "/" segment )
segment = *pchar *( ";" param )
@@ -795,7 +795,7 @@ class HTTPRequest(object):
"""
if uri == ASTERISK:
return None, None, uri
-
+
i = uri.find('://')
if i > 0 and QUESTION_MARK not in uri[:i]:
# An absoluteURI.
@@ -805,14 +805,14 @@ class HTTPRequest(object):
authority, path = remainder.split(FORWARD_SLASH, 1)
path = FORWARD_SLASH + path
return scheme, authority, path
-
+
if uri.startswith(FORWARD_SLASH):
# An abs_path.
return None, None, uri
else:
# An authority.
return None, uri, None
-
+
def respond(self):
"""Call the gateway and write its iterable output."""
mrbs = self.server.max_request_body_size
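
The two hunks above complete parse_request_uri; the Request-URI forms from its
docstring map onto (scheme, authority, path) roughly as in this illustrative
re-statement (split_request_uri is a hypothetical stand-in, not the method
itself)::

    def split_request_uri(uri):
        if uri == "*":
            return None, None, uri
        i = uri.find('://')
        if i > 0 and '?' not in uri[:i]:
            # An absoluteURI; the query string stays attached to the path.
            scheme, remainder = uri[:i], uri[i + 3:]
            authority, path = remainder.split('/', 1)
            return scheme, authority, '/' + path
        if uri.startswith('/'):
            return None, None, uri      # an abs_path
        return None, uri, None          # an authority (e.g. CONNECT)

    assert split_request_uri("http://example.com/page?q=1") == \
        ("http", "example.com", "/page?q=1")
    assert split_request_uri("/page?q=1") == (None, None, "/page?q=1")
    assert split_request_uri("example.com:443") == (None, "example.com:443", None)
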
@@ -827,15 +827,15 @@ class HTTPRequest(object):
"allowed bytes.")
return
self.rfile = KnownLengthRFile(self.conn.rfile, cl)
-
+
self.server.gateway(self).respond()
-
+
if (self.ready and not self.sent_headers):
self.sent_headers = True
self.send_headers()
if self.chunked_write:
self.conn.wfile.sendall("0\r\n\r\n")
-
+
def simple_response(self, status, msg=""):
"""Write a simple response back to the client."""
status = str(status)
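
When chunked_write is set, each write() frames its payload as a hex length,
CRLF, the data, CRLF (the buf construction sits just outside these hunks), and
respond() finishes the body with the bare "0\r\n\r\n" terminator shown above.
A sketch of that framing under those assumptions::

    def encode_chunk(data):
        """Illustrative chunked transfer-coding for one write."""
        return hex(len(data))[2:].encode("ascii") + b"\r\n" + data + b"\r\n"

    body = encode_chunk(b"hello") + encode_chunk(b" world") + b"0\r\n\r\n"
    assert body == b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n"
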
@@ -843,7 +843,7 @@ class HTTPRequest(object):
status + CRLF,
"Content-Length: %s\r\n" % len(msg),
"Content-Type: text/plain\r\n"]
-
+
if status[:3] in ("413", "414"):
# Request Entity Too Large / Request-URI Too Long
self.close_connection = True
@@ -856,20 +856,20 @@ class HTTPRequest(object):
# HTTP/1.0 had no 413/414 status nor Connection header.
# Emit 400 instead and trust the message body is enough.
status = "400 Bad Request"
-
+
buf.append(CRLF)
if msg:
if isinstance(msg, unicodestr):
msg = msg.encode("ISO-8859-1")
buf.append(msg)
-
+
try:
self.conn.wfile.sendall("".join(buf))
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
-
+
def write(self, chunk):
"""Write unbuffered data to the client."""
if self.chunked_write and chunk:
@@ -877,15 +877,15 @@ class HTTPRequest(object):
self.conn.wfile.sendall(EMPTY.join(buf))
else:
self.conn.wfile.sendall(chunk)
-
+
def send_headers(self):
"""Assert, process, and send the HTTP response message-headers.
-
+
You must set self.status, and self.outheaders before calling this.
"""
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])
-
+
if status == 413:
# Request Entity Too Large. Close conn to avoid garbage.
self.close_connection = True
@@ -904,7 +904,7 @@ class HTTPRequest(object):
else:
# Closing the conn is the only way to determine len.
self.close_connection = True
-
+
if "connection" not in hkeys:
if self.response_protocol == 'HTTP/1.1':
# Both server and client are HTTP/1.1 or better
@@ -914,7 +914,7 @@ class HTTPRequest(object):
# Server and/or client are HTTP/1.0
if not self.close_connection:
self.outheaders.append(("Connection", "Keep-Alive"))
-
+
if (not self.close_connection) and (not self.chunked_read):
# Read any remaining request body data on the socket.
# "If an origin server receives a request that does not include an
@@ -931,13 +931,13 @@ class HTTPRequest(object):
remaining = getattr(self.rfile, 'remaining', 0)
if remaining > 0:
self.rfile.read(remaining)
-
+
if "date" not in hkeys:
self.outheaders.append(("Date", rfc822.formatdate()))
-
+
if "server" not in hkeys:
self.outheaders.append(("Server", self.server.server_name))
-
+
buf = [self.server.protocol + SPACE + self.status + CRLF]
for k, v in self.outheaders:
buf.append(k + COLON + SPACE + v + CRLF)
@@ -962,7 +962,7 @@ class CP_fileobject(socket._fileobject):
self.bytes_read = 0
self.bytes_written = 0
socket._fileobject.__init__(self, *args, **kwargs)
-
+
def sendall(self, data):
"""Sendall for non-blocking sockets."""
while data:
@@ -1257,26 +1257,26 @@ class CP_fileobject(socket._fileobject):
class HTTPConnection(object):
"""An HTTP connection (active socket).
-
+
server: the Server object which received this connection.
socket: the raw socket object (usually TCP) for this connection.
makefile: a fileobject class for reading from the socket.
"""
-
+
remote_addr = None
remote_port = None
ssl_env = None
rbufsize = DEFAULT_BUFFER_SIZE
wbufsize = DEFAULT_BUFFER_SIZE
RequestHandlerClass = HTTPRequest
-
+
def __init__(self, server, sock, makefile=CP_fileobject):
self.server = server
self.socket = sock
self.rfile = makefile(sock, "rb", self.rbufsize)
self.wfile = makefile(sock, "wb", self.wbufsize)
self.requests_seen = 0
-
+
def communicate(self):
"""Read each request and respond appropriately."""
request_seen = False
@@ -1287,7 +1287,7 @@ class HTTPConnection(object):
# get written to the previous request.
req = None
req = self.RequestHandlerClass(self.server, self)
-
+
# This order of operations should guarantee correct pipelining.
req.parse_request()
if self.server.stats['Enabled']:
@@ -1297,7 +1297,7 @@ class HTTPConnection(object):
# probably already made a simple_response). Return and
# let the conn close.
return
-
+
request_seen = True
req.respond()
if req.close_connection:
@@ -1310,7 +1310,7 @@ class HTTPConnection(object):
# Don't error if we're between requests; only error
# if 1) no request has been started at all, or 2) we're
# in the middle of a request.
- # See http://www.cherrypy.org/ticket/853
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/853
if (not request_seen) or (req and req.started_request):
# Don't bother writing the 408 if the response
# has already started being written.
@@ -1352,13 +1352,13 @@ class HTTPConnection(object):
except FatalSSLAlert:
# Close the connection.
return
-
+
linger = False
-
+
def close(self):
"""Close the socket underlying this connection."""
self.rfile.close()
-
+
if not self.linger:
# Python's socket module does NOT call close on the kernel socket
# when you call socket.close(). We do so manually here because we
@@ -1391,29 +1391,29 @@ _SHUTDOWNREQUEST = None
class WorkerThread(threading.Thread):
"""Thread which continuously polls a Queue for Connection objects.
-
+
Due to the timing issues of polling a Queue, a WorkerThread does not
check its own 'ready' flag after it has started. To stop the thread,
it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
(one for each running WorkerThread).
"""
-
+
conn = None
"""The current connection pulled off the Queue, or None."""
-
+
server = None
"""The HTTP Server which spawned this thread, and which owns the
Queue and is placing active connections into it."""
-
+
ready = False
"""A simple flag for the calling server to know when this thread
has begun polling the Queue."""
-
-
+
+
def __init__(self, server):
self.ready = False
self.server = server
-
+
self.requests_seen = 0
self.bytes_read = 0
self.bytes_written = 0
@@ -1428,7 +1428,7 @@ class WorkerThread(threading.Thread):
'Write Throughput': lambda s: s['Bytes Written'](s) / (s['Work Time'](s) or 1e-6),
}
threading.Thread.__init__(self)
-
+
def run(self):
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
@@ -1437,7 +1437,7 @@ class WorkerThread(threading.Thread):
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
-
+
self.conn = conn
if self.server.stats['Enabled']:
self.start_time = time.time()
@@ -1459,11 +1459,11 @@ class WorkerThread(threading.Thread):
class ThreadPool(object):
"""A Request Queue for an HTTPServer which pools threads.
-
+
ThreadPool objects must provide min, get(), put(obj), start()
and stop(timeout) attributes.
"""
-
+
def __init__(self, server, min=10, max=-1):
self.server = server
self.min = min
@@ -1471,7 +1471,7 @@ class ThreadPool(object):
self._threads = []
self._queue = queue.Queue()
self.get = self._queue.get
-
+
def start(self):
"""Start the pool of threads."""
for i in range(self.min):
@@ -1482,17 +1482,17 @@ class ThreadPool(object):
for worker in self._threads:
while not worker.ready:
time.sleep(.1)
-
+
def _get_idle(self):
"""Number of worker threads which are idle. Read-only."""
return len([t for t in self._threads if t.conn is None])
idle = property(_get_idle, doc=_get_idle.__doc__)
-
+
def put(self, obj):
self._queue.put(obj)
if obj is _SHUTDOWNREQUEST:
return
-
+
def grow(self, amount):
"""Spawn new worker threads (not above self.max)."""
for i in range(amount):
@@ -1502,7 +1502,7 @@ class ThreadPool(object):
worker.setName("CP Server " + worker.getName())
self._threads.append(worker)
worker.start()
-
+
def shrink(self, amount):
"""Kill off worker threads (not below self.min)."""
# Grow/shrink the pool if necessary.
@@ -1511,7 +1511,7 @@ class ThreadPool(object):
if not t.isAlive():
self._threads.remove(t)
amount -= 1
-
+
if amount > 0:
for i in range(min(amount, len(self._threads) - self.min)):
# Put a number of shutdown requests on the queue equal
@@ -1519,13 +1519,13 @@ class ThreadPool(object):
# that worker will terminate and be culled from our list
# in self.put.
self._queue.put(_SHUTDOWNREQUEST)
-
+
def stop(self, timeout=5):
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
-
+
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout and timeout >= 0:
@@ -1553,10 +1553,10 @@ class ThreadPool(object):
worker.join()
except (AssertionError,
# Ignore repeated Ctrl-C.
- # See http://www.cherrypy.org/ticket/691.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/691.
KeyboardInterrupt):
pass
-
+
def _get_qsize(self):
return self._queue.qsize()
qsize = property(_get_qsize)
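
The grow/shrink/stop logic above is the classic sentinel pattern: every
_SHUTDOWNREQUEST placed on the queue terminates exactly one worker, so no
thread has to poll a flag. A minimal standalone sketch of the pattern (not the
ThreadPool class itself)::

    import threading
    try:
        import queue            # Python 3
    except ImportError:
        import Queue as queue   # Python 2

    SHUTDOWN = object()         # sentinel: put one per worker to stop it

    def worker(q):
        while True:
            item = q.get()
            if item is SHUTDOWN:
                return          # this worker exits; the others keep polling
            item()              # "handle the connection"

    q = queue.Queue()
    threads = [threading.Thread(target=worker, args=(q,)) for _ in range(4)]
    for t in threads:
        t.start()
    q.put(lambda: None)         # a unit of work
    for t in threads:
        q.put(SHUTDOWN)         # exactly one sentinel per worker
    for t in threads:
        t.join()
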
@@ -1568,6 +1568,14 @@ try:
except ImportError:
try:
from ctypes import windll, WinError
+ import ctypes.wintypes
+ _SetHandleInformation = windll.kernel32.SetHandleInformation
+ _SetHandleInformation.argtypes = [
+ ctypes.wintypes.HANDLE,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.DWORD,
+ ]
+ _SetHandleInformation.restype = ctypes.wintypes.BOOL
except ImportError:
def prevent_socket_inheritance(sock):
"""Dummy function, since neither fcntl nor ctypes are available."""
@@ -1575,7 +1583,7 @@ except ImportError:
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (Windows)."""
- if not windll.kernel32.SetHandleInformation(sock.fileno(), 1, 0):
+ if not _SetHandleInformation(sock.fileno(), 1, 0):
raise WinError()
else:
def prevent_socket_inheritance(sock):
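
The added lines pin the foreign-function signature instead of calling
windll.kernel32.SetHandleInformation with ctypes defaults; declaring
HANDLE/DWORD/BOOL keeps 64-bit handle values from being truncated to a C int
on Win64, which appears to be the motivation for the change. The same pattern
in isolation (Windows-only; make_noninheritable is an illustrative name)::

    import ctypes
    import ctypes.wintypes

    HANDLE_FLAG_INHERIT = 1

    _SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation
    _SetHandleInformation.argtypes = [
        ctypes.wintypes.HANDLE,   # hObject: 64 bits on Win64, so don't let
        ctypes.wintypes.DWORD,    #          ctypes marshal it as a C int
        ctypes.wintypes.DWORD,
    ]
    _SetHandleInformation.restype = ctypes.wintypes.BOOL

    def make_noninheritable(sock):
        """Clear HANDLE_FLAG_INHERIT on the socket's underlying handle."""
        if not _SetHandleInformation(sock.fileno(), HANDLE_FLAG_INHERIT, 0):
            raise ctypes.WinError()
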
@@ -1587,98 +1595,98 @@ else:
class SSLAdapter(object):
"""Base class for SSL driver library adapters.
-
+
Required methods:
-
+
* ``wrap(sock) -> (wrapped socket, ssl environ dict)``
* ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) -> socket file object``
"""
-
+
def __init__(self, certificate, private_key, certificate_chain=None):
self.certificate = certificate
self.private_key = private_key
self.certificate_chain = certificate_chain
-
+
def wrap(self, sock):
raise NotImplemented
-
+
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
raise NotImplemented
class HTTPServer(object):
"""An HTTP server."""
-
+
_bind_addr = "127.0.0.1"
_interrupt = None
-
+
gateway = None
"""A Gateway instance."""
-
+
minthreads = None
"""The minimum number of worker threads to create (default 10)."""
-
+
maxthreads = None
"""The maximum number of worker threads to create (default -1 = no limit)."""
-
+
server_name = None
"""The name of the server; defaults to socket.gethostname()."""
-
+
protocol = "HTTP/1.1"
"""The version string to write in the Status-Line of all HTTP responses.
-
+
For example, "HTTP/1.1" is the default. This also limits the supported
features used in the response."""
-
+
request_queue_size = 5
"""The 'backlog' arg to socket.listen(); max queued connections (default 5)."""
-
+
shutdown_timeout = 5
"""The total time, in seconds, to wait for worker threads to cleanly exit."""
-
+
timeout = 10
"""The timeout in seconds for accepted connections (default 10)."""
-
+
version = "CherryPy/3.2.3"
"""A version string for the HTTPServer."""
-
+
software = None
"""The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
-
+
If None, this defaults to ``'%s Server' % self.version``."""
-
+
ready = False
"""An internal flag which marks whether the socket is accepting connections."""
-
+
max_request_header_size = 0
"""The maximum size, in bytes, for request headers, or 0 for no limit."""
-
+
max_request_body_size = 0
"""The maximum size, in bytes, for request bodies, or 0 for no limit."""
-
+
nodelay = True
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
-
+
ConnectionClass = HTTPConnection
"""The class to use for handling HTTP connections."""
-
+
ssl_adapter = None
"""An instance of SSLAdapter (or a subclass).
-
+
You must have the corresponding SSL driver library installed."""
-
+
def __init__(self, bind_addr, gateway, minthreads=10, maxthreads=-1,
server_name=None):
self.bind_addr = bind_addr
self.gateway = gateway
-
+
self.requests = ThreadPool(self, min=minthreads or 1, max=maxthreads)
-
+
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.clear_stats()
-
+
def clear_stats(self):
self._start_time = None
self._run_time = 0
@@ -1709,17 +1717,17 @@ class HTTPServer(object):
'Worker Threads': {},
}
logging.statistics["CherryPy HTTPServer %d" % id(self)] = self.stats
-
+
def runtime(self):
if self._start_time is None:
return self._run_time
else:
return self._run_time + (time.time() - self._start_time)
-
+
def __str__(self):
return "%s.%s(%r)" % (self.__module__, self.__class__.__name__,
self.bind_addr)
-
+
def _get_bind_addr(self):
return self._bind_addr
def _set_bind_addr(self, value):
@@ -1740,16 +1748,16 @@ class HTTPServer(object):
self._bind_addr = value
bind_addr = property(_get_bind_addr, _set_bind_addr,
doc="""The interface on which to listen for connections.
-
+
For TCP sockets, a (host, port) tuple. Host values may be any IPv4
or IPv6 address, or any valid hostname. The string 'localhost' is a
synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
The string '0.0.0.0' is a special IPv4 entry meaning "any active
interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
IPv6. The empty string or None are not allowed.
-
+
For UNIX sockets, supply the filename as a string.""")
-
+
def start(self):
"""Run the server forever."""
# We don't have to trap KeyboardInterrupt or SystemExit here,
@@ -1757,10 +1765,10 @@ class HTTPServer(object):
# If you're using this server with another framework, you should
# trap those exceptions in whatever code block calls start().
self._interrupt = None
-
+
if self.software is None:
self.software = "%s Server" % self.version
-
+
# SSL backward compatibility
if (self.ssl_adapter is None and
getattr(self, 'ssl_certificate', None) and
@@ -1779,19 +1787,19 @@ class HTTPServer(object):
self.ssl_adapter = pyOpenSSLAdapter(
self.ssl_certificate, self.ssl_private_key,
getattr(self, 'ssl_certificate_chain', None))
-
+
# Select the appropriate socket
if isinstance(self.bind_addr, basestring):
# AF_UNIX socket
-
+
# So we can reuse the socket...
try: os.unlink(self.bind_addr)
except: pass
-
+
# So everyone can access the socket...
try: os.chmod(self.bind_addr, 511) # 0777
except: pass
-
+
info = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
else:
# AF_INET or AF_INET6 socket
@@ -1807,7 +1815,7 @@ class HTTPServer(object):
else:
info = [(socket.AF_INET, socket.SOCK_STREAM,
0, "", self.bind_addr)]
-
+
self.socket = None
msg = "No socket could be created"
for res in info:
@@ -1822,14 +1830,14 @@ class HTTPServer(object):
break
if not self.socket:
raise socket.error(msg)
-
+
# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)
-
+
# Create worker threads
self.requests.start()
-
+
self.ready = True
self._start_time = time.time()
while self.ready:
@@ -1840,7 +1848,7 @@ class HTTPServer(object):
except:
self.error_log("Error in HTTPServer.tick", level=logging.ERROR,
traceback=True)
-
+
if self.interrupt:
while self.interrupt is True:
# Wait for self.stop() to complete. See _set_interrupt.
@@ -1864,12 +1872,12 @@ class HTTPServer(object):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.nodelay and not isinstance(self.bind_addr, str):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
+
if self.ssl_adapter is not None:
self.socket = self.ssl_adapter.bind(self.socket)
-
+
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
- # activate dual-stack. See http://www.cherrypy.org/ticket/871.
+ # activate dual-stack. See https://bitbucket.org/cherrypy/cherrypy/issue/871.
if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6
and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')):
try:
@@ -1878,9 +1886,9 @@ class HTTPServer(object):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
-
+
self.socket.bind(self.bind_addr)
-
+
def tick(self):
"""Accept a new connection and put it on the Queue."""
try:
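
The bind() hunks above fall back quietly when dual-stack listening is not
available; the setsockopt call itself sits outside the diff context, but the
usual way to activate dual-stack is to clear IPV6_V6ONLY on an AF_INET6
socket, sketched here as an assumption rather than a quote of the module::

    import socket

    s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    try:
        # 0 => also accept IPv4 peers as IPv4-mapped addresses (::ffff:a.b.c.d).
        s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
    except (AttributeError, socket.error):
        # Option not exposed by this platform's stack; stay IPv6-only.
        pass
    s.bind(("::", 8070))
    s.listen(5)
    s.close()
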
@@ -1889,11 +1897,11 @@ class HTTPServer(object):
self.stats['Accepts'] += 1
if not self.ready:
return
-
+
prevent_socket_inheritance(s)
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
-
+
makefile = CP_fileobject
ssl_env = {}
# if ssl cert and key are set, we try to be a secure HTTP server
@@ -1907,7 +1915,7 @@ class HTTPServer(object):
"Content-Length: %s\r\n" % len(msg),
"Content-Type: text/plain\r\n\r\n",
msg]
-
+
wfile = makefile(s, "wb", DEFAULT_BUFFER_SIZE)
try:
wfile.sendall("".join(buf))
@@ -1922,9 +1930,9 @@ class HTTPServer(object):
# Re-apply our timeout since we may have a new socket object
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
-
+
conn = self.ConnectionClass(self, s, makefile)
-
+
if not isinstance(self.bind_addr, basestring):
# optional values
# Until we do DNS lookups, omit REMOTE_HOST
@@ -1938,9 +1946,9 @@ class HTTPServer(object):
addr = ('::', 0)
conn.remote_addr = addr[0]
conn.remote_port = addr[1]
-
+
conn.ssl_env = ssl_env
-
+
self.requests.put(conn)
except socket.timeout:
# The only reason for the timeout in start() is so we can
@@ -1956,17 +1964,17 @@ class HTTPServer(object):
# is received during the accept() call; all docs say retry
# the call, and I *think* I'm reading it right that Python
# will then go ahead and poll for and handle the signal
- # elsewhere. See http://www.cherrypy.org/ticket/707.
+ # elsewhere. See https://bitbucket.org/cherrypy/cherrypy/issue/707.
return
if x.args[0] in socket_errors_nonblocking:
- # Just try again. See http://www.cherrypy.org/ticket/479.
+ # Just try again. See https://bitbucket.org/cherrypy/cherrypy/issue/479.
return
if x.args[0] in socket_errors_to_ignore:
# Our socket was closed.
- # See http://www.cherrypy.org/ticket/686.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/686.
return
raise
-
+
def _get_interrupt(self):
return self._interrupt
def _set_interrupt(self, interrupt):
@@ -1976,14 +1984,14 @@ class HTTPServer(object):
interrupt = property(_get_interrupt, _set_interrupt,
doc="Set this to an Exception instance to "
"interrupt the server.")
-
+
def stop(self):
"""Gracefully shutdown a server that is serving forever."""
self.ready = False
if self._start_time is not None:
self._run_time += (time.time() - self._start_time)
self._start_time = None
-
+
sock = getattr(self, "socket", None)
if sock:
if not isinstance(self.bind_addr, basestring):
@@ -1994,7 +2002,7 @@ class HTTPServer(object):
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
# Changed to use error code and not message
- # See http://www.cherrypy.org/ticket/860.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/860.
raise
else:
# Note that we're explicitly NOT using AI_PASSIVE,
@@ -2018,16 +2026,16 @@ class HTTPServer(object):
if hasattr(sock, "close"):
sock.close()
self.socket = None
-
+
self.requests.stop(self.shutdown_timeout)
class Gateway(object):
"""A base class to interface HTTPServer with other systems, such as WSGI."""
-
+
def __init__(self, req):
self.req = req
-
+
def respond(self):
"""Process the current request. Must be overridden in a subclass."""
raise NotImplemented
@@ -2047,7 +2055,7 @@ def get_ssl_adapter_class(name='pyopenssl'):
last_dot = adapter.rfind(".")
attr_name = adapter[last_dot + 1:]
mod_path = adapter[:last_dot]
-
+
try:
mod = sys.modules[mod_path]
if mod is None:
@@ -2055,14 +2063,14 @@ def get_ssl_adapter_class(name='pyopenssl'):
except KeyError:
# The last [''] is important.
mod = __import__(mod_path, globals(), locals(), [''])
-
+
# Let an AttributeError propagate outward.
try:
adapter = getattr(mod, attr_name)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
-
+
return adapter
# -------------------------------- WSGI Stuff -------------------------------- #
@@ -2070,26 +2078,26 @@ def get_ssl_adapter_class(name='pyopenssl'):
class CherryPyWSGIServer(HTTPServer):
"""A subclass of HTTPServer which calls a WSGI application."""
-
+
wsgi_version = (1, 0)
"""The version of WSGI to produce."""
-
+
def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None,
max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5):
self.requests = ThreadPool(self, min=numthreads or 1, max=max)
self.wsgi_app = wsgi_app
self.gateway = wsgi_gateways[self.wsgi_version]
-
+
self.bind_addr = bind_addr
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.request_queue_size = request_queue_size
-
+
self.timeout = timeout
self.shutdown_timeout = shutdown_timeout
self.clear_stats()
-
+
def _get_numthreads(self):
return self.requests.min
def _set_numthreads(self, value):
@@ -2099,17 +2107,17 @@ class CherryPyWSGIServer(HTTPServer):
class WSGIGateway(Gateway):
"""A base class to interface HTTPServer with WSGI."""
-
+
def __init__(self, req):
self.req = req
self.started_response = False
self.env = self.get_environ()
self.remaining_bytes_out = None
-
+
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
raise NotImplemented
-
+
def respond(self):
"""Process the current request."""
response = self.req.server.wsgi_app(self.env, self.start_response)
@@ -2128,7 +2136,7 @@ class WSGIGateway(Gateway):
finally:
if hasattr(response, "close"):
response.close()
-
+
def start_response(self, status, headers, exc_info = None):
"""WSGI callable to begin the HTTP response."""
# "The application may call start_response more than once,
@@ -2137,7 +2145,7 @@ class WSGIGateway(Gateway):
raise AssertionError("WSGI start_response called a second "
"time with no exc_info.")
self.started_response = True
-
+
# "if exc_info is provided, and the HTTP headers have already been
# sent, start_response must raise an error, and should raise the
# exc_info tuple."
@@ -2146,7 +2154,7 @@ class WSGIGateway(Gateway):
raise exc_info[0], exc_info[1], exc_info[2]
finally:
exc_info = None
-
+
self.req.status = status
for k, v in headers:
if not isinstance(k, str):
@@ -2156,18 +2164,18 @@ class WSGIGateway(Gateway):
if k.lower() == 'content-length':
self.remaining_bytes_out = int(v)
self.req.outheaders.extend(headers)
-
+
return self.write
-
+
def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
-
+
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError("WSGI write called before start_response.")
-
+
chunklen = len(chunk)
rbo = self.remaining_bytes_out
if rbo is not None and chunklen > rbo:
@@ -2180,13 +2188,13 @@ class WSGIGateway(Gateway):
# Dang. We have probably already sent data. Truncate the chunk
# to fit (so the client doesn't hang) and raise an error later.
chunk = chunk[:rbo]
-
+
if not self.req.sent_headers:
self.req.sent_headers = True
self.req.send_headers()
-
+
self.req.write(chunk)
-
+
if rbo is not None:
rbo -= chunklen
if rbo < 0:
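
The write() hunks above enforce the Content-Length that the application
declared via start_response: surplus bytes are truncated so the client does
not hang, and the overrun is raised as an error afterwards. The bookkeeping in
isolation (account is a hypothetical helper, not the gateway API)::

    def account(remaining, chunk):
        """Track bytes written against a declared Content-Length."""
        overflow = False
        if remaining is not None and len(chunk) > remaining:
            chunk = chunk[:remaining]   # truncate so the response stays valid
            overflow = True             # remember to raise after sending
        if remaining is not None:
            remaining -= len(chunk)
        return chunk, remaining, overflow

    assert account(5, b"hello world") == (b"hello", 0, True)
    assert account(None, b"hello") == (b"hello", None, False)
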
@@ -2196,7 +2204,7 @@ class WSGIGateway(Gateway):
class WSGIGateway_10(WSGIGateway):
"""A Gateway class to interface HTTPServer with WSGI 1.0.x."""
-
+
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
@@ -2224,18 +2232,18 @@ class WSGIGateway_10(WSGIGateway):
'wsgi.url_scheme': req.scheme,
'wsgi.version': (1, 0),
}
-
+
if isinstance(req.server.bind_addr, basestring):
# AF_UNIX. This isn't really allowed by WSGI, which doesn't
# address unix domain sockets. But it's better than nothing.
env["SERVER_PORT"] = ""
else:
env["SERVER_PORT"] = str(req.server.bind_addr[1])
-
+
# Request headers
for k, v in req.inheaders.iteritems():
env["HTTP_" + k.upper().replace("-", "_")] = v
-
+
# CONTENT_TYPE/CONTENT_LENGTH
ct = env.pop("HTTP_CONTENT_TYPE", None)
if ct is not None:
@@ -2243,27 +2251,27 @@ class WSGIGateway_10(WSGIGateway):
cl = env.pop("HTTP_CONTENT_LENGTH", None)
if cl is not None:
env["CONTENT_LENGTH"] = cl
-
+
if req.conn.ssl_env:
env.update(req.conn.ssl_env)
-
+
return env
class WSGIGateway_u0(WSGIGateway_10):
"""A Gateway class to interface HTTPServer with WSGI u.0.
-
+
WSGI u.0 is an experimental protocol, which uses unicode for keys and values
in both Python 2 and Python 3.
"""
-
+
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
env_10 = WSGIGateway_10.get_environ(self)
env = dict([(k.decode('ISO-8859-1'), v) for k, v in env_10.iteritems()])
env[u'wsgi.version'] = ('u', 0)
-
+
# Request-URI
env.setdefault(u'wsgi.url_encoding', u'utf-8')
try:
@@ -2274,11 +2282,11 @@ class WSGIGateway_u0(WSGIGateway_10):
env[u'wsgi.url_encoding'] = u'ISO-8859-1'
for key in [u"PATH_INFO", u"SCRIPT_NAME", u"QUERY_STRING"]:
env[key] = env_10[str(key)].decode(env[u'wsgi.url_encoding'])
-
+
for k, v in sorted(env.items()):
if isinstance(v, str) and k not in ('REQUEST_URI', 'wsgi.input'):
env[k] = v.decode('ISO-8859-1')
-
+
return env
wsgi_gateways = {
@@ -2288,24 +2296,24 @@ wsgi_gateways = {
class WSGIPathInfoDispatcher(object):
"""A WSGI dispatcher for dispatch based on the PATH_INFO.
-
+
apps: a dict or list of (path_prefix, app) pairs.
"""
-
+
def __init__(self, apps):
try:
apps = list(apps.items())
except AttributeError:
pass
-
+
# Sort the apps by len(path), descending
apps.sort(cmp=lambda x,y: cmp(len(x[0]), len(y[0])))
apps.reverse()
-
+
# The path_prefix strings must start, but not end, with a slash.
# Use "" instead of "/".
self.apps = [(p.rstrip("/"), a) for p, a in apps]
-
+
def __call__(self, environ, start_response):
path = environ["PATH_INFO"] or "/"
for p, app in self.apps:
@@ -2315,7 +2323,7 @@ class WSGIPathInfoDispatcher(object):
environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p
environ["PATH_INFO"] = path[len(p):]
return app(environ, start_response)
-
+
start_response('404 Not Found', [('Content-Type', 'text/plain'),
('Content-Length', '0')])
return ['']
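
The dispatcher above shifts the matched prefix from PATH_INFO onto SCRIPT_NAME
before delegating, so a mounted app sees paths relative to its own root. A
short usage sketch in the spirit of the module docstring (both apps are
placeholders)::

    from cherrypy import wsgiserver

    def blog_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        # For GET /blog/post/1: SCRIPT_NAME ends with '/blog' and
        # PATH_INFO is '/post/1'.
        return [environ['PATH_INFO'].encode('ISO-8859-1')]

    def root_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [b'root']

    d = wsgiserver.WSGIPathInfoDispatcher({'/': root_app, '/blog': blog_app})
    server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8070), d)
    # server.start()  # blocks; call server.stop() from another thread to exit
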
diff --git a/cherrypy/wsgiserver/wsgiserver3.py b/cherrypy/wsgiserver/wsgiserver3.py
index 068056ff..13daddd7 100644
--- a/cherrypy/wsgiserver/wsgiserver3.py
+++ b/cherrypy/wsgiserver/wsgiserver3.py
@@ -4,24 +4,24 @@ Simplest example on how to use this module directly
(without using CherryPy's application machinery)::
from cherrypy import wsgiserver
-
+
def my_crazy_app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type','text/plain')]
start_response(status, response_headers)
return ['Hello world!']
-
+
server = wsgiserver.CherryPyWSGIServer(
('0.0.0.0', 8070), my_crazy_app,
server_name='www.cherrypy.example')
server.start()
-
-The CherryPy WSGI server can serve as many WSGI applications
+
+The CherryPy WSGI server can serve as many WSGI applications
as you want in one instance by using a WSGIPathInfoDispatcher::
-
+
d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app})
server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d)
-
+
Want SSL support? Just set server.ssl_adapter to an SSLAdapter instance.
This won't call the CherryPy engine (application side) at all, only the
@@ -138,7 +138,7 @@ import errno
def plat_specific_errors(*errnames):
"""Return error numbers for all errors in errnames on this platform.
-
+
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names.
@@ -182,31 +182,31 @@ if not hasattr(logging, 'statistics'): logging.statistics = {}
def read_headers(rfile, hdict=None):
"""Read headers from the given stream into the given header dict.
-
+
If hdict is None, a new header dict is created. Returns the populated
header dict.
-
+
Headers which are repeated are folded together using a comma if their
specification so dictates.
-
+
This function raises ValueError when the read bytes violate the HTTP spec.
You should probably return "400 Bad Request" if this happens.
"""
if hdict is None:
hdict = {}
-
+
while True:
line = rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
-
+
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
-
+
if line[0] in (SPACE, TAB):
# It's a continuation line.
v = line.strip()
@@ -219,13 +219,13 @@ def read_headers(rfile, hdict=None):
k = k.strip().title()
v = v.strip()
hname = k
-
+
if k in comma_separated_headers:
existing = hdict.get(hname)
if existing:
v = b", ".join((existing, v))
hdict[hname] = v
-
+
return hdict
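
The hunk above Title-Cases each header name and folds a repeated header into
one comma-separated value when its name is in comma_separated_headers. The
same folding in isolation (the names treated as foldable here are
illustrative, and the real module keys headers as bytes)::

    def fold(hdict, name, value, comma_separated=("Accept", "Via")):
        """Illustrative header folding per RFC 2616 sec 4.2."""
        name = name.strip().title()
        value = value.strip()
        if name in comma_separated and name in hdict:
            value = hdict[name] + ", " + value
        hdict[name] = value
        return hdict

    h = {}
    fold(h, "accept", "text/html")
    fold(h, "ACCEPT", "application/json")
    assert h == {"Accept": "text/html, application/json"}
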
@@ -234,29 +234,29 @@ class MaxSizeExceeded(Exception):
class SizeCheckWrapper(object):
"""Wraps a file-like object, raising MaxSizeExceeded if too large."""
-
+
def __init__(self, rfile, maxlen):
self.rfile = rfile
self.maxlen = maxlen
self.bytes_read = 0
-
+
def _check_length(self):
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded()
-
+
def read(self, size=None):
data = self.rfile.read(size)
self.bytes_read += len(data)
self._check_length()
return data
-
+
def readline(self, size=None):
if size is not None:
data = self.rfile.readline(size)
self.bytes_read += len(data)
self._check_length()
return data
-
+
# User didn't specify a size ...
# We read the line in chunks to make sure it's not a 100MB line !
res = []
@@ -265,10 +265,10 @@ class SizeCheckWrapper(object):
self.bytes_read += len(data)
self._check_length()
res.append(data)
- # See http://www.cherrypy.org/ticket/421
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/421
if len(data) < 256 or data[-1:] == "\n":
return EMPTY.join(res)
-
+
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
@@ -281,19 +281,19 @@ class SizeCheckWrapper(object):
break
line = self.readline()
return lines
-
+
def close(self):
self.rfile.close()
-
+
def __iter__(self):
return self
-
+
def __next__(self):
data = next(self.rfile)
self.bytes_read += len(data)
self._check_length()
return data
-
+
def next(self):
data = self.rfile.next()
self.bytes_read += len(data)
@@ -303,11 +303,11 @@ class SizeCheckWrapper(object):
class KnownLengthRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted."""
-
+
def __init__(self, rfile, content_length):
self.rfile = rfile
self.remaining = content_length
-
+
def read(self, size=None):
if self.remaining == 0:
return b''
@@ -315,11 +315,11 @@ class KnownLengthRFile(object):
size = self.remaining
else:
size = min(size, self.remaining)
-
+
data = self.rfile.read(size)
self.remaining -= len(data)
return data
-
+
def readline(self, size=None):
if self.remaining == 0:
return b''
@@ -327,11 +327,11 @@ class KnownLengthRFile(object):
size = self.remaining
else:
size = min(size, self.remaining)
-
+
data = self.rfile.readline(size)
self.remaining -= len(data)
return data
-
+
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
@@ -344,13 +344,13 @@ class KnownLengthRFile(object):
break
line = self.readline(sizehint)
return lines
-
+
def close(self):
self.rfile.close()
-
+
def __iter__(self):
return self
-
+
def __next__(self):
data = next(self.rfile)
self.remaining -= len(data)
@@ -359,12 +359,12 @@ class KnownLengthRFile(object):
class ChunkedRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted.
-
+
This class is intended to provide a conforming wsgi.input value for
request entities that have been encoded with the 'chunked' transfer
encoding.
"""
-
+
def __init__(self, rfile, maxlen, bufsize=8192):
self.rfile = rfile
self.maxlen = maxlen
@@ -372,75 +372,75 @@ class ChunkedRFile(object):
self.buffer = EMPTY
self.bufsize = bufsize
self.closed = False
-
+
def _fetch(self):
if self.closed:
return
-
+
line = self.rfile.readline()
self.bytes_read += len(line)
-
+
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded("Request Entity Too Large", self.maxlen)
-
+
line = line.strip().split(SEMICOLON, 1)
-
+
try:
chunk_size = line.pop(0)
chunk_size = int(chunk_size, 16)
except ValueError:
raise ValueError("Bad chunked transfer size: " + repr(chunk_size))
-
+
if chunk_size <= 0:
self.closed = True
return
-
+
## if line: chunk_extension = line[0]
-
+
if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
raise IOError("Request Entity Too Large")
-
+
chunk = self.rfile.read(chunk_size)
self.bytes_read += len(chunk)
self.buffer += chunk
-
+
crlf = self.rfile.read(2)
if crlf != CRLF:
raise ValueError(
"Bad chunked transfer coding (expected '\\r\\n', "
"got " + repr(crlf) + ")")
-
+
def read(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
-
+
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
-
+
if size:
remaining = size - len(data)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
else:
data += self.buffer
-
+
def readline(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
-
+
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
-
+
newline_pos = self.buffer.find(LF)
if size:
if newline_pos == -1:
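
The _fetch logic in the hunk above consumes one chunk per call: a hex size
line (optionally carrying a ';extension'), the payload, and a trailing CRLF,
with a zero size marking the end of the body. A compact standalone decoder for
illustration (no trailer or maxlen handling)::

    import io

    def dechunk(rfile):
        """Decode a 'chunked' transfer-coded body from a file-like object."""
        body = b""
        while True:
            size_line = rfile.readline().strip().split(b";", 1)[0]
            chunk_size = int(size_line, 16)
            if chunk_size == 0:
                return body                      # last-chunk
            body += rfile.read(chunk_size)
            if rfile.read(2) != b"\r\n":
                raise ValueError("Bad chunked transfer coding")

    raw = io.BytesIO(b"5\r\nhello\r\n6\r\n world\r\n0\r\n\r\n")
    assert dechunk(raw) == b"hello world"
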
@@ -457,7 +457,7 @@ class ChunkedRFile(object):
else:
data += self.buffer[:newline_pos]
self.buffer = self.buffer[newline_pos:]
-
+
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
@@ -470,33 +470,33 @@ class ChunkedRFile(object):
break
line = self.readline(sizehint)
return lines
-
+
def read_trailer_lines(self):
if not self.closed:
raise ValueError(
"Cannot read trailers until the request body has been read.")
-
+
while True:
line = self.rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
-
+
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise IOError("Request Entity Too Large")
-
+
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
-
+
yield line
-
+
def close(self):
self.rfile.close()
-
+
def __iter__(self):
# Shamelessly stolen from StringIO
total = 0
@@ -511,41 +511,41 @@ class ChunkedRFile(object):
class HTTPRequest(object):
"""An HTTP Request (and response).
-
+
A single HTTP connection may consist of multiple request/response pairs.
"""
-
+
server = None
"""The HTTPServer object which is receiving this request."""
-
+
conn = None
"""The HTTPConnection object on which this request connected."""
-
+
inheaders = {}
"""A dict of request headers."""
-
+
outheaders = []
"""A list of header tuples to write in the response."""
-
+
ready = False
"""When True, the request has been parsed and is ready to begin generating
the response. When False, signals the calling Connection that the response
should not be generated and the connection should close."""
-
+
close_connection = False
"""Signals the calling Connection that the request should close. This does
not imply an error! The client and/or server may each request that the
connection be closed."""
-
+
chunked_write = False
"""If True, output will be encoded with the "chunked" transfer-coding.
-
+
This value is set automatically inside send_headers."""
-
+
def __init__(self, server, conn):
self.server= server
self.conn = conn
-
+
self.ready = False
self.started_request = False
self.scheme = ntob("http")
@@ -554,14 +554,14 @@ class HTTPRequest(object):
# Use the lowest-common protocol in case read_request_line errors.
self.response_protocol = 'HTTP/1.0'
self.inheaders = {}
-
+
self.status = ""
self.outheaders = []
self.sent_headers = False
self.close_connection = self.__class__.close_connection
self.chunked_read = False
self.chunked_write = self.__class__.chunked_write
-
+
def parse_request(self):
"""Parse the next HTTP request start-line and message-headers."""
self.rfile = SizeCheckWrapper(self.conn.rfile,
@@ -576,7 +576,7 @@ class HTTPRequest(object):
else:
if not success:
return
-
+
try:
success = self.read_request_headers()
except MaxSizeExceeded:
@@ -587,9 +587,9 @@ class HTTPRequest(object):
else:
if not success:
return
-
+
self.ready = True
-
+
def read_request_line(self):
# HTTP/1.1 connections are persistent by default. If a client
# requests a page, then idles (leaves the connection open),
@@ -599,13 +599,13 @@ class HTTPRequest(object):
# (although your TCP stack might suffer for it: cf Apache's history
# with FIN_WAIT_2).
request_line = self.rfile.readline()
-
+
# Set started_request to True so communicate() knows to send 408
# from here on out.
self.started_request = True
if not request_line:
return False
-
+
if request_line == CRLF:
# RFC 2616 sec 4.1: "...if the server is reading the protocol
# stream at the beginning of a message and receives a CRLF
@@ -614,11 +614,11 @@ class HTTPRequest(object):
request_line = self.rfile.readline()
if not request_line:
return False
-
+
if not request_line.endswith(CRLF):
self.simple_response("400 Bad Request", "HTTP requires CRLF terminators")
return False
-
+
try:
method, uri, req_protocol = request_line.strip().split(SPACE, 2)
# The [x:y] slicing is necessary for byte strings to avoid getting ord's
@@ -626,24 +626,24 @@ class HTTPRequest(object):
except ValueError:
self.simple_response("400 Bad Request", "Malformed Request-Line")
return False
-
+
self.uri = uri
self.method = method
-
+
# uri may be an abs_path (including "http://host.domain.tld");
scheme, authority, path = self.parse_request_uri(uri)
if NUMBER_SIGN in path:
self.simple_response("400 Bad Request",
"Illegal #fragment in Request-URI.")
return False
-
+
if scheme:
self.scheme = scheme
-
+
qs = EMPTY
if QUESTION_MARK in path:
path, qs = path.split(QUESTION_MARK, 1)
-
+
# Unquote the path+params (e.g. "/this%20path" -> "/this path").
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
#
@@ -659,11 +659,11 @@ class HTTPRequest(object):
return False
path = b"%2F".join(atoms)
self.path = path
-
+
# Note that, like wsgiref and most other HTTP servers,
# we "% HEX HEX"-unquote the path but not the query string.
self.qs = qs
-
+
# Compare request and server HTTP protocol versions, in case our
# server does not support the requested protocol. Limit our output
# to min(req, server). We want the following output:
@@ -678,7 +678,7 @@ class HTTPRequest(object):
# only return 505 if the _major_ version is different.
# The [x:y] slicing is necessary for byte strings to avoid getting ord's
sp = int(self.server.protocol[5:6]), int(self.server.protocol[7:8])
-
+
if sp[0] != rp[0]:
self.simple_response("505 HTTP Version Not Supported")
return False
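
The [x:y] slicing in this hunk matters because, on Python 3, indexing a bytes
object yields an integer while slicing yields a one-byte bytes object that
int() can parse; plain indexing would silently turn the version digits into
their ordinals::

    rp_line = b"GET / HTTP/1.1\r\n"
    req_protocol = rp_line.strip().split(b" ", 2)[2]          # b"HTTP/1.1"
    assert req_protocol[5] == 49                              # ord('1'), not 1
    assert (int(req_protocol[5:6]), int(req_protocol[7:8])) == (1, 1)
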
@@ -689,7 +689,7 @@ class HTTPRequest(object):
def read_request_headers(self):
"""Read self.rfile into self.inheaders. Return success."""
-
+
# then all the http headers
try:
read_headers(self.rfile, self.inheaders)
@@ -697,14 +697,14 @@ class HTTPRequest(object):
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
-
+
mrbs = self.server.max_request_body_size
if mrbs and int(self.inheaders.get(b"Content-Length", 0)) > mrbs:
self.simple_response("413 Request Entity Too Large",
"The entity sent with the request exceeds the maximum "
"allowed bytes.")
return False
-
+
# Persistent connection support
if self.response_protocol == "HTTP/1.1":
# Both server and client are HTTP/1.1
@@ -714,16 +714,16 @@ class HTTPRequest(object):
# Either the server or client (or both) are HTTP/1.0
if self.inheaders.get(b"Connection", b"") != b"Keep-Alive":
self.close_connection = True
-
+
# Transfer-Encoding support
te = None
if self.response_protocol == "HTTP/1.1":
te = self.inheaders.get(b"Transfer-Encoding")
if te:
te = [x.strip().lower() for x in te.split(b",") if x.strip()]
-
+
self.chunked_read = False
-
+
if te:
for enc in te:
if enc == b"chunked":
@@ -734,7 +734,7 @@ class HTTPRequest(object):
self.simple_response("501 Unimplemented")
self.close_connection = True
return False
-
+
# From PEP 333:
# "Servers and gateways that implement HTTP 1.1 must provide
# transparent support for HTTP 1.1's "expect/continue" mechanism.
@@ -754,7 +754,7 @@ class HTTPRequest(object):
# but it seems like it would be a big slowdown for such a rare case.
if self.inheaders.get(b"Expect", b"") == b"100-continue":
# Don't use simple_response here, because it emits headers
- # we don't want. See http://www.cherrypy.org/ticket/951
+ # we don't want. See https://bitbucket.org/cherrypy/cherrypy/issue/951
msg = self.server.protocol.encode('ascii') + b" 100 Continue\r\n\r\n"
try:
self.conn.wfile.write(msg)
@@ -763,22 +763,22 @@ class HTTPRequest(object):
if x.args[0] not in socket_errors_to_ignore:
raise
return True
-
+
def parse_request_uri(self, uri):
"""Parse a Request-URI into (scheme, authority, path).
-
+
Note that Request-URI's must be one of::
-
+
Request-URI = "*" | absoluteURI | abs_path | authority
-
+
Therefore, a Request-URI which starts with a double forward-slash
cannot be a "net_path"::
-
+
net_path = "//" authority [ abs_path ]
-
+
Instead, it must be interpreted as an "abs_path" with an empty first
path segment::
-
+
abs_path = "/" path_segments
path_segments = segment *( "/" segment )
segment = *pchar *( ";" param )
@@ -801,11 +801,11 @@ class HTTPRequest(object):
else:
# An authority.
return None, uri, None
-
+
def unquote_bytes(self, path):
- """takes quoted string and unquotes % encoded values"""
+ """takes quoted string and unquotes % encoded values"""
res = path.split(b'%')
-
+
for i in range(1, len(res)):
item = res[i]
try:
@@ -813,7 +813,7 @@ class HTTPRequest(object):
except ValueError:
raise
return b''.join(res)
-
+
def respond(self):
"""Call the gateway and write its iterable output."""
mrbs = self.server.max_request_body_size
@@ -828,15 +828,15 @@ class HTTPRequest(object):
"allowed bytes.")
return
self.rfile = KnownLengthRFile(self.conn.rfile, cl)
-
+
self.server.gateway(self).respond()
-
+
if (self.ready and not self.sent_headers):
self.sent_headers = True
self.send_headers()
if self.chunked_write:
self.conn.wfile.write(b"0\r\n\r\n")
-
+
def simple_response(self, status, msg=""):
"""Write a simple response back to the client."""
status = str(status)
@@ -844,7 +844,7 @@ class HTTPRequest(object):
bytes(status, "ISO-8859-1") + CRLF,
bytes("Content-Length: %s\r\n" % len(msg), "ISO-8859-1"),
b"Content-Type: text/plain\r\n"]
-
+
if status[:3] in ("413", "414"):
# Request Entity Too Large / Request-URI Too Long
self.close_connection = True
@@ -857,20 +857,20 @@ class HTTPRequest(object):
# HTTP/1.0 had no 413/414 status nor Connection header.
# Emit 400 instead and trust the message body is enough.
status = "400 Bad Request"
-
+
buf.append(CRLF)
if msg:
if isinstance(msg, unicodestr):
msg = msg.encode("ISO-8859-1")
buf.append(msg)
-
+
try:
self.conn.wfile.write(b"".join(buf))
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
-
+
def write(self, chunk):
"""Write unbuffered data to the client."""
if self.chunked_write and chunk:
@@ -878,15 +878,15 @@ class HTTPRequest(object):
self.conn.wfile.write(EMPTY.join(buf))
else:
self.conn.wfile.write(chunk)
-
+
def send_headers(self):
"""Assert, process, and send the HTTP response message-headers.
-
+
You must set self.status, and self.outheaders before calling this.
"""
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])
-
+
if status == 413:
# Request Entity Too Large. Close conn to avoid garbage.
self.close_connection = True
@@ -905,7 +905,7 @@ class HTTPRequest(object):
else:
# Closing the conn is the only way to determine len.
self.close_connection = True
-
+
if b"connection" not in hkeys:
if self.response_protocol == 'HTTP/1.1':
# Both server and client are HTTP/1.1 or better
@@ -915,7 +915,7 @@ class HTTPRequest(object):
# Server and/or client are HTTP/1.0
if not self.close_connection:
self.outheaders.append((b"Connection", b"Keep-Alive"))
-
+
if (not self.close_connection) and (not self.chunked_read):
# Read any remaining request body data on the socket.
# "If an origin server receives a request that does not include an
@@ -932,15 +932,15 @@ class HTTPRequest(object):
remaining = getattr(self.rfile, 'remaining', 0)
if remaining > 0:
self.rfile.read(remaining)
-
+
if b"date" not in hkeys:
self.outheaders.append(
(b"Date", email.utils.formatdate(usegmt=True).encode('ISO-8859-1')))
-
+
if b"server" not in hkeys:
self.outheaders.append(
(b"Server", self.server.server_name.encode('ISO-8859-1')))
-
+
buf = [self.server.protocol.encode('ascii') + SPACE + self.status + CRLF]
for k, v in self.outheaders:
buf.append(k + COLON + SPACE + v + CRLF)
@@ -965,12 +965,12 @@ class CP_BufferedWriter(io.BufferedWriter):
self._checkClosed()
if isinstance(b, str):
raise TypeError("can't write str to binary stream")
-
+
with self._write_lock:
self._write_buf.extend(b)
self._flush_unlocked()
return len(b)
-
+
def _flush_unlocked(self):
self._checkClosed("flush of closed file")
while self._write_buf:
@@ -991,26 +991,26 @@ def CP_makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
class HTTPConnection(object):
"""An HTTP connection (active socket).
-
+
server: the Server object which received this connection.
socket: the raw socket object (usually TCP) for this connection.
makefile: a fileobject class for reading from the socket.
"""
-
+
remote_addr = None
remote_port = None
ssl_env = None
rbufsize = DEFAULT_BUFFER_SIZE
wbufsize = DEFAULT_BUFFER_SIZE
RequestHandlerClass = HTTPRequest
-
+
def __init__(self, server, sock, makefile=CP_makefile):
self.server = server
self.socket = sock
self.rfile = makefile(sock, "rb", self.rbufsize)
self.wfile = makefile(sock, "wb", self.wbufsize)
self.requests_seen = 0
-
+
def communicate(self):
"""Read each request and respond appropriately."""
request_seen = False
@@ -1021,7 +1021,7 @@ class HTTPConnection(object):
# get written to the previous request.
req = None
req = self.RequestHandlerClass(self.server, self)
-
+
# This order of operations should guarantee correct pipelining.
req.parse_request()
if self.server.stats['Enabled']:
@@ -1031,7 +1031,7 @@ class HTTPConnection(object):
# probably already made a simple_response). Return and
# let the conn close.
return
-
+
request_seen = True
req.respond()
if req.close_connection:
@@ -1044,7 +1044,7 @@ class HTTPConnection(object):
# Don't error if we're between requests; only error
# if 1) no request has been started at all, or 2) we're
# in the middle of a request.
- # See http://www.cherrypy.org/ticket/853
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/853
if (not request_seen) or (req and req.started_request):
# Don't bother writing the 408 if the response
# has already started being written.
@@ -1086,13 +1086,13 @@ class HTTPConnection(object):
except FatalSSLAlert:
# Close the connection.
return
-
+
linger = False
-
+
def close(self):
"""Close the socket underlying this connection."""
self.rfile.close()
-
+
if not self.linger:
# Python's socket module does NOT call close on the kernel socket
# when you call socket.close(). We do so manually here because we
@@ -1125,29 +1125,29 @@ _SHUTDOWNREQUEST = None
class WorkerThread(threading.Thread):
"""Thread which continuously polls a Queue for Connection objects.
-
+
Due to the timing issues of polling a Queue, a WorkerThread does not
check its own 'ready' flag after it has started. To stop the thread,
it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
(one for each running WorkerThread).
"""
-
+
conn = None
"""The current connection pulled off the Queue, or None."""
-
+
server = None
"""The HTTP Server which spawned this thread, and which owns the
Queue and is placing active connections into it."""
-
+
ready = False
"""A simple flag for the calling server to know when this thread
has begun polling the Queue."""
-
-
+
+
def __init__(self, server):
self.ready = False
self.server = server
-
+
self.requests_seen = 0
self.bytes_read = 0
self.bytes_written = 0
@@ -1162,7 +1162,7 @@ class WorkerThread(threading.Thread):
'Write Throughput': lambda s: s['Bytes Written'](s) / (s['Work Time'](s) or 1e-6),
}
threading.Thread.__init__(self)
-
+
def run(self):
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
@@ -1171,7 +1171,7 @@ class WorkerThread(threading.Thread):
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
-
+
self.conn = conn
if self.server.stats['Enabled']:
self.start_time = time.time()
@@ -1193,11 +1193,11 @@ class WorkerThread(threading.Thread):
class ThreadPool(object):
"""A Request Queue for an HTTPServer which pools threads.
-
+
ThreadPool objects must provide min, get(), put(obj), start()
and stop(timeout) attributes.
"""
-
+
def __init__(self, server, min=10, max=-1):
self.server = server
self.min = min
@@ -1205,7 +1205,7 @@ class ThreadPool(object):
self._threads = []
self._queue = queue.Queue()
self.get = self._queue.get
-
+
def start(self):
"""Start the pool of threads."""
for i in range(self.min):
@@ -1216,17 +1216,17 @@ class ThreadPool(object):
for worker in self._threads:
while not worker.ready:
time.sleep(.1)
-
+
def _get_idle(self):
"""Number of worker threads which are idle. Read-only."""
return len([t for t in self._threads if t.conn is None])
idle = property(_get_idle, doc=_get_idle.__doc__)
-
+
def put(self, obj):
self._queue.put(obj)
if obj is _SHUTDOWNREQUEST:
return
-
+
def grow(self, amount):
"""Spawn new worker threads (not above self.max)."""
for i in range(amount):
@@ -1236,7 +1236,7 @@ class ThreadPool(object):
worker.setName("CP Server " + worker.getName())
self._threads.append(worker)
worker.start()
-
+
def shrink(self, amount):
"""Kill off worker threads (not below self.min)."""
# Grow/shrink the pool if necessary.
@@ -1245,7 +1245,7 @@ class ThreadPool(object):
if not t.isAlive():
self._threads.remove(t)
amount -= 1
-
+
if amount > 0:
for i in range(min(amount, len(self._threads) - self.min)):
# Put a number of shutdown requests on the queue equal
@@ -1253,13 +1253,13 @@ class ThreadPool(object):
# that worker will terminate and be culled from our list
# in self.put.
self._queue.put(_SHUTDOWNREQUEST)
-
+
def stop(self, timeout=5):
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
-
+
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout and timeout >= 0:
@@ -1287,10 +1287,10 @@ class ThreadPool(object):
worker.join()
except (AssertionError,
# Ignore repeated Ctrl-C.
- # See http://www.cherrypy.org/ticket/691.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/691.
KeyboardInterrupt):
pass
-
+
def _get_qsize(self):
return self._queue.qsize()
qsize = property(_get_qsize)
@@ -1302,6 +1302,14 @@ try:
except ImportError:
try:
from ctypes import windll, WinError
+ import ctypes.wintypes
+ _SetHandleInformation = windll.kernel32.SetHandleInformation
+ _SetHandleInformation.argtypes = [
+ ctypes.wintypes.HANDLE,
+ ctypes.wintypes.DWORD,
+ ctypes.wintypes.DWORD,
+ ]
+ _SetHandleInformation.restype = ctypes.wintypes.BOOL
except ImportError:
def prevent_socket_inheritance(sock):
"""Dummy function, since neither fcntl nor ctypes are available."""
@@ -1309,7 +1317,7 @@ except ImportError:
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (Windows)."""
- if not windll.kernel32.SetHandleInformation(sock.fileno(), 1, 0):
+ if not _SetHandleInformation(sock.fileno(), 1, 0):
raise WinError()
else:
def prevent_socket_inheritance(sock):
@@ -1321,98 +1329,98 @@ else:
class SSLAdapter(object):
"""Base class for SSL driver library adapters.
-
+
Required methods:
-
+
* ``wrap(sock) -> (wrapped socket, ssl environ dict)``
* ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) -> socket file object``
"""
-
+
def __init__(self, certificate, private_key, certificate_chain=None):
self.certificate = certificate
self.private_key = private_key
self.certificate_chain = certificate_chain
-
+
def wrap(self, sock):
raise NotImplemented
-
+
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
raise NotImplemented
class HTTPServer(object):
"""An HTTP server."""
-
+
_bind_addr = "127.0.0.1"
_interrupt = None
-
+
gateway = None
"""A Gateway instance."""
-
+
minthreads = None
"""The minimum number of worker threads to create (default 10)."""
-
+
maxthreads = None
"""The maximum number of worker threads to create (default -1 = no limit)."""
-
+
server_name = None
"""The name of the server; defaults to socket.gethostname()."""
-
+
protocol = "HTTP/1.1"
"""The version string to write in the Status-Line of all HTTP responses.
-
+
For example, "HTTP/1.1" is the default. This also limits the supported
features used in the response."""
-
+
request_queue_size = 5
"""The 'backlog' arg to socket.listen(); max queued connections (default 5)."""
-
+
shutdown_timeout = 5
"""The total time, in seconds, to wait for worker threads to cleanly exit."""
-
+
timeout = 10
"""The timeout in seconds for accepted connections (default 10)."""
-
+
version = "CherryPy/3.2.3"
"""A version string for the HTTPServer."""
-
+
software = None
"""The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
-
+
If None, this defaults to ``'%s Server' % self.version``."""
-
+
ready = False
"""An internal flag which marks whether the socket is accepting connections."""
-
+
max_request_header_size = 0
"""The maximum size, in bytes, for request headers, or 0 for no limit."""
-
+
max_request_body_size = 0
"""The maximum size, in bytes, for request bodies, or 0 for no limit."""
-
+
nodelay = True
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
-
+
ConnectionClass = HTTPConnection
"""The class to use for handling HTTP connections."""
-
+
ssl_adapter = None
"""An instance of SSLAdapter (or a subclass).
-
+
You must have the corresponding SSL driver library installed."""
-
+
def __init__(self, bind_addr, gateway, minthreads=10, maxthreads=-1,
server_name=None):
self.bind_addr = bind_addr
self.gateway = gateway
-
+
self.requests = ThreadPool(self, min=minthreads or 1, max=maxthreads)
-
+
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.clear_stats()
-
+
def clear_stats(self):
self._start_time = None
self._run_time = 0
@@ -1443,17 +1451,17 @@ class HTTPServer(object):
'Worker Threads': {},
}
logging.statistics["CherryPy HTTPServer %d" % id(self)] = self.stats
-
+
def runtime(self):
if self._start_time is None:
return self._run_time
else:
return self._run_time + (time.time() - self._start_time)
-
+
def __str__(self):
return "%s.%s(%r)" % (self.__module__, self.__class__.__name__,
self.bind_addr)
-
+
def _get_bind_addr(self):
return self._bind_addr
def _set_bind_addr(self, value):
@@ -1474,16 +1482,16 @@ class HTTPServer(object):
self._bind_addr = value
bind_addr = property(_get_bind_addr, _set_bind_addr,
doc="""The interface on which to listen for connections.
-
+
For TCP sockets, a (host, port) tuple. Host values may be any IPv4
or IPv6 address, or any valid hostname. The string 'localhost' is a
synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
The string '0.0.0.0' is a special IPv4 entry meaning "any active
interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
IPv6. The empty string or None are not allowed.
-
+
For UNIX sockets, supply the filename as a string.""")
-
+
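A few illustrative values for ``bind_addr`` on an HTTPServer instance, matching the docstring above (ports and paths are placeholders)::

    server.bind_addr = ('127.0.0.1', 8080)     # TCP, loopback only
    server.bind_addr = ('::', 8080)            # TCP, any IPv6 interface
    server.bind_addr = '/var/run/myapp.sock'   # AF_UNIX socket file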
def start(self):
"""Run the server forever."""
# We don't have to trap KeyboardInterrupt or SystemExit here,
@@ -1491,22 +1499,22 @@ class HTTPServer(object):
# If you're using this server with another framework, you should
# trap those exceptions in whatever code block calls start().
self._interrupt = None
-
+
if self.software is None:
self.software = "%s Server" % self.version
-
+
# Select the appropriate socket
if isinstance(self.bind_addr, basestring):
# AF_UNIX socket
-
+
# So we can reuse the socket...
try: os.unlink(self.bind_addr)
except: pass
-
+
# So everyone can access the socket...
try: os.chmod(self.bind_addr, 511) # 0777
except: pass
-
+
info = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
else:
# AF_INET or AF_INET6 socket
@@ -1522,7 +1530,7 @@ class HTTPServer(object):
else:
info = [(socket.AF_INET, socket.SOCK_STREAM,
0, "", self.bind_addr)]
-
+
self.socket = None
msg = "No socket could be created"
for res in info:
@@ -1537,14 +1545,14 @@ class HTTPServer(object):
break
if not self.socket:
raise socket.error(msg)
-
+
# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)
-
+
# Create worker threads
self.requests.start()
-
+
self.ready = True
self._start_time = time.time()
while self.ready:
@@ -1578,12 +1586,12 @@ class HTTPServer(object):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.nodelay and not isinstance(self.bind_addr, str):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
-
+
if self.ssl_adapter is not None:
self.socket = self.ssl_adapter.bind(self.socket)
-
+
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
- # activate dual-stack. See http://www.cherrypy.org/ticket/871.
+ # activate dual-stack. See https://bitbucket.org/cherrypy/cherrypy/issue/871.
if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6
and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')):
try:
@@ -1592,9 +1600,9 @@ class HTTPServer(object):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
-
+
self.socket.bind(self.bind_addr)
-
+
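The dual-stack toggle referenced in the hunk above amounts to clearing ``IPV6_V6ONLY`` before the bind; a standalone sketch, assuming the platform exposes the constant (the port is a placeholder)::

    import socket

    s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    try:
        s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
    except (AttributeError, socket.error):
        # The option is not available in this machine's TCP stack.
        pass
    s.bind(('::', 8080))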
def tick(self):
"""Accept a new connection and put it on the Queue."""
try:
@@ -1603,11 +1611,11 @@ class HTTPServer(object):
self.stats['Accepts'] += 1
if not self.ready:
return
-
+
prevent_socket_inheritance(s)
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
-
+
makefile = CP_makefile
ssl_env = {}
# if ssl cert and key are set, we try to be a secure HTTP server
@@ -1621,7 +1629,7 @@ class HTTPServer(object):
"Content-Length: %s\r\n" % len(msg),
"Content-Type: text/plain\r\n\r\n",
msg]
-
+
wfile = makefile(s, "wb", DEFAULT_BUFFER_SIZE)
try:
wfile.write("".join(buf).encode('ISO-8859-1'))
@@ -1636,9 +1644,9 @@ class HTTPServer(object):
# Re-apply our timeout since we may have a new socket object
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
-
+
conn = self.ConnectionClass(self, s, makefile)
-
+
if not isinstance(self.bind_addr, basestring):
# optional values
# Until we do DNS lookups, omit REMOTE_HOST
@@ -1652,9 +1660,9 @@ class HTTPServer(object):
addr = ('::', 0)
conn.remote_addr = addr[0]
conn.remote_port = addr[1]
-
+
conn.ssl_env = ssl_env
-
+
self.requests.put(conn)
except socket.timeout:
# The only reason for the timeout in start() is so we can
@@ -1670,17 +1678,17 @@ class HTTPServer(object):
# is received during the accept() call; all docs say retry
# the call, and I *think* I'm reading it right that Python
# will then go ahead and poll for and handle the signal
- # elsewhere. See http://www.cherrypy.org/ticket/707.
+ # elsewhere. See https://bitbucket.org/cherrypy/cherrypy/issue/707.
return
if x.args[0] in socket_errors_nonblocking:
- # Just try again. See http://www.cherrypy.org/ticket/479.
+ # Just try again. See https://bitbucket.org/cherrypy/cherrypy/issue/479.
return
if x.args[0] in socket_errors_to_ignore:
# Our socket was closed.
- # See http://www.cherrypy.org/ticket/686.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/686.
return
raise
-
+
def _get_interrupt(self):
return self._interrupt
def _set_interrupt(self, interrupt):
@@ -1690,14 +1698,14 @@ class HTTPServer(object):
interrupt = property(_get_interrupt, _set_interrupt,
doc="Set this to an Exception instance to "
"interrupt the server.")
-
+
def stop(self):
"""Gracefully shutdown a server that is serving forever."""
self.ready = False
if self._start_time is not None:
self._run_time += (time.time() - self._start_time)
self._start_time = None
-
+
sock = getattr(self, "socket", None)
if sock:
if not isinstance(self.bind_addr, basestring):
@@ -1708,7 +1716,7 @@ class HTTPServer(object):
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
# Changed to use error code and not message
- # See http://www.cherrypy.org/ticket/860.
+ # See https://bitbucket.org/cherrypy/cherrypy/issue/860.
raise
else:
# Note that we're explicitly NOT using AI_PASSIVE,
@@ -1732,16 +1740,16 @@ class HTTPServer(object):
if hasattr(sock, "close"):
sock.close()
self.socket = None
-
+
self.requests.stop(self.shutdown_timeout)
class Gateway(object):
"""A base class to interface HTTPServer with other systems, such as WSGI."""
-
+
def __init__(self, req):
self.req = req
-
+
def respond(self):
"""Process the current request. Must be overridden in a subclass."""
raise NotImplemented
@@ -1760,7 +1768,7 @@ def get_ssl_adapter_class(name='builtin'):
last_dot = adapter.rfind(".")
attr_name = adapter[last_dot + 1:]
mod_path = adapter[:last_dot]
-
+
try:
mod = sys.modules[mod_path]
if mod is None:
@@ -1768,14 +1776,14 @@ def get_ssl_adapter_class(name='builtin'):
except KeyError:
# The last [''] is important.
mod = __import__(mod_path, globals(), locals(), [''])
-
+
# Let an AttributeError propagate outward.
try:
adapter = getattr(mod, attr_name)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
-
+
return adapter
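Typical use of the resolver above, assuming the 'builtin' name maps to the stdlib-based adapter distributed with the server (the certificate paths are placeholders)::

    adapter_cls = get_ssl_adapter_class('builtin')
    server.ssl_adapter = adapter_cls('server.crt', 'server.key')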
# -------------------------------- WSGI Stuff -------------------------------- #
@@ -1783,26 +1791,26 @@ def get_ssl_adapter_class(name='builtin'):
class CherryPyWSGIServer(HTTPServer):
"""A subclass of HTTPServer which calls a WSGI application."""
-
+
wsgi_version = (1, 0)
"""The version of WSGI to produce."""
-
+
def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None,
max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5):
self.requests = ThreadPool(self, min=numthreads or 1, max=max)
self.wsgi_app = wsgi_app
self.gateway = wsgi_gateways[self.wsgi_version]
-
+
self.bind_addr = bind_addr
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.request_queue_size = request_queue_size
-
+
self.timeout = timeout
self.shutdown_timeout = shutdown_timeout
self.clear_stats()
-
+
def _get_numthreads(self):
return self.requests.min
def _set_numthreads(self, value):
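The usual standalone pattern for the class above (the app and address are placeholders)::

    def my_wsgi_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['Hello world!']

    server = CherryPyWSGIServer(('0.0.0.0', 8080), my_wsgi_app, numthreads=10)
    try:
        server.start()
    except KeyboardInterrupt:
        server.stop()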
@@ -1812,17 +1820,17 @@ class CherryPyWSGIServer(HTTPServer):
class WSGIGateway(Gateway):
"""A base class to interface HTTPServer with WSGI."""
-
+
def __init__(self, req):
self.req = req
self.started_response = False
self.env = self.get_environ()
self.remaining_bytes_out = None
-
+
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
raise NotImplemented
-
+
def respond(self):
"""Process the current request."""
response = self.req.server.wsgi_app(self.env, self.start_response)
@@ -1841,7 +1849,7 @@ class WSGIGateway(Gateway):
finally:
if hasattr(response, "close"):
response.close()
-
+
def start_response(self, status, headers, exc_info = None):
"""WSGI callable to begin the HTTP response."""
# "The application may call start_response more than once,
@@ -1850,7 +1858,7 @@ class WSGIGateway(Gateway):
raise AssertionError("WSGI start_response called a second "
"time with no exc_info.")
self.started_response = True
-
+
# "if exc_info is provided, and the HTTP headers have already been
# sent, start_response must raise an error, and should raise the
# exc_info tuple."
@@ -1876,18 +1884,18 @@ class WSGIGateway(Gateway):
if k.lower() == 'content-length':
self.remaining_bytes_out = int(v)
self.req.outheaders.append((k.encode('ISO-8859-1'), v.encode('ISO-8859-1')))
-
+
return self.write
-
+
def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
-
+
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError("WSGI write called before start_response.")
-
+
chunklen = len(chunk)
rbo = self.remaining_bytes_out
if rbo is not None and chunklen > rbo:
@@ -1900,13 +1908,13 @@ class WSGIGateway(Gateway):
# Dang. We have probably already sent data. Truncate the chunk
# to fit (so the client doesn't hang) and raise an error later.
chunk = chunk[:rbo]
-
+
if not self.req.sent_headers:
self.req.sent_headers = True
self.req.send_headers()
-
+
self.req.write(chunk)
-
+
if rbo is not None:
rbo -= chunklen
if rbo < 0:
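To make the bookkeeping above concrete, an illustrative trace with invented numbers (only the behaviour shown in this hunk is asserted)::

    # start_response saw 'Content-Length: 10'   ->  remaining_bytes_out = 10
    # write(chunk) with len(chunk) == 14        ->  chunklen > rbo
    #   - headers not yet sent: a ValueError is raised immediately
    #   - headers already sent: chunk is truncated to 10 bytes and written,
    #     and the overrun is flagged once rbo goes negative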
@@ -1916,7 +1924,7 @@ class WSGIGateway(Gateway):
class WSGIGateway_10(WSGIGateway):
"""A Gateway class to interface HTTPServer with WSGI 1.0.x."""
-
+
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
@@ -1944,19 +1952,19 @@ class WSGIGateway_10(WSGIGateway):
'wsgi.url_scheme': req.scheme.decode('ISO-8859-1'),
'wsgi.version': (1, 0),
}
-
+
if isinstance(req.server.bind_addr, basestring):
# AF_UNIX. This isn't really allowed by WSGI, which doesn't
# address unix domain sockets. But it's better than nothing.
env["SERVER_PORT"] = ""
else:
env["SERVER_PORT"] = str(req.server.bind_addr[1])
-
+
# Request headers
for k, v in req.inheaders.items():
k = k.decode('ISO-8859-1').upper().replace("-", "_")
env["HTTP_" + k] = v.decode('ISO-8859-1')
-
+
# CONTENT_TYPE/CONTENT_LENGTH
ct = env.pop("HTTP_CONTENT_TYPE", None)
if ct is not None:
@@ -1964,27 +1972,27 @@ class WSGIGateway_10(WSGIGateway):
cl = env.pop("HTTP_CONTENT_LENGTH", None)
if cl is not None:
env["CONTENT_LENGTH"] = cl
-
+
if req.conn.ssl_env:
env.update(req.conn.ssl_env)
-
+
return env
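An isolated sketch of the header-name transform performed in the loop above; the helper name is invented for illustration::

    def header_to_environ_key(name):
        # Uppercase, dashes to underscores, HTTP_ prefix; Content-Type and
        # Content-Length are then re-homed without the prefix, as above.
        key = "HTTP_" + name.upper().replace("-", "_")
        return {"HTTP_CONTENT_TYPE": "CONTENT_TYPE",
                "HTTP_CONTENT_LENGTH": "CONTENT_LENGTH"}.get(key, key)

    header_to_environ_key("X-Forwarded-For")  # -> 'HTTP_X_FORWARDED_FOR'
    header_to_environ_key("Content-Type")     # -> 'CONTENT_TYPE'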
class WSGIGateway_u0(WSGIGateway_10):
"""A Gateway class to interface HTTPServer with WSGI u.0.
-
+
WSGI u.0 is an experimental protocol, which uses unicode for keys and values
in both Python 2 and Python 3.
"""
-
+
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
env_10 = WSGIGateway_10.get_environ(self)
env = env_10.copy()
env['wsgi.version'] = ('u', 0)
-
+
# Request-URI
env.setdefault('wsgi.url_encoding', 'utf-8')
try:
@@ -1996,7 +2004,7 @@ class WSGIGateway_u0(WSGIGateway_10):
env['wsgi.url_encoding'] = 'ISO-8859-1'
env["PATH_INFO"] = env_10["PATH_INFO"]
env["QUERY_STRING"] = env_10["QUERY_STRING"]
-
+
return env
wsgi_gateways = {
@@ -2006,24 +2014,24 @@ wsgi_gateways = {
class WSGIPathInfoDispatcher(object):
"""A WSGI dispatcher for dispatch based on the PATH_INFO.
-
+
apps: a dict or list of (path_prefix, app) pairs.
"""
-
+
def __init__(self, apps):
try:
apps = list(apps.items())
except AttributeError:
pass
-
+
# Sort the apps by len(path), descending
apps.sort()
apps.reverse()
-
+
# The path_prefix strings must start, but not end, with a slash.
# Use "" instead of "/".
self.apps = [(p.rstrip("/"), a) for p, a in apps]
-
+
def __call__(self, environ, start_response):
path = environ["PATH_INFO"] or "/"
for p, app in self.apps:
@@ -2033,7 +2041,7 @@ class WSGIPathInfoDispatcher(object):
environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p
environ["PATH_INFO"] = path[len(p):]
return app(environ, start_response)
-
+
start_response('404 Not Found', [('Content-Type', 'text/plain'),
('Content-Length', '0')])
return ['']
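Typical use of the dispatcher above, paired with the WSGI server defined earlier in this module (the apps are trivial placeholders)::

    def main_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['main']

    def api_app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['api']

    dispatcher = WSGIPathInfoDispatcher({'/': main_app, '/api': api_app})
    server = CherryPyWSGIServer(('127.0.0.1', 8080), dispatcher)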
diff --git a/setup.cfg b/setup.cfg
index e77318d3..f12f0f55 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,11 @@
[sdist]
formats=gztar
+
+[nosetests]
+cover-erase=True
+with-coverage=True
+where=cherrypy
+cover-package=cherrypy
+logging-filter=cherrypy
+verbosity=2
+nocapture=True
diff --git a/sphinx/source/concepts/config.rst b/sphinx/source/concepts/config.rst
index 1b8336b0..8fcae773 100644
--- a/sphinx/source/concepts/config.rst
+++ b/sphinx/source/concepts/config.rst
@@ -44,7 +44,7 @@ config entries. Here's an example of passing a dict argument::
'server.socket_port': 80,
})
-The ``server.socket_host`` option n this example determines on which network
+The ``server.socket_host`` option in this example determines on which network
interface CherryPy will listen. The ``server.socket_port`` option declares
the TCP port on which to listen.
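A self-contained version of the dict-argument example the hunk touches (the host and port values here are illustrative)::

    import cherrypy

    cherrypy.config.update({'server.socket_host': '0.0.0.0',
                            'server.socket_port': 80,
                           })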
diff --git a/sphinx/source/concepts/dispatching.rst b/sphinx/source/concepts/dispatching.rst
index b7b9c416..57509f05 100644
--- a/sphinx/source/concepts/dispatching.rst
+++ b/sphinx/source/concepts/dispatching.rst
@@ -12,7 +12,7 @@ Dispatching
request content. All of these implementation-specific issues are hidden
behind the Web interface; their nature cannot be assumed by a client that
only has access through the Web interface.
-
+
`Roy Fielding <http://www.ics.uci.edu/~fielding/pubs/dissertation/evaluation.htm>`_
When you wish to serve a resource on the Web, you never actually serve the
@@ -89,7 +89,7 @@ In this example, the URL ``http://localhost/some/page`` will be mapped to the
``root.some.page`` object. If this object is exposed (or alternatively, its
``index`` method is), it will be called for that URL.
-In our HelloWorld example, adding the ``http://onepage/`` mapping
+In our HelloWorld example, adding the ``http://localhost/onepage/`` mapping
to ``OnePage().index`` could be done like this::
class OnePage(object):
@@ -99,7 +99,7 @@ to ``OnePage().index`` could be done like this::
class HelloWorld(object):
onepage = OnePage()
-
+
def index(self):
return "hello world"
index.exposed = True
@@ -117,7 +117,7 @@ URL that is directly mapped to them. For example::
def foo(self):
return 'Foo!'
foo.exposed = True
-
+
root.foo = foo
In the example, ``root.foo`` contains a function object, named ``foo``. When
@@ -162,10 +162,10 @@ method) can receive additional data from HTML or other forms using
<form action="doLogin" method="post">
<p>Username</p>
- <input type="text" name="username" value=""
+ <input type="text" name="username" value=""
size="15" maxlength="40"/>
<p>Password</p>
- <input type="password" name="password" value=""
+ <input type="password" name="password" value=""
size="10" maxlength="40"/>
<p><input type="submit" value="Login"/></p>
<p><input type="reset" value="Clear"/></p>
@@ -216,12 +216,12 @@ following code::
def blog(self, year, month, day):
...
blog.exposed = True
-
+
root = Root()
So the URL above will be mapped as a call to::
- root.blog('2005', '1', '17')
+ root.blog('2005', '01', '17')
In this case, there is a partial match up to the ``blog`` component. The rest
of the URL can't be found in the mounted object tree. In this case, the
@@ -248,15 +248,15 @@ written the above "blog" example equivalently with a "default" method instead::
def default(self, year, month, day):
...
default.exposed = True
-
+
class Root: pass
-
+
root = Root()
root.blog = Blog()
So the URL ``http://localhost/blog/2005/01/17`` will be mapped as a call to::
- root.blog.default('2005', '1', '17')
+ root.blog.default('2005', '01', '17')
You could achieve the same effect by defining a ``__call__`` method in this
case, but "default" just reads better. ;)
@@ -348,12 +348,12 @@ object for this, which looks a lot like::
class PageHandler(object):
"""Callable which sets response.body."""
-
+
def __init__(self, callable, *args, **kwargs):
self.callable = callable
self.args = args
self.kwargs = kwargs
-
+
def __call__(self):
return self.callable(*self.args, **self.kwargs)
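For context, a simplified sketch of how such an object is used for the blog example above; the wiring shown is an approximation, not a quote from the dispatcher code::

    # The matched callable and the trailing URL segments become the handler...
    request.handler = PageHandler(root.blog.default, '2005', '01', '17')
    # ...which is later invoked to produce the response body.
    body = request.handler()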
diff --git a/sphinx/source/refman/process/plugins/dropprivileges.rst b/sphinx/source/refman/process/plugins/dropprivileges.rst
index daaa1efc..5eab3337 100644
--- a/sphinx/source/refman/process/plugins/dropprivileges.rst
+++ b/sphinx/source/refman/process/plugins/dropprivileges.rst
@@ -12,7 +12,7 @@ starting on a low port (which requires root) and then dropping to another user.
Example::
- DropPrivileges(cherrypy.engine, '/var/run/myapp.pid').subscribe()
+ DropPrivileges(cherrypy.engine, uid=1000, gid=1000).subscribe()
.. currentmodule:: cherrypy.process.plugins
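A slightly fuller sketch of the corrected example; the uid, gid, and umask values are placeholders::

    import cherrypy
    from cherrypy.process.plugins import DropPrivileges

    # Bind to a privileged port as root, then shed privileges at engine start.
    cherrypy.config.update({'server.socket_port': 80})
    DropPrivileges(cherrypy.engine, uid=1000, gid=1000, umask=0o022).subscribe()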