summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMiro Hrončok <miro@hroncok.cz>2018-06-08 18:49:42 +0200
committerMiro Hrončok <miro@hroncok.cz>2018-06-08 18:49:42 +0200
commitbb9bdf7e5b5cb0d3458242c5b8ba6134efb282a4 (patch)
treee3f03e7ce1f234e0c91075da6d0b56040f693162
downloadpaste-git-bb9bdf7e5b5cb0d3458242c5b8ba6134efb282a4.tar.gz
Don't raise StopIteration from generator, return instead
See https://www.python.org/dev/peps/pep-0479/
-rw-r--r--.hgignore14
-rw-r--r--MANIFEST.in10
-rw-r--r--README.rst109
-rw-r--r--docs/DeveloperGuidelines.txt110
-rw-r--r--docs/StyleGuide.txt102
-rw-r--r--docs/_static/paste.css15
-rw-r--r--docs/_templates/layout.html29
-rw-r--r--docs/community/index.txt15
-rw-r--r--docs/community/mailing-list.txt14
-rw-r--r--docs/community/repository.txt10
-rw-r--r--docs/conf.py132
-rw-r--r--docs/default.css394
-rw-r--r--docs/developer-features.txt148
-rw-r--r--docs/do-it-yourself-framework.txt538
-rw-r--r--docs/download/index.txt32
-rw-r--r--docs/future.txt108
-rw-r--r--docs/include/contact.txt5
-rw-r--r--docs/include/reference_header.txt5
-rw-r--r--docs/index.txt69
-rw-r--r--docs/license.txt20
-rw-r--r--docs/modules/auth.auth_tkt.txt14
-rw-r--r--docs/modules/auth.basic.txt11
-rw-r--r--docs/modules/auth.cas.txt11
-rw-r--r--docs/modules/auth.cookie.txt12
-rw-r--r--docs/modules/auth.digest.txt12
-rw-r--r--docs/modules/auth.form.txt10
-rw-r--r--docs/modules/auth.grantip.txt10
-rw-r--r--docs/modules/auth.multi.txt11
-rw-r--r--docs/modules/cascade.txt10
-rw-r--r--docs/modules/cgiapp.txt11
-rw-r--r--docs/modules/cgitb_catcher.txt10
-rw-r--r--docs/modules/debug.debugapp.txt13
-rw-r--r--docs/modules/debug.fsdiff.txt15
-rw-r--r--docs/modules/debug.prints.txt10
-rw-r--r--docs/modules/debug.profile.txt13
-rw-r--r--docs/modules/debug.watchthreads.txt12
-rw-r--r--docs/modules/debug.wdg_validate.txt11
-rw-r--r--docs/modules/errordocument.txt12
-rw-r--r--docs/modules/evalexception.txt9
-rw-r--r--docs/modules/exceptions.txt48
-rw-r--r--docs/modules/fileapp.txt15
-rw-r--r--docs/modules/fixture.txt40
-rw-r--r--docs/modules/gzipper.txt10
-rw-r--r--docs/modules/httpexceptions.txt49
-rw-r--r--docs/modules/httpheaders.txt8
-rw-r--r--docs/modules/httpserver.txt10
-rw-r--r--docs/modules/lint.txt10
-rw-r--r--docs/modules/pony.txt10
-rw-r--r--docs/modules/progress.txt13
-rw-r--r--docs/modules/proxy.txt14
-rw-r--r--docs/modules/recursive.txt10
-rw-r--r--docs/modules/registry.txt13
-rw-r--r--docs/modules/reloader.txt14
-rw-r--r--docs/modules/request.txt19
-rw-r--r--docs/modules/response.txt15
-rw-r--r--docs/modules/session.txt11
-rw-r--r--docs/modules/transaction.txt11
-rw-r--r--docs/modules/translogger.txt10
-rw-r--r--docs/modules/url.txt10
-rw-r--r--docs/modules/urlmap.txt11
-rw-r--r--docs/modules/urlparser.txt14
-rw-r--r--docs/modules/util.import_string.txt12
-rw-r--r--docs/modules/util.multidict.txt11
-rw-r--r--docs/modules/wsgilib.txt18
-rw-r--r--docs/modules/wsgiwrappers.txt10
-rw-r--r--docs/news.txt1075
-rw-r--r--docs/paste-httpserver-threadpool.txt150
-rw-r--r--docs/test_server.ini42
-rw-r--r--docs/testing-applications.txt156
-rw-r--r--docs/url-parsing-with-wsgi.txt304
-rw-r--r--docs/web/default-site.css382
-rw-r--r--docs/web/site.js69
-rw-r--r--docs/web/style.css90
-rw-r--r--paste/__init__.py17
-rw-r--r--paste/auth/__init__.py9
-rw-r--r--paste/auth/auth_tkt.py429
-rw-r--r--paste/auth/basic.py122
-rw-r--r--paste/auth/cas.py99
-rw-r--r--paste/auth/cookie.py405
-rw-r--r--paste/auth/digest.py254
-rw-r--r--paste/auth/form.py149
-rw-r--r--paste/auth/grantip.py114
-rw-r--r--paste/auth/multi.py79
-rw-r--r--paste/auth/open_id.py413
-rw-r--r--paste/cascade.py133
-rw-r--r--paste/cgiapp.py280
-rw-r--r--paste/cgitb_catcher.py121
-rw-r--r--paste/config.py120
-rw-r--r--paste/cowbell/__init__.py104
-rw-r--r--paste/cowbell/bell-ascending.pngbin0 -> 132993 bytes
-rw-r--r--paste/cowbell/bell-descending.pngbin0 -> 124917 bytes
-rw-r--r--paste/debug/__init__.py5
-rwxr-xr-xpaste/debug/debugapp.py79
-rwxr-xr-xpaste/debug/doctest_webapp.py432
-rw-r--r--paste/debug/fsdiff.py408
-rw-r--r--paste/debug/prints.py149
-rw-r--r--paste/debug/profile.py228
-rwxr-xr-xpaste/debug/testserver.py93
-rw-r--r--paste/debug/watchthreads.py347
-rw-r--r--paste/debug/wdg_validate.py118
-rw-r--r--paste/errordocument.py389
-rw-r--r--paste/evalexception/__init__.py7
-rw-r--r--paste/evalexception/evalcontext.py69
-rw-r--r--paste/evalexception/media/MochiKit.packed.js7829
-rw-r--r--paste/evalexception/media/debug.js161
-rw-r--r--paste/evalexception/media/minus.jpgbin0 -> 359 bytes
-rw-r--r--paste/evalexception/media/plus.jpgbin0 -> 361 bytes
-rw-r--r--paste/evalexception/middleware.py618
-rw-r--r--paste/exceptions/__init__.py6
-rw-r--r--paste/exceptions/collector.py523
-rw-r--r--paste/exceptions/errormiddleware.py466
-rw-r--r--paste/exceptions/formatter.py565
-rw-r--r--paste/exceptions/reporter.py141
-rw-r--r--paste/exceptions/serial_number_generator.py129
-rw-r--r--paste/fileapp.py356
-rw-r--r--paste/fixture.py1755
-rw-r--r--paste/flup_session.py108
-rw-r--r--paste/gzipper.py107
-rw-r--r--paste/httpexceptions.py667
-rw-r--r--paste/httpheaders.py1116
-rwxr-xr-xpaste/httpserver.py1430
-rw-r--r--paste/lint.py438
-rw-r--r--paste/modpython.py253
-rw-r--r--paste/pony.py57
-rwxr-xr-xpaste/progress.py222
-rw-r--r--paste/proxy.py289
-rw-r--r--paste/recursive.py406
-rw-r--r--paste/registry.py581
-rw-r--r--paste/reloader.py179
-rw-r--r--paste/request.py428
-rw-r--r--paste/response.py240
-rw-r--r--paste/session.py346
-rw-r--r--paste/transaction.py120
-rw-r--r--paste/translogger.py122
-rw-r--r--paste/url.py478
-rw-r--r--paste/urlmap.py263
-rw-r--r--paste/urlparser.py639
-rw-r--r--paste/util/PySourceColor.py2102
-rw-r--r--paste/util/__init__.py4
-rw-r--r--paste/util/classinit.py42
-rw-r--r--paste/util/classinstance.py38
-rw-r--r--paste/util/converters.py30
-rw-r--r--paste/util/dateinterval.py104
-rw-r--r--paste/util/datetimeutil.py359
-rw-r--r--paste/util/filemixin.py53
-rw-r--r--paste/util/finddata.py98
-rw-r--r--paste/util/findpackage.py26
-rw-r--r--paste/util/import_string.py95
-rw-r--r--paste/util/intset.py515
-rw-r--r--paste/util/ip4.py274
-rw-r--r--paste/util/killthread.py30
-rw-r--r--paste/util/looper.py156
-rw-r--r--paste/util/mimeparse.py160
-rw-r--r--paste/util/multidict.py429
-rw-r--r--paste/util/quoting.py85
-rw-r--r--paste/util/scgiserver.py172
-rw-r--r--paste/util/template.py756
-rw-r--r--paste/util/threadedprint.py250
-rw-r--r--paste/util/threadinglocal.py43
-rw-r--r--paste/wsgilib.py604
-rw-r--r--paste/wsgiwrappers.py590
-rwxr-xr-xregen-docs9
-rw-r--r--setup.cfg8
-rw-r--r--setup.py116
-rw-r--r--tests/__init__.py7
-rwxr-xr-xtests/cgiapp_data/error.cgi3
-rwxr-xr-xtests/cgiapp_data/form.cgi69
-rwxr-xr-xtests/cgiapp_data/ok.cgi5
-rwxr-xr-xtests/cgiapp_data/stderr.cgi8
-rw-r--r--tests/test_auth/__init__.py0
-rw-r--r--tests/test_auth/test_auth_cookie.py46
-rw-r--r--tests/test_auth/test_auth_digest.py93
-rw-r--r--tests/test_cgiapp.py59
-rw-r--r--tests/test_cgitb_catcher.py78
-rw-r--r--tests/test_config.py85
-rw-r--r--tests/test_doctests.py63
-rw-r--r--tests/test_errordocument.py92
-rw-r--r--tests/test_exceptions/__init__.py1
-rw-r--r--tests/test_exceptions/test_error_middleware.py109
-rw-r--r--tests/test_exceptions/test_formatter.py183
-rw-r--r--tests/test_exceptions/test_httpexceptions.py97
-rw-r--r--tests/test_exceptions/test_reporter.py50
-rw-r--r--tests/test_fileapp.py242
-rw-r--r--tests/test_fixture.py28
-rw-r--r--tests/test_grantip.py37
-rw-r--r--tests/test_gzipper.py19
-rw-r--r--tests/test_httpheaders.py159
-rw-r--r--tests/test_httpserver.py45
-rw-r--r--tests/test_import_string.py16
-rw-r--r--tests/test_multidict.py162
-rw-r--r--tests/test_profilemiddleware.py29
-rw-r--r--tests/test_proxy.py12
-rw-r--r--tests/test_recursive.py105
-rw-r--r--tests/test_registry.py314
-rw-r--r--tests/test_request.py66
-rw-r--r--tests/test_request_form.py36
-rw-r--r--tests/test_response.py11
-rw-r--r--tests/test_session.py56
-rw-r--r--tests/test_template.txt136
-rw-r--r--tests/test_urlmap.py53
-rw-r--r--tests/test_urlparser.py178
-rw-r--r--tests/test_util/__init__.py0
-rw-r--r--tests/test_util/test_datetimeutil.py135
-rw-r--r--tests/test_util/test_mimeparse.py235
-rw-r--r--tests/test_util/test_quoting.py28
-rw-r--r--tests/test_wsgilib.py52
-rw-r--r--tests/test_wsgiwrappers.py146
-rw-r--r--tests/urlparser_data/__init__.py1
-rw-r--r--tests/urlparser_data/deep/index.html1
-rw-r--r--tests/urlparser_data/deep/sub/Main.txt1
-rw-r--r--tests/urlparser_data/find_file/dir with spaces/test 4.html1
-rw-r--r--tests/urlparser_data/find_file/index.txt1
-rw-r--r--tests/urlparser_data/find_file/test 3.html1
-rw-r--r--tests/urlparser_data/find_file/test2.html1
-rw-r--r--tests/urlparser_data/hook/__init__.py10
-rw-r--r--tests/urlparser_data/hook/app.py9
-rw-r--r--tests/urlparser_data/hook/index.py9
-rw-r--r--tests/urlparser_data/not_found/__init__.py1
-rw-r--r--tests/urlparser_data/not_found/recur/__init__.py9
-rw-r--r--tests/urlparser_data/not_found/recur/isfound.txt1
-rw-r--r--tests/urlparser_data/not_found/simple/__init__.py3
-rw-r--r--tests/urlparser_data/not_found/simple/found.txt1
-rw-r--r--tests/urlparser_data/not_found/user/__init__.py12
-rw-r--r--tests/urlparser_data/not_found/user/list.py8
-rw-r--r--tests/urlparser_data/python/__init__.py1
-rw-r--r--tests/urlparser_data/python/simpleapp.py5
-rw-r--r--tests/urlparser_data/python/stream.py7
-rw-r--r--tests/urlparser_data/python/sub/__init__.py1
-rw-r--r--tests/urlparser_data/python/sub/simpleapp.py4
-rw-r--r--tests/urlparser_data/secured.txt1
-rw-r--r--tox.ini9
231 files changed, 41584 insertions, 0 deletions
diff --git a/.hgignore b/.hgignore
new file mode 100644
index 0000000..36cfdf7
--- /dev/null
+++ b/.hgignore
@@ -0,0 +1,14 @@
+syntax: glob
+
+.project
+.pydevproject
+.settings
+*.pyc
+*.pyo
+*.log
+*.tmp
+*.egg-info
+dist/
+build/
+docs/_build
+.tox
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..cc3f4ba
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,10 @@
+recursive-include docs *.txt *.css *.js
+include docs/_templates/*.html
+include docs/conf.py
+include docs/test_server.ini
+include regen-docs
+include tox.ini
+recursive-exclude docs/_build/_sources *
+recursive-include docs/_build *.html
+recursive-include tests *.txt *.py *.cgi *.html
+recursive-include paste *.js *.jpg *.png
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..3b2ab8f
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,109 @@
+Paste provides several pieces of "middleware" (or filters) that can be nested
+to build web applications. Each piece of middleware uses the WSGI (`PEP 333`_)
+interface, and should be compatible with other middleware based on those
+interfaces.
+
+.. _PEP 333: http://www.python.org/dev/peps/pep-0333.html
+
+* `Paste project at Bitbucket (source code, bug tracker)
+ <https://bitbucket.org/ianb/paste/>`_
+* `Paste on the Python Cheeseshop (PyPI)
+ <https://pypi.python.org/pypi/Paste>`_
+* `Paste documentation
+ <http://pythonpaste.org/>`_
+
+See also:
+
+* `PasteDeploy <http://pythonpaste.org/deploy/>`_
+* `PasteScript <http://pythonpaste.org/script/>`_
+* `WebTest <http://webtest.pythonpaste.org/>`_
+* `WebOb <http://docs.webob.org/>`_
+
+Includes these features...
+
+Testing
+-------
+
+* A fixture for testing WSGI applications conveniently and in-process,
+ in ``paste.fixture``
+
+* A fixture for testing command-line applications, also in
+ ``paste.fixture``
+
+* Check components for WSGI-compliance in ``paste.lint``
+
+Dispatching
+-----------
+
+* Chain and cascade WSGI applications (returning the first non-error
+ response) in ``paste.cascade``
+
+* Dispatch to several WSGI applications based on URL prefixes, in
+ ``paste.urlmap``
+
+* Allow applications to make subrequests and forward requests
+ internally, in ``paste.recursive``
+
+Web Application
+---------------
+
+* Run CGI programs as WSGI applications in ``paste.cgiapp``
+
+* Traverse files and load WSGI applications from ``.py`` files (or
+ static files), in ``paste.urlparser``
+
+* Serve static directories of files, also in ``paste.urlparser``; also
+ in that module serving from Egg resources using ``pkg_resources``.
+
+Tools
+-----
+
+* Catch HTTP-related exceptions (e.g., ``HTTPNotFound``) and turn them
+ into proper responses in ``paste.httpexceptions``
+
+* Several authentication techniques, including HTTP (Basic and
+ Digest), signed cookies, and CAS single-signon, in the
+ ``paste.auth`` package.
+
+* Create sessions in ``paste.session`` and ``paste.flup_session``
+
+* Gzip responses in ``paste.gzip``
+
+* A wide variety of routines for manipulating WSGI requests and
+ producing responses, in ``paste.request``, ``paste.response`` and
+ ``paste.wsgilib``
+
+Debugging Filters
+-----------------
+
+* Catch (optionally email) errors with extended tracebacks (using
+ Zope/ZPT conventions) in ``paste.exceptions``
+
+* Catch errors presenting a `cgitb
+ <http://docs.python.org/2/library/cgitb.html>`_-based
+ output, in ``paste.cgitb_catcher``.
+
+* Profile each request and append profiling information to the HTML,
+ in ``paste.debug.profile``
+
+* Capture ``print`` output and present it in the browser for
+ debugging, in ``paste.debug.prints``
+
+* Validate all HTML output from applications using the `WDG Validator
+ <http://www.htmlhelp.com/tools/validator/>`_, appending any errors
+ or warnings to the page, in ``paste.debug.wdg_validator``
+
+Other Tools
+-----------
+
+* A file monitor to allow restarting the server when files have been
+ updated (for automatic restarting when editing code) in
+ ``paste.reloader``
+
+* A class for generating and traversing URLs, and creating associated
+ HTML code, in ``paste.url``
+
+The official development repo is at https://bitbucket.org/ianb/paste.
+
+For the latest changes see the `news file
+<http://pythonpaste.org/news.html>`_.
diff --git a/docs/DeveloperGuidelines.txt b/docs/DeveloperGuidelines.txt
new file mode 100644
index 0000000..69f39ee
--- /dev/null
+++ b/docs/DeveloperGuidelines.txt
@@ -0,0 +1,110 @@
+++++++++++++++++++++++++++++
+Python Paste Developer Guide
+++++++++++++++++++++++++++++
+
+Hi. Welcome to Paste. I hope you enjoy your stay here.
+
+I hope to bring together multiple efforts here, for Paste to support
+multiple frameworks and directions, while presenting a fairly
+integrated frontend to users. How to do that? That's an open
+question, and this code is in some ways an exploration.
+
+There's some basic principles:
+
+* Keep stuff decoupled.
+
+* Must be testable. Of course tested is even better than testable.
+
+* Use WSGI standards for communication between decoupled libraries.
+
+* When possible, use HTTP semantics for communicating between
+ libraries (e.g., indicate cachability using the appropriate HTTP
+ headers).
+
+* When possible, use WSGI as a wrapper around web-neutral libraries.
+ For instance, the configuration is a simple library, but the WSGI
+ middleware that puts the configuration in place is really really
+ simple. If it could be used outside of a web context, then having
+ both a library and middleware form is good.
+
+* Entry into frameworks should be easy, but exit should also be easy.
+ Heterogeneous frameworks and applications are the ambition. But we
+ have to get some messiness into Paste before we can try to resolve
+ that messiness.
+
+* When all is said and done, users should be able to ignore much of
+ what we've done and focus on writing their applications, and Stuff
+ Just Works. Documentation is good; stuff that works without user
+ intervention is better.
+
+Developer Info
+==============
+
+Mostly, if there's a problem we can discuss it and work it out, no one
+is going to bite your head off for committing something.
+
+* Framework-like things should go in subpackages, or perhaps in
+ separate distributions entirely (Paste WebKit and Wareweb were
+ extracted for this reason).
+
+* Integrating external servers and frameworks is also interesting, but
+ it's best to introduce that as a requirement instead of including
+ the work here. Paste Script contains several wrappers for external
+ projects (servers in particular).
+
+* Tests are good. We use py.test_, because it is simple. I want to
+ use doctests too, but the infrastructure isn't really there now --
+ but please feel free to use those too. ``unittest`` is kind of
+ annoying, and py.test is both more powerful and easier to write for.
+ Tests should go in the ``tests/`` directory. ``paste.fixture``
+ contains some convenience functions for testing WSGI applications
+ and middleware. Pay particular attention to ``TestApp``.
+
+.. _py.test: http://codespeak.net/py/current/doc/test.html
+
+* If you move something around that someone may be using, keep their
+ imports working and introduce a warning, like::
+
+ def backward_compat_function(*args, **kw):
+ import warnings
+ # Deprecated on 2005 Mar 5
+ warnings.warn('Moved to foo.function', DeprecationWarning, 2)
+ return foo.function(*args, **kw)
+
+* If something is really experimental, put it in your home directory,
+ or make a branch in your home directory. You can make a home
+ directory for yourself, in ``http://svn.w4py.org/home/username``.
+
+* Not everything in the repository or even in the trunk will
+ necessarily go into the release. The release should contain stuff
+ that is tested, documented, and useful. Each module or feature also
+ needs a champion -- someone who will stand by the code, answer
+ questions, etc. It doesn't have to be the original developer, but
+ there has to be *someone*. So when a release is cut, if some
+ modules don't fulfill that they may be left out.
+
+* Try to keep to the `Style Guidelines`_. But if you are bringing in
+ outside work, don't stress out too much about it. Still, if you
+ have a choice, follow that. Those guidelines are meant to represent
+ conventional Python style guides, there's nothing out of the normal
+ there.
+
+.. _Style Guidelines: StyleGuide.html
+
+* Write your docstrings in `restructured text
+ <http://docutils.sourceforge.net/rst.html>`_. As time goes on, I
+ want to rely on docstrings more for documentation, with shorter
+ narrative documentation pointing into the documentation generated
+ from docstrings.
+
+ The generation is done with `Pudge <http://pudge.lesscode.org/>`_.
+ To try generating the documentation, this should work::
+
+ $ easy_install svn://lesscode.org/buildutils/trunk \
+ svn://lesscode.org/pudge/trunk
+ $ cd Paste
+ $ python setup.py pudge
+
+ This will install Pudge and `buildutils
+ <http://buildutils.lesscode.org/>`_, and then generate the
+ documentation into ``Paste/docs/html/``.
diff --git a/docs/StyleGuide.txt b/docs/StyleGuide.txt
new file mode 100644
index 0000000..b307282
--- /dev/null
+++ b/docs/StyleGuide.txt
@@ -0,0 +1,102 @@
++++++++++++++++++++
+Paste Style Guide
++++++++++++++++++++
+
+Generally you should follow the recommendations in `PEP 8`_, the
+Python Style Guide. Some things to take particular note of:
+
+.. _PEP 8: http://www.python.org/peps/pep-0008.html
+
+* **No tabs**. Not anywhere. Always indent with 4 spaces.
+
+* I don't stress too much on line length. But try to break lines up
+ by grouping with parenthesis instead of with backslashes (if you
+ can). Do asserts like::
+
+ assert some_condition(a, b), (
+ "Some condition failed, %r isn't right!" % a)
+
+* But if you are having problems with line length, maybe you should
+ just break the expression up into multiple statements.
+
+* Blank lines between methods, unless they are very small and closely
+ bound to each other.
+
+* Don't use the form ``condition and trueValue or falseValue``. Break
+ it out and use a variable.
+
+* I (Ian Bicking) am very picky about whitespace. There's one and
+ only one right way to do it. Good examples::
+
+ short = 3
+ longerVar = 4
+
+ if x == 4:
+ do stuff
+
+ func(arg1='a', arg2='b')
+ func((a + b)*10)
+
+ **Bad** examples::
+
+ short =3
+ longerVar=4
+
+ if x==4: do stuff
+
+ func(arg1 = 'a', arg2 = 'b')
+ func(a,b)
+ func( a, b )
+ [ 1, 2, 3 ]
+
+ If the whitespace isn't right, it'll annoy me and I'll feel a need
+ to fix it. Really, this whitespace stuff shouldn't be that
+ controversial should it? Some particular points that I feel
+ strongly about:
+
+ * No space after a function name (bad: ``func (arg)``).
+ * No space after or before a parenthesis (bad: ``func( arg )``).
+ * Always one space after a comma (bad: ``func(a,b)``).
+
+* Use ``@@`` to mark something that is suboptimal, or where you have a
+ concern that it's not right. Try to also date it and put your
+ username there.
+
+* Docstrings are good. They should look like::
+
+ class AClass(object):
+ """
+ doc string...
+ """
+
+ Don't use single quotes (''') -- they tend to cause problems in
+  Emacs.  Don't bother trying to make the string less vertically compact.
+
+* Comments go right before the thing they are commenting on.
+
+* Methods never, ever, ever start with capital letters. Generally
+ only classes are capitalized. But definitely never methods.
+
+* Use ``cls`` to refer to a class. Use ``meta`` to refer to a
+ metaclass (which also happens to be a class, but calling a metaclass
+ ``cls`` will be confusing).
+
+* Use ``isinstance`` instead of comparing types. E.g.::
+
+ if isinstance(var, str): ...
+ # Bad:
+ if type(var) is StringType: ...
+
+* Never, ever use two leading underscores. This is annoyingly
+ private. If name clashes are a concern, use explicit name mangling
+ instead (e.g., ``_MyThing_blahblah``). This is essentially the same
+ thing as double-underscore, only it's transparent where double
+ underscore obscures.
+
+* Module names should be unique in the package. Subpackages shouldn't
+ share module names with sibling or parent packages. Sadly this
+ isn't possible for ``__init__.py``, but it's otherwise easy enough.
+
+* Module names should be all lower case, and probably have no
+ underscores (smushedwords).
+
diff --git a/docs/_static/paste.css b/docs/_static/paste.css
new file mode 100644
index 0000000..6705e5d
--- /dev/null
+++ b/docs/_static/paste.css
@@ -0,0 +1,15 @@
+a.invisible-link {
+ color: #fff;
+ text-decoration: none;
+}
+
+a.invisible-link:visited {
+ color: #fff;
+ text-decoration: none;
+}
+
+a.invisible:link {
+ color: #fff;
+ text-decoration: none;
+}
+
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
new file mode 100644
index 0000000..6ae2d42
--- /dev/null
+++ b/docs/_templates/layout.html
@@ -0,0 +1,29 @@
+{% extends "!layout.html" %}
+
+{% block extrahead %}
+{{ super() }}
+<link rel="stylesheet" type="text/css"
+ href="{{ pathto('_static/paste.css') }}">
+{% endblock %}
+
+{% block sidebartoc %}
+<h3><a href="http://pythonpaste.org/" class="invisible-link">Python Paste</a></h3>
+
+<ul>
+<li><a href="https://bitbucket.org/ianb/pastescript/issues/">Issue tracker</a></li>
+<li><a href="http://pythonpaste.org/">Paste core</a></li>
+<li><a href="http://webob.org/">WebOb</a></li>
+<li><a href="http://pythonpaste.org/deploy/">Paste Deploy</a></li>
+<li><a href="http://pythonpaste.org/script/">Paste Script</a></li>
+<li><a href="http://webtest.pythonpaste.org/">WebTest</a></li>
+<li><a href="http://pythonpaste.org/scripttest/">ScriptTest</a></li>
+<li><a href="http://pythonpaste.org/initools/">INITools</a></li>
+<li><a href="http://pythonpaste.org/tempita/">Tempita</a></li>
+<li><a href="http://pythonpaste.org/waitforit/">WaitForIt</a></li>
+<li><a href="http://pythonpaste.org/wphp/">WPHP</a></li>
+<li><a href="http://pythonpaste.org/wsgifilter/">WSGIFilter</a></li>
+<li><a href="http://pythonpaste.org/wsgiproxy/">WSGIProxy</a></li>
+</ul>
+
+{{ super() }}
+{% endblock %}
diff --git a/docs/community/index.txt b/docs/community/index.txt
new file mode 100644
index 0000000..5b30110
--- /dev/null
+++ b/docs/community/index.txt
@@ -0,0 +1,15 @@
+Community
+=========
+
+Much of the communication goes on in the `mailing lists
+<mailing-list.html>`_; see that page for information on the lists.
+
+For live IRC discussion, try the ``#pythonpaste`` channel on `Freenode
+<http://freenode.net/>`_.
+
+If you find bugs in the code or documentation, please `submit a ticket
+<http://pythonpaste.org/trac/report>`_. You can also `view tickets
+<http://pythonpaste.org/trac/report>`_.
+
+
+
diff --git a/docs/community/mailing-list.txt b/docs/community/mailing-list.txt
new file mode 100644
index 0000000..854ec3e
--- /dev/null
+++ b/docs/community/mailing-list.txt
@@ -0,0 +1,14 @@
+Mailing Lists
+=============
+
+General discussion and questions should go to:
+
+`paste-users@googlegroups.org <http://groups.google.com/group/paste-users>`_:
+ New posts are `on Google Groups <http://groups.google.com/group/paste-users/topics>`_ `old posts are in their own archive <http://pythonpaste.org/archives/list/paste-users.en.html>`_
+
+More abstract discussion of Python web programming should go to:
+
+`web-sig@python.org <http://mail.python.org/mailman/listinfo/web-sig>`_:
+ `Subscribe <http://mail.python.org/mailman/listinfo/web-sig>`__,
+ `Archives <http://www.python.org/pipermail/web-sig/>`__
+
diff --git a/docs/community/repository.txt b/docs/community/repository.txt
new file mode 100644
index 0000000..b8f3700
--- /dev/null
+++ b/docs/community/repository.txt
@@ -0,0 +1,10 @@
+Repository
+==========
+
+Paste is kept in a Mercurial (hg) repository at
+http://bitbucket.org/ianb/paste
+
+If you are using a command-line Mercurial client, you can check
+it out like::
+
+ hg clone http://bitbucket.org/ianb/paste
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..e035d50
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+#
+# Paste documentation build configuration file, created by
+# sphinx-quickstart on Tue Apr 22 22:08:49 2008.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# The contents of this file are pickled, so don't put values in the namespace
+# that aren't pickleable (module imports are okay, they're removed automatically).
+#
+# All configuration values have a default value; values that are commented out
+# serve to show the default value.
+
+import sys
+
+# If your extensions are in another directory, add it here.
+#sys.path.append('some/directory')
+
+# General configuration
+# ---------------------
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc']
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.txt'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General substitutions.
+project = 'Paste'
+copyright = '2008, Ian Bicking'
+
+# The default replacements for |version| and |release|, also used in various
+# other places throughout the built documents.
+#
+# The short X.Y version.
+version = '1.7'
+# The full version, including alpha/beta/rc tags.
+release = '1.7.5.1'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+today_fmt = '%B %d, %Y'
+
+# List of documents that shouldn't be included in the build.
+unused_docs = ['include/contact.txt', 'include/reference_header.txt']
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+
+# Options for HTML output
+# -----------------------
+
+# The style sheet to use for HTML and HTML Help pages. A file of that name
+# must exist either in Sphinx' static/ path, or in one of the custom paths
+# given in html_static_path.
+html_style = 'default.css'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Content template for the index page.
+#html_index = ''
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_use_modindex = True
+
+# If true, the reST sources are included in the HTML build as _sources/<name>.
+#html_copy_source = True
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Pastedoc'
+
+
+# Options for LaTeX output
+# ------------------------
+
+# The paper size ('letter' or 'a4').
+#latex_paper_size = 'letter'
+
+# The font size ('10pt', '11pt' or '12pt').
+#latex_font_size = '10pt'
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, document class [howto/manual]).
+#latex_documents = []
+
+# Additional stuff for the LaTeX preamble.
+#latex_preamble = ''
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_use_modindex = True
diff --git a/docs/default.css b/docs/default.css
new file mode 100644
index 0000000..a570fb6
--- /dev/null
+++ b/docs/default.css
@@ -0,0 +1,394 @@
+/*
+:Author: David Goodger, Ian Bicking
+:Contact: ianb@colorstudy.com
+:date: $Date: 2003/11/01 20:35:45 $
+:version: $Revision: 1.3 $
+:copyright: This stylesheet has been placed in the public domain.
+
+A modification of the default cascading style sheet (v.1.3) for the
+HTML output of Docutils.
+*/
+
+body {
+ font-family: Arial, sans-serif;
+ background-color: #fff;
+}
+
+em, i {
+ /* Typically serif fonts have much nicer italics */
+ font-family: Times New Roman, Times, serif;
+}
+
+li {
+ list-style-type: circle;
+}
+
+a.target {
+ color: blue;
+}
+
+a.toc-backref {
+ text-decoration: none;
+ color: black;
+}
+
+a.toc-backref:hover {
+ background-color: inherit;
+}
+
+a:hover {
+ background-color: #ccc;
+}
+
+h1 a:hover, h2 a:hover, h3 a:hover, h4 a:hover, h5 a:hover, h6:hover {
+ background-color: inherit;
+}
+
+cite {
+ font-style: normal;
+ font-family: monospace;
+ font-weight: bold;
+}
+
+dd {
+ margin-bottom: 0.5em;
+}
+
+div.abstract {
+ margin: 2em 5em;
+}
+
+div.abstract p.topic-title {
+ font-weight: bold;
+ text-align: center;
+}
+
+div.attention, div.caution, div.danger, div.error, div.hint,
+div.important, div.note, div.tip, div.warning {
+ background-color: #ccc;
+ width: 40%;
+ border: medium outset;
+ padding: 3px;
+ float: right
+}
+
+div.attention p.admonition-title, div.caution p.admonition-title,
+div.danger p.admonition-title, div.error p.admonition-title,
+div.warning p.admonition-title {
+ color: #c00;
+ font-weight: bold;
+ font-family: sans-serif;
+ text-align: center;
+ background-color: #999;
+ display: block;
+ margin: 0;
+}
+
+div.hint p.admonition-title, div.important p.admonition-title,
+div.note p.admonition-title, div.tip p.admonition-title {
+ font-weight: bold;
+ font-family: sans-serif;
+ text-align: center;
+ background-color: #999;
+ display: block;
+ margin: 0;
+}
+
+div.dedication {
+ margin: 2em 5em;
+ text-align: center;
+ font-style: italic;
+}
+
+div.dedication p.topic-title {
+ font-weight: bold;
+ font-style: normal;
+}
+
+div.figure {
+ margin-left: 2em;
+}
+
+div.footer, div.header {
+ font-size: smaller;
+}
+
+div.system-messages {
+ margin: 5em;
+}
+
+div.system-messages h1 {
+ color: red;
+}
+
+div.system-message {
+ border: medium outset;
+ padding: 1em;
+}
+
+div.system-message p.system-message-title {
+ color: red;
+ font-weight: bold;
+}
+
+div.topic {
+ margin: 2em;
+}
+
+h1, h2, h3, h4, h5, h6 {
+ font-family: Helvetica, Arial, sans-serif;
+ border: thin solid black;
+ /* This makes the borders rounded on Mozilla, which pleases me */
+ -moz-border-radius: 8px;
+ padding: 4px;
+}
+
+h1 {
+ background-color: #449;
+ color: #fff;
+ border: medium solid black;
+}
+
+h1 a.toc-backref, h2 a.toc-backref {
+ color: #fff;
+}
+
+h2 {
+ background-color: #666;
+ color: #fff;
+ border: medium solid black;
+}
+
+h3, h4, h5, h6 {
+ background-color: #ccc;
+ color: #000;
+}
+
+h3 a.toc-backref, h4 a.toc-backref, h5 a.toc-backref,
+h6 a.toc-backref {
+ color: #000;
+}
+
+h1.title {
+ text-align: center;
+ background-color: #449;
+ color: #fff;
+ border: thick solid black;
+ -moz-border-radius: 20px;
+}
+
+h2.subtitle {
+ text-align: center;
+}
+
+hr {
+ width: 75%;
+}
+
+ol.simple, ul.simple {
+ margin-bottom: 1em;
+}
+
+ol.arabic {
+ list-style: decimal;
+}
+
+ol.loweralpha {
+ list-style: lower-alpha;
+}
+
+ol.upperalpha {
+ list-style: upper-alpha;
+}
+
+ol.lowerroman {
+ list-style: lower-roman;
+}
+
+ol.upperroman {
+ list-style: upper-roman;
+}
+
+p.caption {
+ font-style: italic;
+}
+
+p.credits {
+ font-style: italic;
+ font-size: smaller;
+}
+
+p.first {
+ margin-top: 0;
+}
+
+p.label {
+ white-space: nowrap;
+}
+
+p.topic-title {
+ font-weight: bold;
+}
+
+pre.address {
+ margin-bottom: 0;
+ margin-top: 0;
+ font-family: serif;
+ font-size: 100%;
+}
+
+pre.line-block {
+ font-family: serif;
+ font-size: 100%;
+}
+
+pre.literal-block, pre.doctest-block {
+ margin-left: 2em;
+ margin-right: 2em;
+ background-color: #eee;
+ border: thin black solid;
+ padding: 5px;
+}
+
+span.classifier {
+ font-family: sans-serif;
+ font-style: oblique;
+}
+
+span.classifier-delimiter {
+ font-family: sans-serif;
+ font-weight: bold;
+}
+
+span.interpreted {
+ font-family: sans-serif;
+}
+
+span.option-argument {
+ font-style: italic;
+}
+
+span.pre {
+ white-space: pre;
+}
+
+span.problematic {
+ color: red;
+}
+
+table {
+ margin-top: 0.5em;
+ margin-bottom: 0.5em;
+}
+
+table.citation {
+ border-left: solid thin gray;
+ padding-left: 0.5ex
+}
+
+table.docinfo {
+ margin: 2em 4em;
+}
+
+table.footnote {
+ border-left: solid thin black;
+ padding-left: 0.5ex;
+}
+
+td, th {
+ padding-left: 0.5em;
+ padding-right: 0.5em;
+ vertical-align: top;
+}
+
+td > p:first-child, th > p:first-child {
+ margin-top: 0em;
+}
+
+th.docinfo-name, th.field-name {
+ font-weight: bold;
+ text-align: left;
+ white-space: nowrap;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ font-size: 100%;
+}
+
+code, tt {
+ color: #006;
+}
+
+ul.auto-toc {
+ list-style-type: none;
+}
+
+/*****************************************
+ * Doctest embedded examples
+ *****************************************/
+
+span.doctest-url {
+ background-color: #eee;
+ border-top: 2px outset #666;
+ border-left: 2px outset #666;
+ border-right: 2px outset #666;
+ padding: 0.25em;
+}
+
+div.doctest-example {
+ border: outset 5px #666;
+ background-color: #eee;
+ font-family: default;
+ padding: 0.5em;
+}
+
+div.doctest-example h1 {
+ background-color: inherit;
+ border: none;
+ color: inherit;
+ font-family: default;
+}
+
+div.doctest-example tt {
+ color: inherit;
+}
+
+div.doctest-status {
+ background-color: #060;
+ color: #fff;
+}
+
+span.doctest-header {
+ background-color: #ccc;
+ font-family: monospace;
+}
+
+pre.doctest-errors {
+ border: none;
+ background-color: #333;
+ color: #600;
+}
+
+div.source-code {
+ background-color: #000;
+ border: inset #999 3px;
+ overflow: auto;
+}
+
+pre.source-code {
+ background-color: #000;
+ border: inset #999 3px;
+ overflow: auto;
+ font-family: monospace;
+ color: #fff;
+}
+
+span.source-filename {
+ background-color: #000;
+ border-top: 2px outset #999;
+ border-left: 2px outset #999;
+ border-right: 2px outset #999;
+ padding: 0.25em;
+ color: #fff
+}
+
diff --git a/docs/developer-features.txt b/docs/developer-features.txt
new file mode 100644
index 0000000..503d419
--- /dev/null
+++ b/docs/developer-features.txt
@@ -0,0 +1,148 @@
+Features
+========
+
+Testing
+-------
+
+* A fixture for testing WSGI applications conveniently and in-process,
+ in :class:`paste.fixture.TestApp`
+
+* A fixture for testing command-line applications, also in
+ :class:`paste.fixture.TestFileEnvironment`
+
+* Check components for WSGI-compliance in :mod:`paste.lint`
+
+* Check filesystem changes, with :mod:`paste.debug.fsdiff`
+
+Server
+------
+
+* A threaded HTTP server in :mod:`paste.httpserver`
+
+* A tool for seeing and killing errant threads in the HTTP server, in
+ :mod:`paste.debug.watchthreads`
+
+Dispatching
+-----------
+
+* Chain and cascade WSGI applications (returning the first non-error
+ response) in :mod:`paste.cascade`
+
+* Dispatch to several WSGI applications based on URL prefixes, in
+ :mod:`paste.urlmap`
+
+* Allow applications to make subrequests and forward requests
+ internally, in :mod:`paste.recursive`
+
+* Redirect error pages (e.g., 404 Not Found) to custom error pages, in
+ :mod:`paste.errordocument`.
+
+Web Application
+---------------
+
+* Easily deal with incoming requests and sending a response in
+ :mod:`paste.wsgiwrappers`
+
+* Work directly with the WSGI environment in :mod:`paste.request`
+
+* Run CGI programs as WSGI applications in :mod:`paste.cgiapp`
+
+* Traverse files and load WSGI applications from ``.py`` files (or
+ static files), in :mod:`paste.urlparser`
+
+* Serve static directories of files, also in :mod:`paste.urlparser`; also
+ serve using the Setuptools ``pkg_resources`` resource API.
+
+* Proxy to other servers, treating external HTTP servers as WSGI
+ applications, in :mod:`paste.proxy`.
+
+* Serve files (with support for ``If-Modified-Since``, etc) in
+ :mod:`paste.fileapp`
+
+Tools
+-----
+
+* Catch HTTP-related exceptions (e.g., ``HTTPNotFound``) and turn them
+ into proper responses in :mod:`paste.httpexceptions`
+
+* Manage HTTP header fields with :mod:`paste.httpheaders`
+
+* Handle authentication/identification of requests in :mod:`paste.auth`
+
+* Create sessions in :mod:`paste.session` and :mod:`paste.flup_session`
+
+* Gzip responses in :mod:`paste.gzipper`
+
+* A wide variety of routines for manipulating WSGI requests and
+ producing responses, in :mod:`paste.request`, :mod:`paste.response` and
+ :mod:`paste.wsgilib`.
+
+* Create Apache-style logs in :mod:`paste.translogger`
+
+* Handy request and response wrappers in :mod:`paste.wsgiwrappers`
+
+* Handling of request-local module globals sanely in :mod:`paste.registry`
+
+Authentication
+--------------
+
+* Authentication using cookies in :mod:`paste.auth.cookie` and
+ :mod:`paste.auth.auth_tkt`; login form in :mod:`paste.auth.form`
+
+* Authentication using `OpenID <http://openid.net/>`_ in
+ :mod:`paste.auth.open_id`, using `CAS
+ <http://www.ja-sig.org/products/cas/>`_ in :mod:`paste.auth.cas`
+
+* HTTP authentication in :mod:`paste.auth.basic` and
+ :mod:`paste.auth.digest`
+
+* Dispatch to different authentication methods based on User-Agent, in
+ :mod:`paste.auth.multi`
+
+* Grant roles based on IP addresses, in :mod:`paste.auth.grantip`
+
+Debugging Filters
+-----------------
+
+* Catch (optionally email) errors with extended tracebacks (using
+ Zope/ZPT conventions) in :mod:`paste.exceptions`
+
+* During debugging, show tracebacks with information about each stack
+ frame, including an interactive prompt that runs in the individual
+ stack frames, in :mod:`paste.evalexception`.
+
+* Catch errors presenting a `cgitb
+ <http://python.org/doc/current/lib/module-cgitb.html>`_-based
+ output, in :mod:`paste.cgitb_catcher`.
+
+* Profile each request and append profiling information to the HTML,
+ in :mod:`paste.debug.profile`
+
+* Capture ``print`` output and present it in the browser for
+ debugging, in :mod:`paste.debug.prints`
+
+* Validate all HTML output from applications using the `WDG Validator
+ <http://www.htmlhelp.com/tools/validator/>`_, appending any errors
+ or warnings to the page, in :mod:`paste.debug.wdg_validator`
+
+Other Tools
+-----------
+
+* A file monitor to allow restarting the server when files have been
+ updated (for automatic restarting when editing code) in
+ :mod:`paste.reloader`
+
+* A class for generating and traversing URLs, and creating associated
+ HTML code, in :mod:`paste.url`
+
+* A small templating language (for internal use) in
+ :mod:`paste.util.template`
+
+* A class to help with loops in templates, in :mod:`paste.util.looper`
+
+* Import modules and objects given a string, in
+ :mod:`paste.util.import_string`
+
+* Ordered dictionary that can have multiple values with the same key,
+ in :mod:`paste.util.multidict`
+
diff --git a/docs/do-it-yourself-framework.txt b/docs/do-it-yourself-framework.txt
new file mode 100644
index 0000000..ae77ec0
--- /dev/null
+++ b/docs/do-it-yourself-framework.txt
@@ -0,0 +1,538 @@
+A Do-It-Yourself Framework
+++++++++++++++++++++++++++
+
+:author: Ian Bicking <ianb@colorstudy.com>
+:revision: $Rev$
+:date: $LastChangedDate$
+
+This tutorial has been translated `into Portuguese
+<http://montegasppa.blogspot.com/2007/06/um-framework-faa-voc-mesmo.html>`_.
+
+A newer version of this article is available `using WebOb
+<http://pythonpaste.org/webob/do-it-yourself.html>`_.
+
+.. contents::
+
+.. comments:
+
+ Explain SCRIPT_NAME/PATH_INFO better
+
+Introduction and Audience
+=========================
+
+This short tutorial is meant to teach you a little about WSGI, and as
+an example a bit about the architecture that Paste has enabled and
+encourages.
+
+This isn't an introduction to all the parts of Paste -- in fact, we'll
+only use a few, and explain each part. This isn't to encourage
+everyone to go off and make their own framework (though honestly I
+wouldn't mind). The goal is that when you have finished reading this
+you feel more comfortable with some of the frameworks built using this
+architecture, and a little more secure that you will understand the
+internals if you look under the hood.
+
+What is WSGI?
+=============
+
+At its simplest WSGI is an interface between web servers and web
+applications. We'll explain the mechanics of WSGI below, but a higher
+level view is to say that WSGI lets code pass around web requests in a
+fairly formal way. But there's more! WSGI is more than just HTTP.
+It might seem like it is just *barely* more than HTTP, but that little
+bit is important:
+
+* You pass around a CGI-like environment, which means data like
+ ``REMOTE_USER`` (the logged-in username) can be securely passed
+ about.
+
+* A CGI-like environment can be passed around with more context --
+  specifically instead of just one path you have two: ``SCRIPT_NAME`` (how
+ we got here) and ``PATH_INFO`` (what we have left).
+
+* You can -- and often should -- put your own extensions into the WSGI
+ environment. This allows for callbacks, extra information,
+ arbitrary Python objects, or whatever you want. These are things
+ you can't put in custom HTTP headers.
+
+This means that WSGI can be used not just between a web server and an
+application, but can be used at all levels for communication. This
+allows web applications to become more like libraries -- well
+encapsulated and reusable, but still with rich reusable functionality.
+
+Writing a WSGI Application
+==========================
+
+The first part is about how to use `WSGI
+<http://www.python.org/peps/pep-0333.html>`_ at its most basic. You
+can read the spec, but I'll do a very brief summary:
+
+* You will be writing a *WSGI application*. That's an object that
+ responds to requests. An application is just a callable object
+ (like a function) that takes two arguments: ``environ`` and
+ ``start_response``.
+
+* The environment looks a lot like a CGI environment, with keys like
+ ``REQUEST_METHOD``, ``HTTP_HOST``, etc.
+
+* The environment also has some special keys like ``wsgi.input`` (the
+ input stream, like the body of a POST request).
+
+* ``start_response`` is a function that starts the response -- you
+ give the status and headers here.
+
+* Lastly the application returns an iterator with the body response
+ (commonly this is just a list of strings, or just a list containing
+ one string that is the entire body.)
+
+So, here's a simple application::
+
+ def app(environ, start_response):
+ start_response('200 OK', [('content-type', 'text/html')])
+ return ['Hello world!']
+
+Well... that's unsatisfying. Sure, you can imagine what it does, but
+you can't exactly point your web browser at it.
+
+There's other cleaner ways to do this, but this tutorial isn't about
+*clean* it's about *easy-to-understand*. So just add this to the
+bottom of your file::
+
+ if __name__ == '__main__':
+ from paste import httpserver
+ httpserver.serve(app, host='127.0.0.1', port='8080')
+
+Now visit http://localhost:8080 and you should see your new app.
+If you want to understand how a WSGI server works, I'd recommend
+looking at the `CGI WSGI server
+<http://www.python.org/peps/pep-0333.html#the-server-gateway-side>`_
+in the WSGI spec.
+
+An Interactive App
+------------------
+
+That last app wasn't very interesting. Let's at least make it
+interactive. To do that we'll give a form, and then parse the form
+fields::
+
+ from paste.request import parse_formvars
+
+ def app(environ, start_response):
+ fields = parse_formvars(environ)
+ if environ['REQUEST_METHOD'] == 'POST':
+ start_response('200 OK', [('content-type', 'text/html')])
+ return ['Hello, ', fields['name'], '!']
+ else:
+ start_response('200 OK', [('content-type', 'text/html')])
+ return ['<form method="POST">Name: <input type="text" '
+ 'name="name"><input type="submit"></form>']
+
+The ``parse_formvars`` function just takes the WSGI environment and
+calls the `cgi <http://python.org/doc/current/lib/module-cgi.html>`_
+module (the ``FieldStorage`` class) and turns that into a MultiDict.
+
+Now For a Framework
+===================
+
+Now, this probably feels a bit crude. After all, we're testing for
+things like REQUEST_METHOD to handle more than one thing, and it's
+unclear how you can have more than one page.
+
+We want to build a framework, which is just a kind of generic
+application. In this tutorial we'll implement an *object publisher*,
+which is something you may have seen in Zope, Quixote, or CherryPy.
+
+Object Publishing
+-----------------
+
+In a typical Python object publisher you translate ``/`` to ``.``. So
+``/articles/view?id=5`` turns into ``root.articles.view(id=5)``. We
+have to start with some root object, of course, which we'll pass in...
+
+::
+
+ class ObjectPublisher(object):
+
+ def __init__(self, root):
+ self.root = root
+
+ def __call__(self, environ, start_response):
+ ...
+
+ app = ObjectPublisher(my_root_object)
+
+We override ``__call__`` to make instances of ``ObjectPublisher``
+callable objects, just like a function, and just like WSGI
+applications. Now all we have to do is translate that ``environ``
+into the thing we are publishing, then call that thing, then turn the
+response into what WSGI wants.
+
+The Path
+--------
+
+WSGI puts the requested path into two variables: ``SCRIPT_NAME`` and
+``PATH_INFO``. ``SCRIPT_NAME`` is everything that was used up
+*getting here*. ``PATH_INFO`` is everything left over -- it's
+the part the framework should be using to find the object. If you put
+the two back together, you get the full path used to get to where we
+are right now; this is very useful for generating correct URLs, and
+we'll make sure we preserve this.
+
+So here's how we might implement ``__call__``::
+
+ def __call__(self, environ, start_response):
+ fields = parse_formvars(environ)
+ obj = self.find_object(self.root, environ)
+ response_body = obj(**fields.mixed())
+ start_response('200 OK', [('content-type', 'text/html')])
+ return [response_body]
+
+ def find_object(self, obj, environ):
+ path_info = environ.get('PATH_INFO', '')
+ if not path_info or path_info == '/':
+ # We've arrived!
+ return obj
+ # PATH_INFO always starts with a /, so we'll get rid of it:
+ path_info = path_info.lstrip('/')
+ # Then split the path into the "next" chunk, and everything
+ # after it ("rest"):
+ parts = path_info.split('/', 1)
+ next = parts[0]
+ if len(parts) == 1:
+ rest = ''
+ else:
+ rest = '/' + parts[1]
+ # Hide private methods/attributes:
+ assert not next.startswith('_')
+ # Now we get the attribute; getattr(a, 'b') is equivalent
+ # to a.b...
+ next_obj = getattr(obj, next)
+ # Now fix up SCRIPT_NAME and PATH_INFO...
+ environ['SCRIPT_NAME'] += '/' + next
+ environ['PATH_INFO'] = rest
+ # and now parse the remaining part of the URL...
+ return self.find_object(next_obj, environ)
+
+And that's it, we've got a framework.
+
+Taking It For a Ride
+--------------------
+
+Now, let's write a little application. Put that ``ObjectPublisher``
+class into a module ``objectpub``::
+
+ from objectpub import ObjectPublisher
+
+ class Root(object):
+
+ # The "index" method:
+ def __call__(self):
+ return '''
+ <form action="welcome">
+ Name: <input type="text" name="name">
+ <input type="submit">
+ </form>
+ '''
+
+ def welcome(self, name):
+ return 'Hello %s!' % name
+
+ app = ObjectPublisher(Root())
+
+ if __name__ == '__main__':
+ from paste import httpserver
+ httpserver.serve(app, host='127.0.0.1', port='8080')
+
+Alright, done! Oh, wait. There's still some big missing features,
+like how do you set headers? And instead of giving ``404 Not Found``
+responses in some places, you'll just get an attribute error. We'll
+fix those up in a later installment...
+
+Give Me More!
+-------------
+
+You'll notice some things are missing here. Most specifically,
+there's no way to set the output headers, and the information on the
+request is a little slim.
+
+::
+
+ # This is just a dictionary-like object that has case-
+ # insensitive keys:
+ from paste.response import HeaderDict
+
+ class Request(object):
+ def __init__(self, environ):
+ self.environ = environ
+ self.fields = parse_formvars(environ)
+
+ class Response(object):
+ def __init__(self):
+ self.headers = HeaderDict(
+ {'content-type': 'text/html'})
+
+Now I'll teach you a little trick. We don't want to change the
+signature of the methods. But we can't put the request and response
+objects in normal global variables, because we want to be
+thread-friendly, and all threads see the same global variables (even
+if they are processing different requests).
+
+But Python 2.4 introduced a concept of "thread-local values". That's
+a value that just this one thread can see. This is in the
+`threading.local <http://docs.python.org/lib/module-threading.html>`_
+object. When you create an instance of ``local`` any attributes you
+set on that object can only be seen by the thread you set them in. So
+we'll attach the request and response objects here.
+
+So, let's remind ourselves of what the ``__call__`` function looked
+like::
+
+ class ObjectPublisher(object):
+ ...
+
+ def __call__(self, environ, start_response):
+ fields = parse_formvars(environ)
+ obj = self.find_object(self.root, environ)
+ response_body = obj(**fields.mixed())
+ start_response('200 OK', [('content-type', 'text/html')])
+ return [response_body]
+
+Let's update that::
+
+ import threading
+ webinfo = threading.local()
+
+ class ObjectPublisher(object):
+ ...
+
+ def __call__(self, environ, start_response):
+ webinfo.request = Request(environ)
+ webinfo.response = Response()
+ obj = self.find_object(self.root, environ)
+ response_body = obj(**dict(webinfo.request.fields))
+ start_response('200 OK', webinfo.response.headers.items())
+ return [response_body]
+
+Now in our method we might do::
+
+ class Root:
+ def rss(self):
+ webinfo.response.headers['content-type'] = 'text/xml'
+ ...
+
+If we were being fancier we would do things like handle `cookies
+<http://python.org/doc/current/lib/module-Cookie.html>`_ in these
+objects. But we aren't going to do that now. You have a framework,
+be happy!
+
+WSGI Middleware
+===============
+
+`Middleware
+<http://www.python.org/peps/pep-0333.html#middleware-components-that-play-both-sides>`_
+is where people get a little intimidated by WSGI and Paste.
+
+What is middleware? Middleware is software that serves as an
+intermediary.
+
+
+So let's
+write one. We'll write an authentication middleware, so that you can
+keep your greeting from being seen by just anyone.
+
+Let's use HTTP authentication, which also can mystify people a bit.
+HTTP authentication is fairly simple:
+
+* When authentication is required, we give a ``401 Authentication
+ Required`` status with a ``WWW-Authenticate: Basic realm="This
+ Realm"`` header
+
+* The client then sends back a header ``Authorization: Basic
+ encoded_info``
+
+* The "encoded_info" is a base-64 encoded version of
+ ``username:password``
+
+So how does this work? Well, we're writing "middleware", which means
+we'll typically pass the request on to another application. We could
+change the request, or change the response, but in this case sometimes
+we *won't* pass the request on (like, when we need to give that 401
+response).
+
+To give an example of a really really simple middleware, here's one
+that capitalizes the response::
+
+ class Capitalizer(object):
+
+ # We generally pass in the application to be wrapped to
+ # the middleware constructor:
+ def __init__(self, wrap_app):
+ self.wrap_app = wrap_app
+
+ def __call__(self, environ, start_response):
+ # We call the application we are wrapping with the
+ # same arguments we get...
+ response_iter = self.wrap_app(environ, start_response)
+ # then change the response...
+ response_string = ''.join(response_iter)
+ return [response_string.upper()]
+
+Technically this isn't quite right, because there are two ways to
+return the response body, but we're skimming bits.
+`paste.wsgilib.intercept_output
+<http://pythonpaste.org/module-paste.wsgilib.html#intercept_output>`_
+is a somewhat more thorough implementation of this.
+
+.. note::
+
+ This, like a lot of parts of this (now fairly old) tutorial is
+ better, more thorough, and easier using `WebOb
+ <http://pythonpaste.org/webob/>`_. This particular example looks
+ like::
+
+ from webob import Request
+
+ class Capitalizer(object):
+ def __init__(self, app):
+ self.app = app
+ def __call__(self, environ, start_response):
+ req = Request(environ)
+ resp = req.get_response(self.app)
+ resp.body = resp.body.upper()
+ return resp(environ, start_response)
+
+So here's some code that does something useful, authentication::
+
+ class AuthMiddleware(object):
+
+ def __init__(self, wrap_app):
+ self.wrap_app = wrap_app
+
+ def __call__(self, environ, start_response):
+ if not self.authorized(environ.get('HTTP_AUTHORIZATION')):
+ # Essentially self.auth_required is a WSGI application
+ # that only knows how to respond with 401...
+ return self.auth_required(environ, start_response)
+ # But if everything is okay, then pass everything through
+ # to the application we are wrapping...
+ return self.wrap_app(environ, start_response)
+
+ def authorized(self, auth_header):
+ if not auth_header:
+ # If they didn't give a header, they better login...
+ return False
+ # .split(None, 1) means split in two parts on whitespace:
+ auth_type, encoded_info = auth_header.split(None, 1)
+ assert auth_type.lower() == 'basic'
+ unencoded_info = encoded_info.decode('base64')
+ username, password = unencoded_info.split(':', 1)
+ return self.check_password(username, password)
+
+ def check_password(self, username, password):
+ # Not very high security authentication...
+ return username == password
+
+ def auth_required(self, environ, start_response):
+ start_response('401 Authentication Required',
+ [('Content-type', 'text/html'),
+ ('WWW-Authenticate', 'Basic realm="this realm"')])
+ return ["""
+ <html>
+ <head><title>Authentication Required</title></head>
+ <body>
+ <h1>Authentication Required</h1>
+ If you can't get in, then stay out.
+ </body>
+ </html>"""]
+
+.. note::
+
+ Again, here's the same thing with WebOb::
+
+ from webob import Request, Response
+
+ class AuthMiddleware(object):
+ def __init__(self, app):
+ self.app = app
+ def __call__(self, environ, start_response):
+ req = Request(environ)
+ if not self.authorized(req.headers['authorization']):
+ resp = self.auth_required(req)
+ else:
+ resp = self.app
+ return resp(environ, start_response)
+ def authorized(self, header):
+ if not header:
+ return False
+ auth_type, encoded = header.split(None, 1)
+ if not auth_type.lower() == 'basic':
+ return False
+ username, password = encoded.decode('base64').split(':', 1)
+ return self.check_password(username, password)
+ def check_password(self, username, password):
+ return username == password
+ def auth_required(self, req):
+ return Response(status=401, headers={'WWW-Authenticate': 'Basic realm="this realm"'},
+ body="""\
+ <html>
+ <head><title>Authentication Required</title></head>
+ <body>
+ <h1>Authentication Required</h1>
+ If you can't get in, then stay out.
+ </body>
+ </html>""")
+
+So, how do we use this?
+
+::
+
+ app = ObjectPublisher(Root())
+ wrapped_app = AuthMiddleware(app)
+
+ if __name__ == '__main__':
+ from paste import httpserver
+ httpserver.serve(wrapped_app, host='127.0.0.1', port='8080')
+
+Now you have middleware! Hurrah!
+
+Give Me More Middleware!
+------------------------
+
+It's even easier to use other people's middleware than to make your
+own, because then you don't have to program. If you've been following
+along, you've probably encountered a few exceptions, and have to look
+at the console to see the exception reports. Let's make that a little
+easier, and show the exceptions in the browser...
+
+::
+
+ app = ObjectPublisher(Root())
+ wrapped_app = AuthMiddleware(app)
+ from paste.exceptions.errormiddleware import ErrorMiddleware
+ exc_wrapped_app = ErrorMiddleware(wrapped_app)
+
+Easy! But let's make it *more* fancy...
+
+::
+
+ app = ObjectPublisher(Root())
+ wrapped_app = AuthMiddleware(app)
+ from paste.evalexception import EvalException
+ exc_wrapped_app = EvalException(wrapped_app)
+
+So go make an error now. And hit the little +'s. And type stuff in
+to the boxes.
+
+Conclusion
+==========
+
+Now that you've created your framework and application (I'm sure it's
+much nicer than the one I've given so far). You might keep writing it
+(many people have so far), but even if you don't you should be able to
+recognize these components in other frameworks now, and you'll have a
+better understanding how they probably work under the covers.
+
+Also check out the version of this tutorial written `using WebOb
+<http://pythonpaste.org/webob/do-it-yourself.html>`_. That tutorial
+includes things like **testing** and **pattern-matching dispatch**
+(instead of object publishing).
diff --git a/docs/download/index.txt b/docs/download/index.txt
new file mode 100644
index 0000000..01f918f
--- /dev/null
+++ b/docs/download/index.txt
@@ -0,0 +1,32 @@
+Downloads
+=========
+
+Each of these packages is available in several formats. The source
+distribution is a complete set of documentation, tests, and the source
+files themselves. There are also two "Egg" files: these are files
+`easy_install <http://peak.telecommunity.com/DevCenter/EasyInstall>`_
+can install directly into your ``site-packages/`` directory, and are
+Python-version specific. The download files for the latest version
+are always located on the Cheese Shop pages (listed below).
+
+* `Paste <http://pypi.python.org/pypi/Paste>`_
+* `Paste Script <http://pypi.python.org/pypi/PasteScript>`_
+* `Paste Deploy <http://pypi.python.org/pypi/PasteDeploy>`_
+* `Paste WebKit <http://pypi.python.org/pypi/PasteWebKit>`_
+* `Wareweb <http://pypi.python.org/pypi/Wareweb>`_ (deprecated, use `Pylons
+ <https://pypi.python.org/pypi/Pylons>`_ instead)
+
+All the packages are available in the Mercurial repositories rooted in
+http://bitbucket.org/ianb/
+
+* http://bitbucket.org/ianb/paste
+* http://bitbucket.org/ianb/pastescript
+* http://bitbucket.org/ianb/pastedeploy
+* https://github.com/Pylons/webob
+* ... and others
+
+Use::
+
+ hg clone http://bitbucket.org/ianb/paste
+
+to check out a working copy of Paste.
diff --git a/docs/future.txt b/docs/future.txt
new file mode 100644
index 0000000..697c750
--- /dev/null
+++ b/docs/future.txt
@@ -0,0 +1,108 @@
+The Future Of Paste
+===================
+
+Introduction
+------------
+
+Paste has been under development for a while, and has lots of code in it. Too much code! The code is largely decoupled, except for some core functions shared by many parts of the code. Those core functions have largely been superseded by better implementations in `WebOb <http://pythonpaste.org/webob/>`_.
+
+The future of these pieces is to split them into independent packages, and refactor the internal Paste dependencies to rely instead on WebOb.
+
+Already Extracted
+-----------------
+
+paste.fixture:
+ WebTest
+ ScriptTest
+
+paste.lint:
+ wsgiref.validate
+
+paste.exceptions and paste.evalexception:
+ WebError
+
+paste.util.template:
+ Tempita
+
+
+To Be Separated
+---------------
+
+paste.httpserver and paste.debug.watchthreads:
+ Not sure what to call this.
+
+paste.cascade and paste.errordocuments:
+ Not sure; Ben has an implementation of errordocuments in ``pylons.middleware.StatusCodeRedirect``
+
+paste.urlmap, paste.deploy.config.PrefixMiddleware:
+ In... some routing thing? Together with the previous package?
+
+paste.proxy:
+ WSGIProxy (needs lots of cleanup though)
+
+paste.fileapp, paste.urlparser.StaticURLParser, paste.urlparser.PkgResourcesParser:
+ In some new file-serving package.
+
+paste.cgiapp, wphp.fcgi_app:
+ Some proxyish app... maybe WSGIProxy?
+
+paste.translogger, paste.debug.prints, paste.util.threadedprint, wsgifilter.proxyapp.DebugHeaders:
+ Some... other place. Something loggy.
+
+paste.registry, paste.config:
+ Not sure. Alberto Valverde expressed interest in splitting out paste.registry.
+
+paste.cgitb_catcher:
+ Move to WebError? Not sure if it matters. For some reason people use this, though.
+
+
+To Deprecate
+------------
+
+(That is, I won't extract these anywhere; I'm not going to do any big deletes anytime soon, though.)
+
+paste.recursive
+ Better to do it manually (with webob.Request.get_response)
+
+paste.wsgiwrappers, paste.request, paste.response, paste.wsgilib, paste.httpheaders, paste.httpexceptions:
+ All the functionality is already in WebOb.
+
+paste.urlparser.URLParser:
+ Really this is tied to paste.webkit more than anything.
+
+paste.auth.*:
+ Well, these all need to be refactored, and replacements exist in AuthKit and repoze.who. Some pieces might still have utility.
+
+paste.debug.profile:
+ I think repoze.profile supersedes this.
+
+paste.debug.wdg_validate:
+ It could get reimplemented with more options for validators, but I'm not really that interested at the moment. The code is nothing fancy.
+
+paste.transaction:
+ More general in repoze.tm
+
+paste.url:
+ No one uses this
+
+
+Undecided
+---------
+
+paste.debug.fsdiff:
+ Maybe ScriptTest?
+
+paste.session:
+ It's an okay naive session system. But maybe Beaker makes it irrelevant (Beaker does seem slightly more complex to set up). But then, this can just live here indefinitely.
+
+paste.gzipper:
+ I'm a little uncomfortable with this in concept. It's largely in WebOb right now, but not as middleware.
+
+paste.reloader:
+ Maybe this should be moved to paste.script (i.e., paster serve)
+
+paste.debug.debugapp, paste.script.testapp:
+ Alongside other debugging tools, I guess
+
+paste.progress:
+ Not sure this works.
diff --git a/docs/include/contact.txt b/docs/include/contact.txt
new file mode 100644
index 0000000..87e0bc1
--- /dev/null
+++ b/docs/include/contact.txt
@@ -0,0 +1,5 @@
+If you have questions about this document, please contact the `paste
+mailing list <http://groups.google.com/group/paste-users>`_
+or try IRC (``#pythonpaste`` on freenode.net). If there's something that
+confused you and you want to give feedback, please `submit an issue
+<http://pythonpaste.org/trac/newticket?component=documentation>`_.
diff --git a/docs/include/reference_header.txt b/docs/include/reference_header.txt
new file mode 100644
index 0000000..9b73f85
--- /dev/null
+++ b/docs/include/reference_header.txt
@@ -0,0 +1,5 @@
+Paste Reference Document
+@@@@@@@@@@@@@@@@@@@@@@@@
+
+.. contents::
+
diff --git a/docs/index.txt b/docs/index.txt
new file mode 100644
index 0000000..546e9fb
--- /dev/null
+++ b/docs/index.txt
@@ -0,0 +1,69 @@
+Python Paste
+============
+
+Contents:
+
+.. toctree::
+ :maxdepth: 1
+
+ news
+ future
+ testing-applications
+ url-parsing-with-wsgi
+ do-it-yourself-framework
+ paste-httpserver-threadpool
+ developer-features
+ DeveloperGuidelines
+ StyleGuide
+ paste-httpserver-threadpool
+ testing-applications
+ url-parsing-with-wsgi
+ community/index.txt
+ community/mailing-list.txt
+ community/repository.txt
+ download/index.txt
+ license
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
+.. comment:
+
+ I want to put these somewhere sometime, but no place for them now...
+ Python Paste -- 50% tastier than Elmer's!
+ Paste: making the web sticky.
+ Fix broken websites by applying Paste liberally.
+ Paste: paper over your inconsistencies.
+ Paste: a soft mixture of malleable consistency.
+ Paste: a tasty mixture to be spread on bread or crackers.
+ Paste: glue that won't hurt you if you eat it.
+ Python Paste: the web extruded into the form of a snake.
+ Paste: the vinegar eel.
+ Paste: you bring the cut.
+ Paste: a doughy substance from which to make metaphorical web cupcakes.
+ LAMP? LAMPP!
+ Putting the P in Wep 2.0!
+ Frankenweb crush tiny humans!
+ DSL? DSF!
+ Paste: Comfort for the framework-scarred
+
+Other Components
+================
+
+* `Paste Deploy <./deploy/>`_
+
+* `Paste Script <./script/>`_
+
+* `WebOb <http://webob.org/>`_
+
+* `WSGI specification (PEP 333) <http://www.python.org/dev/peps/pep-0333.html>`_
+
+License
+=======
+
+Paste is distributed under the `MIT license
+<http://www.opensource.org/licenses/mit-license.php>`_.
diff --git a/docs/license.txt b/docs/license.txt
new file mode 100644
index 0000000..c810dec
--- /dev/null
+++ b/docs/license.txt
@@ -0,0 +1,20 @@
+Copyright (c) 2006-2007 Ian Bicking and Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/docs/modules/auth.auth_tkt.txt b/docs/modules/auth.auth_tkt.txt
new file mode 100644
index 0000000..8622bcf
--- /dev/null
+++ b/docs/modules/auth.auth_tkt.txt
@@ -0,0 +1,14 @@
+:mod:`paste.auth.auth_tkt` -- auth_tkt cookie parsing
+=====================================================
+
+.. automodule:: paste.auth.auth_tkt
+
+Module Contents
+---------------
+
+.. autoclass:: AuthTKTMiddleware
+.. autofunction:: make_auth_tkt_middleware
+.. autoclass:: AuthTicket
+.. autoexception:: BadTicket
+
+
diff --git a/docs/modules/auth.basic.txt b/docs/modules/auth.basic.txt
new file mode 100644
index 0000000..2f8f21f
--- /dev/null
+++ b/docs/modules/auth.basic.txt
@@ -0,0 +1,11 @@
+:mod:`paste.auth.basic` -- Basic HTTP authentication
+====================================================
+
+.. automodule:: paste.auth.basic
+
+Module Contents
+---------------
+
+.. autoclass:: AuthBasicAuthenticator
+.. autoclass:: AuthBasicHandler
+.. autofunction:: make_basic
diff --git a/docs/modules/auth.cas.txt b/docs/modules/auth.cas.txt
new file mode 100644
index 0000000..d32dd7a
--- /dev/null
+++ b/docs/modules/auth.cas.txt
@@ -0,0 +1,11 @@
+:mod:`paste.auth.cas` -- CAS authentication
+===========================================
+
+.. automodule:: paste.auth.cas
+
+Module Contents
+---------------
+
+.. autoclass:: AuthCASHandler
+
+
diff --git a/docs/modules/auth.cookie.txt b/docs/modules/auth.cookie.txt
new file mode 100644
index 0000000..000cb52
--- /dev/null
+++ b/docs/modules/auth.cookie.txt
@@ -0,0 +1,12 @@
+:mod:`paste.auth.cookie` -- Cookie-based authentication
+=======================================================
+
+.. automodule:: paste.auth.cookie
+
+Module Contents
+---------------
+
+.. autoclass:: AuthCookieSigner
+.. autoclass:: AuthCookieHandler
+.. autoclass:: AuthCookieEnviron
+.. autofunction:: make_auth_cookie
diff --git a/docs/modules/auth.digest.txt b/docs/modules/auth.digest.txt
new file mode 100644
index 0000000..d13357e
--- /dev/null
+++ b/docs/modules/auth.digest.txt
@@ -0,0 +1,12 @@
+:mod:`paste.auth.digest` -- HTTP Digest login
+=============================================
+
+.. automodule:: paste.auth.digest
+
+Module Contents
+---------------
+
+.. autoclass:: AuthDigestAuthenticator
+.. autoclass:: AuthDigestHandler
+.. autofunction:: digest_password
+.. autofunction:: make_digest
diff --git a/docs/modules/auth.form.txt b/docs/modules/auth.form.txt
new file mode 100644
index 0000000..c059589
--- /dev/null
+++ b/docs/modules/auth.form.txt
@@ -0,0 +1,10 @@
+:mod:`paste.auth.form` -- HTML form/cookie authentication
+=========================================================
+
+.. automodule:: paste.auth.form
+
+Module Contents
+---------------
+
+.. autoclass:: AuthFormHandler
+.. autofunction:: make_form
diff --git a/docs/modules/auth.grantip.txt b/docs/modules/auth.grantip.txt
new file mode 100644
index 0000000..ead45d7
--- /dev/null
+++ b/docs/modules/auth.grantip.txt
@@ -0,0 +1,10 @@
+:mod:`paste.auth.grantip` -- Set user and groups based on IP address
+====================================================================
+
+.. automodule:: paste.auth.grantip
+
+Module Contents
+---------------
+
+.. autoclass:: GrantIPMiddleware
+.. autofunction:: make_grantip
diff --git a/docs/modules/auth.multi.txt b/docs/modules/auth.multi.txt
new file mode 100644
index 0000000..5813ee7
--- /dev/null
+++ b/docs/modules/auth.multi.txt
@@ -0,0 +1,11 @@
+:mod:`paste.auth.multi` -- Authentication via one of multiple methods
+=====================================================================
+
+.. automodule:: paste.auth.multi
+
+Module Contents
+---------------
+
+.. autoclass:: MultiHandler
+
+
diff --git a/docs/modules/cascade.txt b/docs/modules/cascade.txt
new file mode 100644
index 0000000..b54c735
--- /dev/null
+++ b/docs/modules/cascade.txt
@@ -0,0 +1,10 @@
+:mod:`paste.cascade` -- send requests to multiple applications until success
+============================================================================
+
+.. automodule:: paste.cascade
+
+Module Contents
+---------------
+
+.. autoclass:: Cascade
+.. autofunction:: make_cascade
diff --git a/docs/modules/cgiapp.txt b/docs/modules/cgiapp.txt
new file mode 100644
index 0000000..039ec6d
--- /dev/null
+++ b/docs/modules/cgiapp.txt
@@ -0,0 +1,11 @@
+:mod:`paste.cgiapp` -- run CGI scripts as WSGI applications
+===========================================================
+
+.. automodule:: paste.cgiapp
+
+Module Contents
+---------------
+
+.. autoclass:: CGIApplication
+.. autoexception:: CGIError
+.. autofunction:: make_cgi_application
diff --git a/docs/modules/cgitb_catcher.txt b/docs/modules/cgitb_catcher.txt
new file mode 100644
index 0000000..44f8771
--- /dev/null
+++ b/docs/modules/cgitb_catcher.txt
@@ -0,0 +1,10 @@
+:mod:`paste.cgitb_catcher` -- catch exceptions using cgitb
+==========================================================
+
+.. automodule:: paste.cgitb_catcher
+
+Module Contents
+---------------
+
+.. autoclass:: CgitbMiddleware
+.. autofunction:: make_cgitb_middleware
diff --git a/docs/modules/debug.debugapp.txt b/docs/modules/debug.debugapp.txt
new file mode 100644
index 0000000..1eb0a54
--- /dev/null
+++ b/docs/modules/debug.debugapp.txt
@@ -0,0 +1,13 @@
+:mod:`paste.debug.debugapp` -- debug app
+========================================
+
+.. automodule:: paste.debug.debugapp
+
+Module Contents
+---------------
+
+.. autoclass:: SimpleApplication
+.. autoclass:: SlowConsumer
+.. autofunction:: make_test_app
+.. autofunction:: make_slow_app
+
diff --git a/docs/modules/debug.fsdiff.txt b/docs/modules/debug.fsdiff.txt
new file mode 100644
index 0000000..0a267e7
--- /dev/null
+++ b/docs/modules/debug.fsdiff.txt
@@ -0,0 +1,15 @@
+:mod:`paste.debug.fsdiff` -- Show differences between directories
+=================================================================
+
+.. automodule:: paste.debug.fsdiff
+
+Module Contents
+---------------
+
+.. autoclass:: Diff
+.. autoclass:: Snapshot
+.. autoclass:: File
+.. autoclass:: Dir
+.. autofunction:: report_expected_diffs
+.. autofunction:: show_diff
+
diff --git a/docs/modules/debug.prints.txt b/docs/modules/debug.prints.txt
new file mode 100644
index 0000000..1787e71
--- /dev/null
+++ b/docs/modules/debug.prints.txt
@@ -0,0 +1,10 @@
+:mod:`paste.debug.prints` -- capture print output
+=================================================
+
+.. automodule:: paste.debug.prints
+
+Module Contents
+---------------
+
+.. autoclass:: PrintDebugMiddleware
+
diff --git a/docs/modules/debug.profile.txt b/docs/modules/debug.profile.txt
new file mode 100644
index 0000000..ccc0910
--- /dev/null
+++ b/docs/modules/debug.profile.txt
@@ -0,0 +1,13 @@
+:mod:`paste.debug.profile` -- profile applications and requests
+===============================================================
+
+.. automodule:: paste.debug.profile
+
+Module Contents
+---------------
+
+.. autoclass:: ProfileMiddleware
+.. autofunction:: make_profile_middleware
+.. autofunction:: profile_decorator
+
+
diff --git a/docs/modules/debug.watchthreads.txt b/docs/modules/debug.watchthreads.txt
new file mode 100644
index 0000000..cd0c915
--- /dev/null
+++ b/docs/modules/debug.watchthreads.txt
@@ -0,0 +1,12 @@
+:mod:`paste.debug.watchthreads` -- watch thread workers in paste.httpserver
+===========================================================================
+
+.. automodule:: paste.debug.watchthreads
+
+Module Contents
+---------------
+
+.. autoclass:: WatchThreads
+.. autofunction:: make_watch_threads
+.. autofunction:: make_bad_app
+
diff --git a/docs/modules/debug.wdg_validate.txt b/docs/modules/debug.wdg_validate.txt
new file mode 100644
index 0000000..26f7eff
--- /dev/null
+++ b/docs/modules/debug.wdg_validate.txt
@@ -0,0 +1,11 @@
+:mod:`paste.debug.wdg_validate` -- validate HTML output with the WDG validator
+==============================================================================
+
+.. automodule:: paste.debug.wdg_validate
+
+Module Contents
+---------------
+
+.. autoclass:: WDGValidateMiddleware
+.. autofunction:: make_wdg_validate_middleware
+
diff --git a/docs/modules/errordocument.txt b/docs/modules/errordocument.txt
new file mode 100644
index 0000000..111ac18
--- /dev/null
+++ b/docs/modules/errordocument.txt
@@ -0,0 +1,12 @@
+:mod:`paste.errordocument` -- Do internal redirects for error responses
+=======================================================================
+
+.. automodule:: paste.errordocument
+
+Module Contents
+---------------
+
+.. autoclass:: StatusBasedForward
+.. autofunction:: make_errordocument
+
+
diff --git a/docs/modules/evalexception.txt b/docs/modules/evalexception.txt
new file mode 100644
index 0000000..23587fe
--- /dev/null
+++ b/docs/modules/evalexception.txt
@@ -0,0 +1,9 @@
+:mod:`paste.evalexception` -- Interactive debugging for errors
+==============================================================
+
+.. automodule:: paste.evalexception
+
+Module Contents
+---------------
+
+.. autoclass:: EvalException
diff --git a/docs/modules/exceptions.txt b/docs/modules/exceptions.txt
new file mode 100644
index 0000000..dd1a63f
--- /dev/null
+++ b/docs/modules/exceptions.txt
@@ -0,0 +1,48 @@
+:mod:`paste.exceptions` -- Catch, display, and notify for exceptions
+====================================================================
+
+.. automodule:: paste.exceptions.errormiddleware
+
+Module Contents
+---------------
+
+.. autoclass:: ErrorMiddleware
+.. autofunction:: handle_exception
+.. autofunction:: make_error_middleware
+
+:mod:`paste.exceptions.collector` -- Collecting information from exceptions
+===========================================================================
+
+.. automodule:: paste.exceptions.collector
+
+Module Contents
+---------------
+
+.. autoclass:: ExceptionCollector
+.. autofunction:: collect_exception
+
+:mod:`paste.exceptions.formatter` -- Format exception information
+=================================================================
+
+.. automodule:: paste.exceptions.formatter
+
+Module Contents
+---------------
+
+.. autoclass:: TextFormatter
+.. autoclass:: HTMLFormatter
+.. autofunction:: format_html
+.. autofunction:: format_text
+
+:mod:`paste.exceptions.reporter` -- Report exceptions
+=====================================================
+
+.. automodule:: paste.exceptions.reporter
+
+Module Contents
+---------------
+
+.. autoclass:: EmailReporter
+.. autoclass:: LogReporter
+.. autoclass:: FileReporter
+.. autoclass:: WSGIAppReporter
diff --git a/docs/modules/fileapp.txt b/docs/modules/fileapp.txt
new file mode 100644
index 0000000..dffefd1
--- /dev/null
+++ b/docs/modules/fileapp.txt
@@ -0,0 +1,15 @@
+:mod:`paste.fileapp` -- Serve files
+===================================
+
+.. automodule:: paste.fileapp
+
+Module Contents
+---------------
+
+.. autoclass:: FileApp
+.. autoclass:: DirectoryApp
+.. autoclass:: DataApp
+.. autoclass:: ArchiveStore
+
+
+
diff --git a/docs/modules/fixture.txt b/docs/modules/fixture.txt
new file mode 100644
index 0000000..c519ce2
--- /dev/null
+++ b/docs/modules/fixture.txt
@@ -0,0 +1,40 @@
+:mod:`paste.fixture` -- Test applications
+=========================================
+
+.. contents::
+
+.. automodule:: paste.fixture
+
+Module Contents
+---------------
+
+.. autoclass:: TestApp
+ :members:
+.. autoclass:: TestRequest
+
+Forms
+-----
+
+.. autoclass:: Form
+ :members:
+.. autoclass:: Field
+ :members:
+.. autoclass:: Select
+.. autoclass:: Radio
+.. autoclass:: Checkbox
+.. autoclass:: Text
+.. autoclass:: Textarea
+.. autoclass:: Hidden
+.. autoclass:: Submit
+
+Script Testing
+--------------
+
+.. autoclass:: TestFileEnvironment
+ :members:
+.. autoclass:: ProcResult
+ :members:
+.. autoclass:: FoundFile
+.. autoclass:: FoundDir
+
+
diff --git a/docs/modules/gzipper.txt b/docs/modules/gzipper.txt
new file mode 100644
index 0000000..1036422
--- /dev/null
+++ b/docs/modules/gzipper.txt
@@ -0,0 +1,10 @@
+:mod:`paste.gzipper` -- Gzip-compress responses
+===============================================
+
+.. automodule:: paste.gzipper
+
+Module Contents
+---------------
+
+.. autoclass:: middleware
+.. autofunction:: make_gzip_middleware
diff --git a/docs/modules/httpexceptions.txt b/docs/modules/httpexceptions.txt
new file mode 100644
index 0000000..736576b
--- /dev/null
+++ b/docs/modules/httpexceptions.txt
@@ -0,0 +1,49 @@
+:mod:`paste.httpexceptions` -- Easily produce HTTP errors
+=========================================================
+
+.. automodule:: paste.httpexceptions
+
+Module Contents
+---------------
+
+.. autoclass:: HTTPExceptionHandler
+.. autofunction:: make_middleware
+
+Exceptions
+----------
+
+.. autoexception:: HTTPException
+.. autoexception:: HTTPError
+.. autoexception:: HTTPRedirection
+.. autoexception:: HTTPMultipleChoices
+.. autoexception:: HTTPMovedPermanently
+.. autoexception:: HTTPFound
+.. autoexception:: HTTPNotModified
+.. autoexception:: HTTPUseProxy
+.. autoexception:: HTTPTemporaryRedirect
+.. autoexception:: HTTPClientError
+.. autoexception:: HTTPBadRequest
+.. autoexception:: HTTPUnauthorized
+.. autoexception:: HTTPPaymentRequired
+.. autoexception:: HTTPForbidden
+.. autoexception:: HTTPNotFound
+.. autoexception:: HTTPMethodNotAllowed
+.. autoexception:: HTTPNotAcceptable
+.. autoexception:: HTTPProxyAuthenticationRequired
+.. autoexception:: HTTPRequestTimeout
+.. autoexception:: HTTPConflict
+.. autoexception:: HTTPGone
+.. autoexception:: HTTPLengthRequired
+.. autoexception:: HTTPPreconditionFailed
+.. autoexception:: HTTPRequestEntityTooLarge
+.. autoexception:: HTTPRequestURITooLong
+.. autoexception:: HTTPUnsupportedMediaType
+.. autoexception:: HTTPRequestRangeNotSatisfiable
+.. autoexception:: HTTPExpectationFailed
+.. autoexception:: HTTPServerError
+.. autoexception:: HTTPInternalServerError
+.. autoexception:: HTTPNotImplemented
+.. autoexception:: HTTPBadGateway
+.. autoexception:: HTTPServiceUnavailable
+.. autoexception:: HTTPGatewayTimeout
+.. autoexception:: HTTPVersionNotSupported
diff --git a/docs/modules/httpheaders.txt b/docs/modules/httpheaders.txt
new file mode 100644
index 0000000..ef5c74b
--- /dev/null
+++ b/docs/modules/httpheaders.txt
@@ -0,0 +1,8 @@
+:mod:`paste.httpheaders` -- Manipulate HTTP Headers
+===================================================
+
+.. comment:
+ I just don't feel like documenting the items...
+
+.. automodule:: paste.httpheaders
+ :members:
diff --git a/docs/modules/httpserver.txt b/docs/modules/httpserver.txt
new file mode 100644
index 0000000..5d260c5
--- /dev/null
+++ b/docs/modules/httpserver.txt
@@ -0,0 +1,10 @@
+:mod:`paste.httpserver` -- HTTP server
+======================================
+
+.. automodule:: paste.httpserver
+
+Module Contents
+---------------
+
+.. autofunction:: serve
+.. autofunction:: server_runner
diff --git a/docs/modules/lint.txt b/docs/modules/lint.txt
new file mode 100644
index 0000000..7a21caf
--- /dev/null
+++ b/docs/modules/lint.txt
@@ -0,0 +1,10 @@
+:mod:`paste.lint` -- Check for the validity of WSGI requests and responses
+==========================================================================
+
+.. automodule:: paste.lint
+
+Module Contents
+---------------
+
+.. autofunction:: middleware
+.. autoexception:: WSGIWarning
diff --git a/docs/modules/pony.txt b/docs/modules/pony.txt
new file mode 100644
index 0000000..b2c281b
--- /dev/null
+++ b/docs/modules/pony.txt
@@ -0,0 +1,10 @@
+:mod:`paste.pony` -- Add pony power to your application
+=======================================================
+
+.. automodule:: paste.pony
+
+Module Contents
+---------------
+
+.. autoclass:: PonyMiddleware
+.. autofunction:: make_pony
diff --git a/docs/modules/progress.txt b/docs/modules/progress.txt
new file mode 100644
index 0000000..8b15dc8
--- /dev/null
+++ b/docs/modules/progress.txt
@@ -0,0 +1,13 @@
+:mod:`paste.progress` -- Track progress of uploads
+==================================================
+
+.. automodule:: paste.progress
+
+Module Contents
+---------------
+
+.. autoclass:: UploadProgressMonitor
+.. autoclass:: UploadProgressReporter
+
+
+
diff --git a/docs/modules/proxy.txt b/docs/modules/proxy.txt
new file mode 100644
index 0000000..1e6841d
--- /dev/null
+++ b/docs/modules/proxy.txt
@@ -0,0 +1,14 @@
+:mod:`paste.proxy` -- Proxy WSGI requests to HTTP requests
+==========================================================
+
+.. automodule:: paste.proxy
+
+Module Contents
+---------------
+
+.. autoclass:: Proxy
+.. autofunction:: make_proxy
+.. autoclass:: TransparentProxy
+.. autofunction:: make_transparent_proxy
+
+
diff --git a/docs/modules/recursive.txt b/docs/modules/recursive.txt
new file mode 100644
index 0000000..a9339de
--- /dev/null
+++ b/docs/modules/recursive.txt
@@ -0,0 +1,10 @@
+:mod:`paste.recursive` -- internal requests
+===========================================
+
+.. automodule:: paste.recursive
+
+Module Contents
+---------------
+
+.. autoclass:: RecursiveMiddleware
+.. autoexception:: ForwardRequestException
diff --git a/docs/modules/registry.txt b/docs/modules/registry.txt
new file mode 100644
index 0000000..aba5bce
--- /dev/null
+++ b/docs/modules/registry.txt
@@ -0,0 +1,13 @@
+:mod:`paste.registry` -- Manage thread-local request-specific objects
+=====================================================================
+
+.. automodule:: paste.registry
+
+Module Contents
+---------------
+
+.. autoclass:: StackedObjectProxy
+.. autoclass:: Registry
+.. autoclass:: RegistryManager
+.. autoclass:: StackedObjectRestorer
+.. autofunction:: make_registry_manager
diff --git a/docs/modules/reloader.txt b/docs/modules/reloader.txt
new file mode 100644
index 0000000..fb27333
--- /dev/null
+++ b/docs/modules/reloader.txt
@@ -0,0 +1,14 @@
+:mod:`paste.reloader` -- Monitor for file changes to restart the process
+========================================================================
+
+.. automodule:: paste.reloader
+
+Module Contents
+---------------
+
+.. autofunction:: install
+.. autoclass:: Monitor
+.. autofunction:: watch_file
+
+
+
diff --git a/docs/modules/request.txt b/docs/modules/request.txt
new file mode 100644
index 0000000..d37b129
--- /dev/null
+++ b/docs/modules/request.txt
@@ -0,0 +1,19 @@
+:mod:`paste.request` -- Utility functions for the WSGI environment
+==================================================================
+
+.. automodule:: paste.request
+
+Module Contents
+---------------
+
+.. autofunction:: get_cookies
+.. autofunction:: get_cookie_dict
+.. autofunction:: parse_querystring
+.. autofunction:: parse_formvars
+.. autofunction:: construct_url
+.. autofunction:: path_info_split
+.. autofunction:: path_info_pop
+.. autofunction:: resolve_relative_url
+.. autoclass:: EnvironHeaders
+
+
diff --git a/docs/modules/response.txt b/docs/modules/response.txt
new file mode 100644
index 0000000..1b6c129
--- /dev/null
+++ b/docs/modules/response.txt
@@ -0,0 +1,15 @@
+:mod:`paste.response` -- Utility functions for producing responses
+==================================================================
+
+.. automodule:: paste.response
+
+Module Contents
+---------------
+
+.. autoclass:: HeaderDict
+.. autofunction:: has_header
+.. autofunction:: header_value
+.. autofunction:: remove_header
+.. autofunction:: replace_header
+
+
diff --git a/docs/modules/session.txt b/docs/modules/session.txt
new file mode 100644
index 0000000..6a11dfd
--- /dev/null
+++ b/docs/modules/session.txt
@@ -0,0 +1,11 @@
+:mod:`paste.session` -- Simple file-based sessions
+==================================================
+
+.. automodule:: paste.session
+
+Module Contents
+---------------
+
+.. autoclass:: SessionMiddleware
+.. autofunction:: make_session_middleware
+
diff --git a/docs/modules/transaction.txt b/docs/modules/transaction.txt
new file mode 100644
index 0000000..1e23a3e
--- /dev/null
+++ b/docs/modules/transaction.txt
@@ -0,0 +1,11 @@
+:mod:`paste.transaction` -- DB-API transactions
+===============================================
+
+.. automodule:: paste.transaction
+
+Module Contents
+---------------
+
+.. autoclass:: TransactionManagerMiddleware
+.. autoclass:: ConnectionFactory
+.. autofunction:: BasicTransactionHandler
diff --git a/docs/modules/translogger.txt b/docs/modules/translogger.txt
new file mode 100644
index 0000000..84a7217
--- /dev/null
+++ b/docs/modules/translogger.txt
@@ -0,0 +1,10 @@
+:mod:`paste.translogger` -- Log requests
+========================================
+
+.. automodule:: paste.translogger
+
+Module Contents
+---------------
+
+.. autoclass:: TransLogger
+.. autofunction:: make_filter
diff --git a/docs/modules/url.txt b/docs/modules/url.txt
new file mode 100644
index 0000000..6b5e83f
--- /dev/null
+++ b/docs/modules/url.txt
@@ -0,0 +1,10 @@
+:mod:`paste.url` -- URL convenience class
+=========================================
+
+.. automodule:: paste.url
+
+Module Contents
+---------------
+
+.. autoclass:: URL
+.. autoclass:: Image
diff --git a/docs/modules/urlmap.txt b/docs/modules/urlmap.txt
new file mode 100644
index 0000000..ae584d9
--- /dev/null
+++ b/docs/modules/urlmap.txt
@@ -0,0 +1,11 @@
+:mod:`paste.urlmap` -- Map URL paths
+====================================
+
+.. automodule:: paste.urlmap
+
+Module Contents
+---------------
+
+.. autoclass:: URLMap
+.. autofunction:: urlmap_factory
+.. autoclass:: PathProxyURLMap
diff --git a/docs/modules/urlparser.txt b/docs/modules/urlparser.txt
new file mode 100644
index 0000000..28752ab
--- /dev/null
+++ b/docs/modules/urlparser.txt
@@ -0,0 +1,14 @@
+:mod:`paste.urlparser` -- Handle URL paths and server static files
+==================================================================
+
+.. automodule:: paste.urlparser
+
+Module Contents
+---------------
+
+.. autoclass:: StaticURLParser
+.. autofunction:: make_static
+.. autoclass:: PkgResourcesParser
+.. autofunction:: make_pkg_resources
+.. autoclass:: URLParser
+.. autofunction:: make_url_parser
diff --git a/docs/modules/util.import_string.txt b/docs/modules/util.import_string.txt
new file mode 100644
index 0000000..04586d1
--- /dev/null
+++ b/docs/modules/util.import_string.txt
@@ -0,0 +1,12 @@
+:mod:`paste.util.import_string` -- Import objects from strings
+==============================================================
+
+.. automodule:: paste.util.import_string
+
+Module Contents
+---------------
+
+.. autofunction:: eval_import
+.. autofunction:: simple_import
+.. autofunction:: import_module
+.. autofunction:: try_import_module
diff --git a/docs/modules/util.multidict.txt b/docs/modules/util.multidict.txt
new file mode 100644
index 0000000..58b094a
--- /dev/null
+++ b/docs/modules/util.multidict.txt
@@ -0,0 +1,11 @@
+:mod:`paste.util.multidict` -- Dictionaries with multiple values
+================================================================
+
+.. automodule:: paste.util.multidict
+
+Module Contents
+---------------
+
+.. autoclass:: MultiDict
+.. autoclass:: UnicodeMultiDict
+
diff --git a/docs/modules/wsgilib.txt b/docs/modules/wsgilib.txt
new file mode 100644
index 0000000..e40d426
--- /dev/null
+++ b/docs/modules/wsgilib.txt
@@ -0,0 +1,18 @@
+:mod:`paste.wsgilib` -- Miscellaneous WSGI utility functions
+============================================================
+
+.. automodule:: paste.wsgilib
+
+Module Contents
+---------------
+
+.. autofunction:: add_close
+.. autofunction:: add_start_close
+.. autofunction:: chained_app_iters
+.. autoclass:: encode_unicode_app_iter
+.. autofunction:: catch_errors
+.. autofunction:: catch_errors_app
+.. autofunction:: raw_interactive
+.. autofunction:: interactive
+.. autofunction:: dump_environ
+.. autofunction:: intercept_output
diff --git a/docs/modules/wsgiwrappers.txt b/docs/modules/wsgiwrappers.txt
new file mode 100644
index 0000000..7774854
--- /dev/null
+++ b/docs/modules/wsgiwrappers.txt
@@ -0,0 +1,10 @@
+:mod:`paste.wsgiwrappers` -- Wrapper functions for WSGI request and response
+============================================================================
+
+.. automodule:: paste.wsgiwrappers
+
+Module Contents
+---------------
+
+.. autoclass:: WSGIRequest
+.. autoclass:: WSGIResponse
diff --git a/docs/news.txt b/docs/news.txt
new file mode 100644
index 0000000..d17001c
--- /dev/null
+++ b/docs/news.txt
@@ -0,0 +1,1075 @@
+News
+====
+
+.. contents::
+
+2.0.3
+-----
+
+* #26: Change six requirement to >=1.4.0
+ from [Linus Heckemann](https://bitbucket.org/sphalerite/)
+ https://bitbucket.org/ianb/paste/pull-requests/26/change-six-requirement-to-140/diff
+
+* #28: Py3k fixes
+ from [Nils Philippsen](https://bitbucket.org/nilsph/)
+ https://bitbucket.org/ianb/paste/pull-requests/28/py3k-fixes/diff
+
+* #29: paste.wsgilib.add_close: Add __next__ method to support using `add_close` objects as iterators on Python 3.
+ fixes https://bitbucket.org/ianb/pastedeploy/issues/18/py3-test_config_middleware-failed
+ from [Marc Abramowitz](https://bitbucket.org/msabramo/)
+ https://bitbucket.org/ianb/paste/pull-requests/29/pastewsgilibadd_close-add-__next__-method/diff
+
+* #30: tox.ini: Add py35 to envlist
+ from [Marc Abramowitz](https://bitbucket.org/msabramo/)
+ https://bitbucket.org/ianb/paste/pull-requests/30/toxini-add-py35-to-envlist/diff
+
+* #31: Enable testing with pypy
+ from [Marc Abramowitz](https://bitbucket.org/msabramo/)
+ https://bitbucket.org/ianb/paste/pull-requests/31/enable-testing-with-pypy/diff
+
+* #33: tox.ini: Measure test coverage
+ from [Marc Abramowitz](https://bitbucket.org/msabramo/)
+ https://bitbucket.org/ianb/paste/pull-requests/33/toxini-measure-test-coverage/diff
+
+2.0.2
+-----
+
+* #22: Fix improper commas in request headers in wsgi_environ (https://bitbucket.org/ianb/paste/pull-request/22/fix-improper-commas-in-request-headers-in)
+ Fixes issue #4 ("WSGI environ totally borked") (https://bitbucket.org/ianb/paste/issue/4/wsgi-environ-totally-borked)
+
+* #24: test_wsgirequest_charset: Use UTF-8 instead of iso-8859-1 (https://bitbucket.org/ianb/paste/pull-request/24/test_wsgirequest_charset-use-utf-8-instead)
+ Fixes issue #7 ("Python 3 test failure") (https://bitbucket.org/ianb/paste/issue/7/python-3-test-failure)
+
+* #23: Replace cgi.parse_qsl w/ six.moves.urllib.parse.parse_qsl (https://bitbucket.org/ianb/paste/pull-request/23/replace-cgiparse_qsl-w)
+ Fixes issue #8 ("cgi.parse_qsl is pending deprecation") (https://bitbucket.org/ianb/paste/issue/8/cgiparse_qsl-is-pending-deprecation)
+
+* #20: Escape CGI environment variables in HTTP 404 responses (https://bitbucket.org/ianb/paste/pull-request/20/escape-cgi-environment-variables-in-http)
+
+* #6: Add HTTP exception for new code 429 "Too Many Requests" (https://bitbucket.org/ianb/paste/pull-request/6/add-http-exception-for-new-code-429-too)
+
+* #25: replace ``has_key`` method to ``in`` operator #9 (https://bitbucket.org/ianb/paste/pull-request/25/replace-has_key-method-to-in-operator-9)
+ Fixes #9 ("used methods removed from py3") (https://bitbucket.org/ianb/paste/issue/9/used-methods-removed-from-py3)
+
+* #5: Invalid error message when the socket is already in use (https://bitbucket.org/ianb/paste/issue/5/invalid-error-message-when-the-socket-is)
+
+2.0.1
+-----
+
+* Fix setup.py for six dependency: move the six dependency from extras_require
+ to install_requires.
+
+* Port paste.proxy to Python 3.
+
+* Fix paste.exceptions.serial_number_generator.hash_identifier() on Python 3.
+
+* Fix paste.util.threadedprint.uninstall(). Rename duplicated uninstall()
+ function to uninstall_stdin() and fix typo in variable name (_oldstin =>
+ _oldstdin).
+
+* Add README.rst file.
+
+2.0
+---
+
+* Experimental Python 3 support.
+
+* paste now requires the six module.
+
+* Drop support of Python 2.5 and older.
+
+* Fixed ``egg:Paste#cgi``
+
+* In ``paste.httpserver``: give a 100 Continue response even when the
+ server has been configured as an HTTP/1.0 server (clients may send
+ ``Expect: 100-Continue`` before they know the version), and wrap
+ 100 Continue ``environ['wsgi.input']`` files with LimitedLengthFile
+ just like normal request bodies are wrapped, keeping WSGI
+ applications from over-reading from the socket.
+
+* Fixed parsing of paths beginning with multiple forward slashes.
+
+* Add tox.ini to run tests with tox on Python 2.6, 2.7 and 3.4.
+
+1.7.5.1
+-------
+
+* Fix bug introduced in :mod:`paste.auth.auth_tkt` (with ``url_unquote``)
+
+1.7.5
+-----
+
+* Won't install ``tests/`` directory (also caused installation
+ problems on some Mac systems).
+
+* Fixed problem with gzip middleware and zero-length responses.
+
+* Use ``X-Forwarded-For`` header in :mod:`paste.translogger`
+
+* Fixed problems with mimeparse code
+
+* Fixed some corner cases with CGI scripts
+
+* :mod:`paste.auth.auth_tkt` will URL-quote usernames, avoiding some
+ errors with usernames with ``!`` in them.
+
+* Improve handling of errors in fetching error pages in
+ :mod:`paste.errordocument`.
+
+1.7.4
+-----
+
+* Fix XSS bug (security issue) with not found handlers for
+ :class:`paste.urlparser.StaticURLParser` and
+ :class:`paste.urlmap.URLMap`. If you ask for a path with
+ ``/--><script>...`` that will be inserted in the error page and can
+ execute Javascript. Reported by Tim Wintle with further details
+ from Georg-Christian Pranschke.
+
+* Replaced :func:`paste.util.mimeparse.desired_match`
+
+1.7.3.1
+-------
+
+* Removed directory name from 404 errors in
+ :class:`paste.urlparser.StaticURLParser`.
+
+* Fixed packaging to include Javascript and images for
+ :mod:`paste.evalexception`
+
+1.7.3
+-----
+
+* I got a fever and the only prescription is more :mod:`paste.cowbell`!
+
+* Fix :mod:`paste.httpserver` on Python 2.6.
+
+* Fix :mod:`paste.auth.cookie`, which would insert newlines for long
+ cookies.
+
+* :mod:`paste.util.mimeparse` parses a single ``*`` in Accept headers
+ (sent by IE 6).
+
+* Fix some problems with the ``wdg_validate`` middleware.
+
+* Improvements to :mod:`paste.auth.auth_tkt`: add httponly support,
+ don't always aggressively set cookies without the
+ ``wildcard_cookie`` option. Also on logout, make cookies expire.
+
+* In :class:`paste.proxy.Proxy` handle Content-Length of -1.
+
+* In :mod:`paste.httpexceptions` avoid some unicode errors.
+
+* In :mod:`paste.httpserver` handle ``.read()`` from 100 Continue
+ properly (because of a typo it was doing a readline).
+
+* Update ``paste.util.mimeparse`` from `upstream
+ <http://code.google.com/p/mimeparse/>`_.
+
+1.7.2
+-----
+
+* In :mod:`paste.proxy`, added some more headers that are disallowed
+ in WSGI (e.g., Keep-Alive). Send Content-Length. Also fix the
+ missing query string when using :class:`paste.proxy.Proxy`
+ (:class:`paste.proxy.TransparentProxy` already worked).
+
+* Make :mod:`paste.debug.prints` work with Google App Engine.
+
+* Make ``environ['wsgi.input']`` with :mod:`paste.httpserver` only
+ have a ``seek`` method if it is wrapping something with a seek
+ method (which usually it is not).
+
+* In :mod:`paste.httpserver` re-raise KeyboardInterrupt in worker
+ threads.
+
+* Added support for the ``HttpOnly`` Cookie property to
+ :mod:`paste.wsgiwrappers`
+
+* Added :func:`paste.reloader.add_file_callback`, which lets you watch
+ files based on a callback.
+
+* Quiet Python 2.6 deprecation warnings.
+
+* Fix :mod:`paste.auth.cookie` generating bad headers.
+
+* Added :class:`paste.reloader.JythonMonitor` for an experimental,
+ optimized reloader on Jython.
+
+1.7.1
+-----
+
+* Normalize and make absolute the paths passed to
+ :class:`paste.urlparser.StaticURLParser` (before passing a
+ relative-to-cwd path to that class would cause Forbidden errors).
+
+* Deprecate :func:`paste.fixture.setup_module`
+
+1.7
+---
+
+* Fixed bug in :class:`paste.fixture.TestApp` that would submit forms
+ with unnamed fields (like an unnamed submit button). Also made
+ checkboxes with no explicit ``value`` send ``on`` instead of
+ ``checked`` (which is what browsers do).
+
+* Fixed bug in :mod:`paste.httpserver` where
+ ``environ['wsgi.input'].readline(max_size)`` ignored the max_size
+ argument, which can lead to large memory usage (from Jakub Stolarski)
+
+* Make :mod:`paste.cascade` notice sockets that have stopped producing
+ data. From Casey Zednick.
+
+* In :class:`paste.fixture.TestApp` Accept MultiDict values for the
+ ``params`` argument in requests. (Anything with a ``.items()``
+ method will have its items encoded as the request parameters.)
+
+* Fix :mod:`paste.httpserver` to allow binding to port 0.
+
+* In :mod:`paste.auth.auth_tkt`, set the same cookies (with the same
+ domains) in ``set_cookie`` as get unset in ``logout_user_cookie``.
+
+* In :mod:`paste.translogger` save REQUEST_METHOD in case it gets
+ overridden somewhere (e.g., when using errordocuments POST would
+ show up as GET).
+
+* Exceptions with unicode messages don't cause the collector to fail.
+
+* Sometimes :class:`paste.exceptions.errormiddleware.ErrorMiddleware`
+ would not call start_response properly; this is fixed (from Andreas
+ Kloecker).
+
+* :mod:`paste.fixture.TestApp` can store multiple cookie values
+ (previously only one cookie was stored; from Andrey Lebedev)
+
+* ``u'' in TestApp(app).get('/')`` will work when the body isn't ASCII
+ (before it would give a unicode error). This problem wasn't present
+ in the recommended `WebTest <http://pythonpaste.org/webtest/>`_.
+
+* :mod:`paste.debug.profile` won't break when content is served with no
+ Content-Type.
+
+* Accept relative paths and paths with ``/../`` in them for
+ :class:`paste.urlparser.StaticURLParser` (from Eric Larson). Also fix
+ problem with case normalization on Windows (from Ionel Maries
+ Cristian).
+
+* :class:`paste.registry.StackedObjectProxy`'s now include the proxied object's names via
+ ``__dir__`` (for Python 2.6).
+
+* Use ``environ['wsgi.file_wrapper']`` when available (in
+ ``paste.fileapp``).
+
+* Make :mod:`paste.debug.prints` compatible with App Engine.
+
+* Fix the ``domain`` keyword in
+ :meth:`paste.wsgiwrappers.WSGIResponse.delete_cookie`.
+
+1.6.1
+-----
+
+* Fixed bug in paste lint where PATH_INFO would become unicode.
+
+1.6
+---
+
+* Make the import of ``socket.sslerror`` conditional in
+ ``paste.exceptions.reporter`` (needed for Python interpreters
+ compiled without SSL support).
+
+* In ``paste.proxy.TransparentProxy``, don't overwrite
+ ``X-Forwarded-For`` header if it is already in the environment.
+
+* Added ``226 IM Used`` status code to ``paste.wsgiwrappers``
+
+* In ``paste.fixture.TestApp`` treat ``<image type="image">`` the same
+ as a submit button.
+
+* Use ``OpenSSL.tsafe.Connection`` for https with
+ ``paste.httpserver``, avoiding some possible errors (``sslv3 alert
+ bad record mac``).
+
+* Fix small issue with ``paste.cgiapp`` and mod_wsgi.
+
+* Use ``BaseCookie`` instead of ``SimpleCookie`` for storing cookies
+ (avoids quoting cookie values).
+
+1.5.1
+-----
+
+* Make ``paste.cascade`` more tolerant of a missing or invalid
+ Content-Length.
+
+1.5
+---
+
+* Fixed memory leak with ``paste.registry`` not properly removing
+ all references to registered objects should register be called
+ multiple times during a single context for a StackedObjectProxy.
+
+* ``paste.httpheaders.CONTENT_RANGE`` returns ``bytes
+ START-END/LENGTH`` instead of just ``START-END/LENGTH``
+
+* In ``paste.fixture.TestApp`` set ``CONTENT_TYPE`` to
+ ``'application/x-www-form-urlencoded'`` whenever there are
+ parameters (and no other content type was provided).
+
+* In ``paste.session``, when cleaning files ignore files that aren't
+ session files.
+
+* ``paste.httpexceptions.HTTPExceptionHandler`` will no longer catch
+ exceptions raised during the app_iter iteration.
+
+* ``paste.cascade.Cascade`` copies ``wsgi.input`` when cascading, so
+ that one of the applications cannot read the input and leave a later
+ application blocked when it tries to read the input.
+
+* Fix assigning to ``WSGIResponse.charset`` breaking the content-type.
+
+* SMTP authentication is supported for the exception handler. You may
+ now set ``smtp_username``, ``smtp_password`` and ``smtp_use_tls`` to
+ control this behavior. From pthy.
+
+1.4.2
+-----
+
+* Remove FreeBSD threadpool condition in paste.httpserver (which was
+ also breaking code for Windows users).
+
+* Fix problem with ``paste.wsgilib.intercept_output`` and passing up
+ exceptions.
+
+1.4.1
+-----
+
+* Allow customization of the ``paste.config.ConfigMiddleware`` environ
+ key.
+
+* Added a ``current`` method (an alias of ``current_conf``) to
+ ``paste.config.DispatchingConfig``.
+
+* Make test response ``.form`` attribute work when you have a single
+ named form.
+
+* Try to encode any unicode input to ``paste.auth.auth_tkt``
+
+* ``paste.wsgiwrappers.WSGIResponse`` now has a ``.content_type``
+ attribute (that does not include parameters), and a ``.charset``
+ attribute (that gets the charset parameter).
+
+* Inherit show_exceptions_in_wsgi_errors from global
+  configuration.  Inherit ``debug`` more properly.
+
+1.4
+---
+
+* In ``paste.httpserver`` added lots of functionality to the
+ threadpool. See `the paste.httpserver threadpool documentation
+ <paste-httpserver-threadpool.html>`_ for details. This catches
+ worker threads (and WSGI apps) that take too long to complete their
+ task; killing them eventually, adding more worker threads when the
+ pool is exhausted and it doesn't look good that it'll clear soon,
+ and optionally killing the process when there are too many
+ lost/zombie threads (you must be using some kind supervisor process
+ for this last response to make sense).
+
+* Save host and scheme information during real HTTP proxy requests to
+ ``paste.httpserver``, into the keys
+ ``paste.httpserver.proxy.scheme`` and
+ ``paste.httpserver.proxy.host``
+
+* In ``paste.exceptions`` always call ``start_response``; may help
+ problems when there is an exception in ``start_response`` itself.
+
+* Added method to ``paste.registry.StackedObjectProxy``,
+ ``_object_stack()``, which returns a list of all the registered
+ objects. Useful if you want to search through the effective history
+ of a stacked object.
+
+* Fixed infinite recursion problem with
+ ``paste.request.EnvironHeaders.keys()``.
+
+* Fix ``paste.wsgiwrappers.WSGIRequest.urlvars`` to use
+ ``wsgiorg.routing_args``
+
+* Remove port from ``paste.request.construct_url`` if it's the default
+ port (e.g., port 80 for ``http``).
+
+* ``paste.proxy`` works with headers with continuations in the
+ response (i.e., a header that spans multiple lines). Also, treat a
+ missing Content-Length as 0, not unlimited (may have previously
+ caused freeze ups for some kinds of requests).
+
+* ``StackedObjectProxy`` supports ``__call__`` (i.e., you can use
+ ``StackedObjectProxy`` with callable objects).
+
+* Fixed ``ProfileMiddleware`` not calling ``close()`` on consumed
+ app_iters.
+
+* ``httpheaders.AcceptLanguage`` now won't give an exception when
+ there is a malformed parameter in the header.
+
+* Fix ``paste.auth.form.auth_form`` Paste Deploy entry point.
+
+* Added REST methods to ``paste.fixture.TestApp``, so you can more
+ easily do requests like PUT and DELETE. From Anders Pearson.
+
+* Added ``{{default var=default_value}}`` command to
+ ``paste.util.template``. Make ``{{# comment}}`` work.
+
+1.3
+---
+
+* In ``paste.httpserver`` remove the reverse DNS lookup to set
+ ``REMOTE_HOST``
+
+* In ``paste.fileapp``, if the client sends both If-None-Match and
+ If-Modified-Since, prefer If-None-Match. Make ETags include the
+ size as well as last modified timestamp. Make it possible to
+ override how mimetypes are guessed.
+
+* ``HTTPException`` objects now have a ``exc.response(environ)``
+ method that returns a ``WSGIResponse`` object.
+
+* ``egg:Paste#watch_threads`` will show tracebacks of each thread
+ under Python 2.5.
+
+* Made ``paste.util.template`` trim whitespace around statements that
+ are on their own line.
+
+* ``paste.fileapp.DataApp`` now accepts ``allowed_headers=[...]`` to
+ specify the allowed headers. By default only ``GET`` and ``HEAD``
+ are allowed.
+
+* Added ``paste.util.import_string.try_import_module``, which imports
+ modules and catches ``ImportError``, but only if it's an error
+ importing the specific module, not an uncaught ``ImportError`` in
+ the module being imported.
+
+1.2.1
+-----
+
+* ``paste.httpserver`` didn't implement the ``readline`` that the
+ ``cgi`` module wants (regression in 1.2).
+
+1.2
+---
+
+* **Backward incompatible change**: ``paste.fileapp.FileApp`` properly
+ supports request methods, including HEAD. If you were subclassing
+ ``FileApp`` or ``DataApp`` and overriding ``__call__()`` you may have
+ to subclass ``get()`` instead.
+
+* paste.httpheaders now parses the HTTP Accept-Language header and returns
+ a list of languages the browser supports in the order it prefers them.
+
+* paste.mimeparse module added that handles parsing HTTP Accept headers
+ for quality and mime-types.
+
+* ``paste.request.construct_url`` was adding ``SERVER_PORT`` to
+ ``HTTP_HOST``; but ``HTTP_HOST`` (from the Host header) generally
+ contains a port when necessary, and ``SERVER_PORT`` should only be
+ used with ``SERVER_NAME``.
+
+* Added entry point for ``paste.registry.RegistryManager``
+ (``egg:Paste#registry``).
+
+* ``paste.request.HeaderDict`` fixed to know that ``Content-Length``
+ maps to ``CONTENT_LENGTH``.
+
+* Can use ``paste.urlparser.StaticURLParser`` with sub-instances other
+ than ``paste.fileapp.FileApp`` (if you subclass and override
+ ``make_app``)
+
+* ``paste.fixture.TestApp.get(status=X)`` takes a list of allowed
+ status codes for ``X``.
+
+* Added a small templating system for internal use (``paste.util.template``)
+
+* Removed a bunch of long-deprecated modules (generally modules that
+ have been moved to other names).
+
+In paste.wsgiwrappers
+~~~~~~~~~~~~~~~~~~~~~
+
+* ``paste.wsgiwrappers.WSGIRequest`` has match_accept() function to screen
+  incoming HTTP Accept values against a list of mime-types.
+
+* ``paste.wsgiwrappers.WSGIRequest.defaults`` now accepts a new key:
+
+ ``language``:
+ The i18n language that should be used as the fallback should
+ a translation not occur in a language file. See docs for
+ details.
+
+* ``paste.wsgiwrappers.WSGIRequest`` can now optionally decode form
+ parameters to unicode when it has a ``charset`` value set.
+
+* Deprecated the ``paste.wsgiwrappers.settings`` StackedObjectProxy
+ dictionary for ``paste.wsgiwrappers.WSGIResponse.defaults``.
+
+In paste.httpserver
+~~~~~~~~~~~~~~~~~~~
+
+* Regression in 1.1 fixed, where Paste's HTTP server would drop
+ trailing slashes from paths.
+
+* ``paste.httpserver`` now puts a key in the environment when using a
+ thread pool that allows you to track the thread pool and see any
+ wedged threads. ``egg:Paste#watch_threads`` is an application that
+ can display this information.
+
+* ``paste.httpserver`` now accepts all request methods, not just
+ ``GET``, ``PUT``, etc. (Methods like ``MKCOL`` were previously
+ rejected.)
+
+* ``paste.httpserver`` has a ``wsgi.input`` that now does not block if
+ you try to read past the end (it is limited to returning the number
+ of bytes given in ``Content-Length``). Double-reading from
+ ``wsgi.input`` won't give you the same data, but it won't cause
+ blocking.
+
+1.1.1
+-----
+
+* Fixed major issue with serving static files on Windows (a regression
+ in Paste 1.1 where most static files would return 404 Not Found).
+
+* Fixed ``parse_dict_querystring`` returning empty dicts instead of
+ ``MultiDict``\ s.
+
+* Added ``paste.config``, a rewrite of ``paste.deploy.config`` using
+ ``paste.registry``. This version of ``ConfigMiddleware`` will
+ enable use of ``paste.config.CONFIG`` within the ``EvalException``
+ interactive debugger.
+
+* Fixed problem where ``paste.recursive`` would leave ``wsgi.input``
+ and ``CONTENT_LENGTH`` set for recursive requests.
+
+* Changed the static file servers to give 404 Not Found responses when
+ you have extra parts after a static file, instead of 400 Bad
+ Request (like when you request ``/file.html/extra/path``)
+
+1.1
+---
+
+* Security fix for ``paste.urlparser.StaticURLParser``.  The problem
+  allowed escaping the root (and reading files) when used with
+  ``paste.httpserver`` (this does not affect other servers, and does
+  not apply when proxying requests from Apache to
+  ``paste.httpserver``).
+
+* ``paste.httpserver`` and ``paste.fixture.TestApp`` url-unquote
+ ``SCRIPT_NAME`` and ``PATH_INFO``, as specified in the CGI spec.
+ Thanks to Jon Nelson for pointing out both these issues.
+
+* ``paste.registry`` now works within the ``EvalException``
+ interactive debugger.
+
+* Fixed ``paste.auth.open_id`` failures not returning a correct
+ response.
+
+* Changed ``paste.httpexceptions.HTTPUnauthorized`` so that the
+ ``WWW-Authenticate`` header is not required. 401 responses don't
+ *have* to have that header.
+
+* In ``paste.fixture.TestApp``: ``<form>`` tags that have no
+  ``action`` will preserve the existing query string.  (Generally,
+  relative links that are completely empty should preserve the query
+  string, but were not doing so)
+
+* Made ``paste.*`` compatible with `py2exe <http://www.py2exe.org/>`_
+ by adding a ``modulefinder`` call in ``__init__.py``
+
+* The ``paste.gzipper`` gzipping middleware wasn't changing the
+ Content-Length header properly; thanks to Brad Clements for the fix.
+
+* Fixed ``paste.proxy`` to not use anything based on the dict form of
+  ``httplib.HTTPMessage``.  This form folds headers together in a way
+  that breaks ``Set-Cookie`` headers (two ``Set-Cookie`` headers would
+  be merged into one).
+
+* ``paste.request.parse_formvars`` didn't accept parameters in
+ ``CONTENT_TYPE``. ``prototype.js`` sets a ``charset`` parameter,
+ which caused a problem.
+
+* Added a ``__traceback_decorator__`` magic local variable, to allow
+ arbitrary manipulation of the output of
+ ``paste.exceptions.collector`` before formatting.
+
+* Added unicorn power to ``paste.pony`` (from Chad Whitacre)
+
+* For ``paste.httpserver`` SSL support: add support loading an
+ explicit certificate context, and using ``ssl_pem='*'`` create an
+ unsigned SSL certificate (from Jason Kirtland).
+
+* Fix some cases where ``paste.httpserver`` can have an orphaned
+ thread pool (causing the process to not shut down properly). Patch
+ from jek.
+
+1.0
+---
+
+* Fixed ``parsed_formvars`` potentially locking up on wsgi.input
+ after modification of ``QUERY_STRING``.
+
+* Fixed problem where ``paste.exceptions.errormiddleware`` didn't
+ expose the ``.close()`` method of the app_iter that it wraps (to
+ catch exceptions). This is a problem if the server about the
+ errormiddleware aborts the request; it should then call
+ ``.close()``, but won't necessarily exhaust the iterator to do so.
+
+* Added entry point for ``paste.translogger``
+ (``egg:Paste#translogger``)
+
+* Fixed some cases where long data (e.g., a file upload) would show up
+ in the error report, creating a very very large report. Also, put
+ in a monkeypatch for the ``cgi`` module so that
+ ``repr(uploaded_field)`` won't load the entire field into memory
+ (from its temporary file location).
+
+* Added a ``force_host`` option to ``paste.proxy.TransparentProxy``,
+ which will force all incoming requests to the same host, but leave
+ the ``Host`` header intact.
+
+* Added automatic cleanup of old sessions for ``paste.session``, from
+ Amir Salihefendic.
+
+* Quote the function name in tracebacks in the exception formatter;
+ Genshi has function names that use ``<>``.
+
+0.9.9
+-----
+
+* Fixed ``paste.response.HeaderDict`` ``get`` and ``setdefault``
+ methods to be case insensitive
+
+* Fix use of ``TestApp().post(params={'key': ['list of',
+ 'values']})`` as reported by Syver Enstad.
+
+* ``paste.fileapp.DataApp`` is now directly usable (was previously
+ only usable as an abstract base class).
+
+0.9.8
+-----
+
+* Fixed ``wsgiwrappers.WSGIResponse.delete_cookie``. It also now takes
+ optional path and domain arguments
+
+* ``wsgiwrappers.WSGIResponse`` now handles generator/iterator content
+ more cleanly, and properly encodes unicode content according to its
+ specified charset
+
+* Fixed ``wsgiwrappers.WSGIResponse`` mishandling multiple headers of
+ the same name
+
+* Added a Paste Deploy entry point for ``paste.auth.cookie``
+
+* Moved Paste Deploy dependencies out of top-level modules and into
+ Paste-Deploy-specific entry point functions. This should make Paste
+ more-or-less Paste Deploy independent. ``paste.urlparser`` and
+ ``paste.exceptions.errormiddleware`` still have some leftover bits.
+
+* Added another redirector type to ``paste.recursive``,
+ ``environ['paste.recursive.include_app_iter']`` which gives access
+ to the original app_iter.
+
+* Bug with ``wsgilib.catch_errors`` and app_iters with no ``close()``
+ method.
+
+* Long words in tracebacks weren't being wrapped correctly at all.
+ Also, large data would cause the wrapping routine to give a
+ recursion error. Now large data is truncated (at 1000 characters),
+ and recursion won't be a problem. Also, wrapping shouldn't lose
+ characters.
+
+* Better exception if you try to put a non-str into environ when using
+ ``paste.auth.cookie``
+
+* ``paste.exceptions.collector`` produces an
+ ``exc_data.exception_type`` that is a class, not a string. This
+ helps it get formatted better in Python 2.5.
+
+* All the tests pass on Python 2.5!
+
+* Added ``paste.proxy.TransparentProxy``, which just sends the request
+ described in the WSGI environ on without any modification. More
+ useful for WSGI clients than servers, it effectively allows any
+ WSGI-based request mechanism to act like an httplib-based request
+ mechanism.
+
+* Added a ``cache_max_age`` argument to
+ ``paste.urlparser.StaticURLParser``, which allows you to encourage
+ the caching of static files. Patch from Brad Clements.
+
+* Added ``suppress_http_headers`` to ``paste.proxy.Proxy``, which will
+ filter out HTTP headers from the request before passing it on.
+ Patch from Brad Clements.
+
+0.9.7
+-----
+
+* The ``EvalException`` 'full traceback' button is now only displayed
+ when the full traceback differs from the regular (includes hidden
+ frames).
+
+* Fixed ``EvalException`` returning a Content-type of 'text-html'
+ instead of 'text/html' in some cases.
+
+0.9.6
+-----
+
+* Renamed the ``paste.util.multidict.multidict`` class to
+ ``paste.util.multidict.MultiDict``
+
+0.9.5
+-----
+
+* Fixed a security vulnerability in ``paste.urlparser``'s StaticURLParser
+ and PkgResourcesParser where, with some servers, you could escape
+ the document root.
+
+* Significantly improved ``paste.httpserver``'s (egg:Paste#http)
+ performance. It now uses a thread pool: previously it created a new
+ thread for every request. To revert back to the old, slower behavior,
+ set::
+
+ use_threadpool = false
+
+ in the [server:main] section of the config file.
+
+* More control of where the output of ``paste.debug.prints`` goes.
+
+* Added a warning to ``paste.wsgilib.add_close`` if the upstream
+ app_iter consumer doesn't call the ``app_iter.close()`` method.
+
+* Fixed ``testapp.post(params={})``
+
+* Fixed ``paste.translogger.TransLogger`` to log to the Apache combined
+ log format as advertised.
+
+* Fixed ``paste.urlparser`` classes to handle quoted characters (e.g.
+ %20) in URL paths.
+
+* Changed ``paste.session`` to allow manipulating a session for the
+ first time after ``start_response`` is called.
+
+* Added ``paste.wsgilib.add_start_close`` which calls a function just
+ before returning the first chunk of the app_iter.
+
+* Changed ``paste.urlmap`` so that it matches domain-specific mappings
+ before domain-neutral mappings.
+
+* Fixed IE 6 potentially receiving the following ``"400 Bad Request"``
+ error on file downloads::
+
+ Please check your system clock.
+ According to this server, the time provided in the
+ If-Modified-Since header is in the future.
+
+* Added a 'no' keyword argument to ``TestResponse.mustcontain``, so
+ you can assert that a response does contain some strings at the same
+ time that you assert that a response *does not* contain other
+ strings, like::
+
+ res = app.get('/')
+ res.mustcontain('this must be there',
+ no=['error', 'unexpected'])
+
+* Fixed ``fileapp.FileApp`` to pay attention to the ``If-None-Match``
+ header, which does ETag matching; before only ``If-Modified-Since``
+ was supported, even though an ``ETag`` header was being sent; in
+ particular Firefox would then only send ``If-None-Match`` and so
+ conditional requests never worked.
+
+* Changed usage of ``paste.request.MultiDict`` to
+ ``paste.util.multidict``, particularly in ``paste.wsgiwrappers``
+ where ``request.GET`` returns a new style of dictionary interface.
+
+* Be more careful in ``paste.request.parse_formvars`` not to let
+ the ``cgi`` module read from ``wsgi.input`` when there are no
+ parsable variables in the input (based on ``CONTENT_TYPE``).
+
+0.9.4
+-----
+
+* This release was lost in a tragic clerical accident.
+
+0.9.3
+-----
+
+* 0.9.2 Included a version of MochiKit that was no longer compatible with
+ evalexception; 0.9.3 reverts to a previous version.
+
+* Change wsgi.run_once=False for ``paste.httpserver``
+
+* Added entry points for debug apps
+
+0.9.2
+-----
+
+* Fix in paste.urlmap when connecting with host:port.
+
+* Added ``/_debug/summary`` to evalexception, which gives a
+ JSON-formatted list of all the exceptions in memory.
+
+0.9.1
+-----
+
+* A fix for paste.errordocument, when doing an internal redirect from
+ a POST request (the request is rewritten as a GET request)
+
+0.9
+---
+
+* Added `paste.request.WSGIRequest
+ <class-paste.request.WSGIRequest.html>`_, a request object that
+ wraps the WSGI environment.
+
+* Added `paste.registry <module-paste.registry.html>`_, which is
+ middleware for registering threadlocal objects in a request.
+
+* Avoid annoying warning from paste.recursive
+
+* ``paste.httpserver`` now removes HTTPServer's transaction logging,
+ which was doing a reverse DNS lookup.
+
+* Added ``has_session`` to ``paste.session``
+
+* Allow for conditional ``paste.wsgilib.intercept_output`` which
+ should be slightly faster (and streamable) compared to doing the
+ condition manually.
+
+* Added entry point for `paste.proxy <module-paste.proxy.html>`_, plus
+ improvements from Brad Clements (support path in target, filter
+ request methods)
+
+* Added `paste.pony <module-paste.pony.html>`_ so pony power can be
+ added to any WSGI application.
+
+* Added port matching to ``paste.urlmap``.
+
+0.5
+---
+
+* Added `paste.auth.auth_tkt <module-paste.auth.auth_tkt.html>`_
+
+* Added `paste.auth.grantip <module-paste.auth.grantip.html>`_
+
+0.4.1
+-----
+
+* Some bug fixes to the `built-in HTTP server
+ <module-paste.httpserver.html>`_.
+
+* Experimental `paste.progress <module-paste.progress.html>`_
+ middleware for tracking upload progress
+
+* Some tweaking of how `paste.reload <module-paste.reload.html>`_
+ works, especially with respect to shutdown.
+
+0.4
+---
+
+* Fixed up paste documentation (especially for new packages/modules)
+
+* Added `paste.auth <module-paste.auth.html>`_ package for authentication
+ related WSGI middle-ware components:
+
+ - ``basic`` and ``digest`` HTTP authentication as described by RFC 2617
+
+ - support for Yale's Central Authentication System (``cas``)
+
+ - ``open_id`` supports single sign-on originally developed for
+ LiveJournal (see http://openid.net)
+
+ - ``cookie`` digitally signs cookies to record the current
+ authenticated user (``REMOTE_USER``), session identifier
+ (``REMOTE_SESSION``), and other WSGI entries in the ``environ``.
+
+ - a ``form`` module (to be used with ``cookie`` or an equivalent)
+ provides a simple HTML based form authentication.
+
+ - the ``multi`` module is an *experimental* mechanism for choosing
+ an authentication mechanism based on the WSGI ``environ``
+
+* Added `paste.httpserver <module-paste.httpserver.html>`_ module which
+ provides a very simple WSGI server built upon python's
+ ``BaseHTTPServer``; this server has support for several features:
+
+ - support for SSL connections via OpenSSL
+
+ - support for HTTP/1.1 ``100 Continue`` messages as required by the
+ WSGI specification (many HTTP server implementations don't do this)
+
+ - implemented as a Mix-In so that it can be used with other
+ more enchanted versions of ``BaseHTTPServer``
+
+ - support for 'Keep-Alive' (standard in HTTP/1.1) by either providing
+ a content-length or closing a connection if one is not available
+
+* Improved the `paste.httpexceptions <module-paste.httpexceptions.html>`_
+ module:
+
+ - added missing exception objects, and better descriptions
+
+ - fixed several bugs in how exceptions are caught and propagated
+
+ - usage as a ``wsgi_application()`` enables exceptions to be
+ returned without throwing or catching the error
+
+ - support for plain/text messages for text-only clients such as
+ curl, python's urllib, or Microsoft Excel
+
+ - allows customization of the HTML template for higher-level frameworks
+
+* Added `paste.httpheaders <module-paste.httpheaders.html>`_ module
+ to provide a uniform mechanism to access/update standard HTTP headers
+ in a WSGI ``environ`` and ``response_headers`` collection; it includes
+ specific support for:
+
+ - providing "common" header names and sorting them as suggested
+ by RFC 2616
+
+ - validated support for ``Cache-Control`` header construction
+
+ - validated support for ``Content-Disposition`` header construction
+
+ - parsing of ``If-Modified-Since`` and other date oriented headers
+
+ - parsing of Range header for partial-content delivery
+
+ - composition of HTTP/1.1 digest ``Authorization`` responses
+
+* Improved `paste.fileapp <module-paste.fileapp.html>`_ to support:
+
+ - static in-memory resources
+
+ - incremental downloading of files from disk
+
+ - responding to 'Range' requests to handle partial downloads
+
+ - allowing cache settings to be easily provided; including
+ support for HTTP/1.0 'Expires' and HTTP/1.1 'Cache-Control'
+
+* Added an *experimental* `paste.transaction
+ <module-paste.transaction.html>`_ module for handling
+ commit/rollback of standard DBAPI database connections
+
+* Added a `paste.util.datetimeutil <module-paste.util.datetimeutil.html>`_
+ module for parsing standard date/time user-generated text values
+
+* Added a `debug <module-paste.debug.html>`_ package, which includes:
+
+ - previous top-level modules ``prints``, ``profile``,
+ ``wdg_validate`` and ``doctest_webapp``
+
+ - a ``testserver`` module suitable to test HTTP socket
+ connections via ``py.test``
+
+* Re-factored `paste.wsgilib <module-paste.wsgilib.html>`_ into
+ several other modules:
+
+ - functions regarding header manipulation moved to
+ `paste.response <module-paste.response.html>`_
+
+ - functions regarding cookies and arguments moved to
+ `paste.request <module-paste.request.html>`_
+
+* Significant improvements to ``wsgiutils.wsgilib`` module:
+
+ - added a ``dump_environ`` application to help debugging
+
+ - fixes to ``raw_interactive`` to comply with WSGI specifications
+
+ - ``raw_interactive`` now logs all 5xx exceptions and sets HTTP_HOST
+
+* Added an argument ``no_profile`` to
+ `paste.debug.profile.profile_decorator
+ <module-paste.debug.profile.html#profile_decorator>`_; if that
+ option is false, then don't profile the function at all.
+
+* Changed `paste.lint <module-paste.lint.html>`_ to check that the
+ status contains a message (e.g., ``"404 Not Found"`` instead of just
+ ``"404"``). Check that environmental variables
+  ``HTTP_CONTENT_TYPE`` and ``HTTP_CONTENT_LENGTH`` are not present.
+ Made unknown ``REQUEST_METHOD`` a warning (not an error).
+
+* Added parameter ``cwd`` to `TestFileEnvironment.run
+ <class-paste.fixture.TestFileEnvironment.html#run>`_
+
+* `paste.fixture.TestApp <class-paste.fixture.TestApp.html>`_:
+
+ - Form filling code (use ``response.forms[0]`` to get a `form object
+ <class-paste.fixture.Form.html>`_)
+
+ - Added `click method
+ <class-paste.fixture.TestResponse.html#click>`_.
+
+ - Better attribute errors.
+
+ - You can force set hidden fields using
+ ``form.fields[name].force_value(value)`` (normally setting the
+ value of a hidden field is an error).
+
+ - Frameworks can now add custom attributes to the response object.
+
+* ``paste.wsgilib.capture_output`` is deprecated in favor of
+ `paste.wsgilib.intercept_output
+ <module-paste.wsgilib.html#intercept_output>`_
+
+* Remove use of exceptions in `paste.cascade.Cascade
+ <class-paste.cascade.Cascade.html>`_, which causes weird effects in
+ some cases. Generally we aren't using exceptions internally now,
+ only return status codes. Also in cascade, be careful to keep
+ cascaded requests from sharing the same environment.
+
+* ``paste.wsgilib.error_response`` is deprecated
+ (`paste.httpexceptions <module-paste.httpexceptions.html>`_ replaces
+ this with exception's ``.wsgi_application`` method).
+
+* Moved ``paste.login`` to the attic, since `paste.auth
+ <module-paste.auth.html>`_ pretty much replaces it.
+
+* `paste.urlparser <module-paste.urlparser.html>`_ improvements:
+
+ - Added an application `urlparser.StaticURLParser
+ <class-paste.urlparser.StaticURLParser.html>`_ for serving static
+ files.
+
+ - Added an application `urlparser.PkgResourcesParser
+ <class-paste.urlparser.PkgResourcesParser.html>`_ for serving static
+ files found with ``pkg_resources`` (e.g., out of zipped Eggs).
+
+ - Be less picky about ambiguous filenames when using `URLParser
+ <class-paste.urlparser.URLParser.html>`_; if an exact file match
+ exists, use that. (``file.gif.bak`` would cause a request for
+ ``file.gif`` to be ambiguous before)
+
+ - Now looks for a ``.wsgi_application`` attribute when serving
+ Python files/modules, as a general hook for returning a WSGI
+ application version of an object.
+
+* The `ErrorMiddleware
+ <class-paste.exceptions.errormiddleware.ErrorMiddleware.html>`_:
+
+ - Returns trimmed-down exceptions if there is a ``_`` GET variable
+ in the request (which is meant to signal an XMLHttpRequest).
+ Exceptions displayed in this context are best when they are smaller
+ and easier to display.
+
+ - Includes a text version of the traceback, for easier
+ copy-and-paste.
+
+ - Avoid printing exceptions to ``wsgi.errors`` if they are already
+ displayed elsewhere (at least by default).
+
+ - Highlight Python code.
+
+* Use ``pkg_resources.declare_namespace`` so that there are less
+ problems about confusing the ``paste`` package that is provided by
+ Paste, Paste Script, Paste Deploy, and Paste WebKit. Before you
+ could get one of these at random if you didn't use
+ ``pkg_resources.require`` first.
+
+* Cleaned up use of ``exc_info`` argument in ``start_response`` calls
+ (both accepting and producing), in a variety of places.
diff --git a/docs/paste-httpserver-threadpool.txt b/docs/paste-httpserver-threadpool.txt
new file mode 100644
index 0000000..e255a61
--- /dev/null
+++ b/docs/paste-httpserver-threadpool.txt
@@ -0,0 +1,150 @@
+The Paste HTTP Server Thread Pool
+=================================
+
+This document describes how the thread pool in ``paste.httpserver``
+works, and how it can adapt to problems.
+
+Note all of the configuration parameters listed here are prefixed with
+``threadpool_`` when running through a Paste Deploy configuration.
+
+Error Cases
+-----------
+
+When a WSGI application is called, it's possible that it will block
+indefinitely. There's two basic ways you can manage threads:
+
+* Start a thread on every request, close it down when the thread stops
+
+* Start a pool of threads, and reuse those threads for subsequent
+ requests
+
+In both cases things go wrong -- if you start a thread every request
+you will have an explosion of threads, and with it memory and a loss
+of performance. This can culminate in really high loads, swapping,
+and the whole site grinds to a halt.
+
+If you are using a pool of threads, all the threads can simply be used
+up. New requests go into a queue to be processed, but since that
+queue never moves forward everyone will just block. The site
+basically freezes, though memory usage doesn't generally get worse.
+
+Paste Thread Pool
+-----------------
+
+The thread pool in Paste has some options to walk the razor's edge
+between the two techniques, and to try to respond usefully in most
+cases.
+
+The pool tracks all worker threads.  Threads can be in a few states:
+
+* Idle, waiting for a request ("idle")
+
+* Working on a request
+
+ * For a reasonable amount of time ("busy")
+
+ * For an unreasonably long amount of time ("hung")
+
+* Thread that should die
+
+ * An exception has been injected that should kill the thread, but it
+ hasn't happened yet ("dying")
+
+ * An exception has been injected, but the thread has persisted for
+ an unreasonable amount of time ("zombie")
+
+When a request comes in, if there are no idle worker threads waiting
+then the server looks at the workers; all workers are busy or hung.
+If too many are hung, another thread is opened up. The limit is if
+there are less than ``spawn_if_under`` busy threads. So if you have
+10 workers, ``spawn_if_under`` is 5, and there are 6 hung threads and
+4 busy threads, another thread will be opened (bringing the number of
+busy threads back to 5). Later those threads may be collected again
+if some of the threads become un-hung. A thread is hung if it has
+been working for longer than ``hung_thread_limit`` (default 30
+seconds).
+
+Every so often, the server will check all the threads for error
+conditions. This happens every ``hung_check_period`` requests
+(default 100). At this time if there are more than enough threads
+(because of ``spawn_if_under``) some threads may be collected. If any
+threads have been working for longer than ``kill_thread_limit``
+(default 1800 seconds, i.e., 30 minutes) then the thread will be
+killed.
+
+To kill a thread the ``ctypes`` module must be installed. This will
+raise an exception (``SystemExit``) in the thread, which should cause
+the thread to stop. It can take quite a while for this to actually
+take effect, sometimes on the order of several minutes. This uses a
+non-public API (hence the ``ctypes`` requirement), and so it might not
+work in all cases. I've tried it in pure Python code and with a hung
+socket, and in both cases it worked. As soon as the thread is killed
+(before it is actually dead) another worker is added to the pool.
+
+If the killed thread lives longer than ``dying_thread_limit`` (default
+300 seconds, 5 minutes) then it is considered a zombie.
+
+Zombie threads are not handled specially unless you set
+``max_zombies_before_die``. If you set this and there are more than
+this many zombie threads, then the entire process will be killed.
+This is useful if you are running the server under some process
+monitor, such as ``start-stop-daemon``, ``daemontools``, ``runit``, or
+with ``paster serve --monitor``. To make the process die, it may run
+``os._exit``, which is considered an impolite way to exit a process
+(akin to ``kill -9``). It *will* try to run the functions registered
+with ``atexit`` (except for the thread cleanup functions, which are
+the ones which will block so long as there are living threads).
+
+Notification
+------------
+
+If you set ``error_email`` (including setting it globally in a Paste
+Deploy ``[DEFAULT]`` section) then you will be notified of two error
+conditions: when hung threads are killed, and when the process is
+killed due to too many zombie threads.
+
+Missed Cases
+------------
+
+If you have a worker pool size of 10, and 11 slow or hung requests
+come in, the first 10 will get handed off but the server won't know
+yet that they will hang. The last request will stay stuck in a queue
+until another request comes in.  When a subsequent request comes in
+(after ``hung_thread_limit`` seconds) the server will notice the
+problem and add more threads, and the 11th request will come through.
+
+If a trickle of bad requests keeps coming in, the number of hung
+threads will keep increasing. At 100 the ``hung_check_period`` may
+not clean them up fast enough.
+
+Killing threads is not something Python really supports. Corruption
+of the process, memory leaks, or who knows what might occur. For the
+most part the threads seem to be killed in a fairly simple manner --
+an exception is raised, and ``finally`` blocks do get executed. But
+this hasn't been tried much in production, so there's not much
+experience with it.
+
+watch_threads
+-------------
+
+If you want to see what's going on in your process, you can install
+the application ``egg:Paste#watch_threads`` (in the
+``paste.debug.watchthreads`` module). This lets you see requests and
+how long they have been running. In Python 2.5 you can see tracebacks
+of the running requests; before that you can only see request data
+(URLs, User-Agent, etc). If you set ``allow_kill = true`` then you
+can also kill threads from the application. The thread pool is
+intended to run reliably without intervention, but this can help debug
+problems or give you some feeling of what causes problems in the site.
+
+This does open up privacy problems, as it gives you access to all the
+request data in the site, including cookies, IP addresses, etc. It
+shouldn't be left on in a public setting.
+
+socket_timeout
+--------------
+
+The HTTP server (not the thread pool) also accepts an argument
+``socket_timeout``. It is turned off by default. You might find it
+helpful to turn it on.
+
diff --git a/docs/test_server.ini b/docs/test_server.ini
new file mode 100644
index 0000000..7b9c096
--- /dev/null
+++ b/docs/test_server.ini
@@ -0,0 +1,42 @@
+[DEFAULT]
+error_email = ianb@colorstudy.com
+
+[app:main]
+use = egg:PasteScript#test
+
+[server:main]
+use = egg:Paste#http
+host = 127.0.0.1:8081
+# These options make it easier to trigger the thread pool catches
+# (i.e., threads are hung fast, killed fast, spawn fast, and the
+# whole process dies quickly due to zombies)
+threadpool_workers = 3
+threadpool_hung_thread_limit = 10
+threadpool_kill_thread_limit = 20
+threadpool_spawn_if_under = 2
+threadpool_max_zombie_threads_before_die = 2
+threadpool_hung_check_period = 1
+threadpool_dying_limit = 10
+
+[server:cherrypy]
+use = egg:PasteScript#cherrypy
+host = 127.0.0.1:8080
+
+[filter-app:watch_threads]
+use = egg:Paste#error_catcher
+debug = true
+next = watch_threads_inner
+
+[app:watch_threads_inner]
+use = egg:Paste#urlmap
+/bad = bad_app
+/ = watch_app
+
+[app:watch_app]
+use = egg:Paste#watch_threads
+allow_kill = true
+
+[app:bad_app]
+paste.app_factory = paste.debug.watchthreads:make_bad_app
+
+
diff --git a/docs/testing-applications.txt b/docs/testing-applications.txt
new file mode 100644
index 0000000..76ca495
--- /dev/null
+++ b/docs/testing-applications.txt
@@ -0,0 +1,156 @@
+Testing Applications with Paste
++++++++++++++++++++++++++++++++
+
+:author: Ian Bicking <ianb@colorstudy.com>
+:revision: $Rev$
+:date: $LastChangedDate$
+
+.. contents::
+
+Introduction
+============
+
+Paste includes functionality for testing your application in a
+convenient manner. These facilities are quite young, and feedback is
+invited. Feedback and discussion should take place on the
+`Paste-users list
+<http://groups.google.com/group/paste-users>`_.
+
+These facilities let you test your Paste and WSGI-based applications
+easily and without a server.
+
+.. include:: include/contact.txt
+
+The Tests Themselves
+====================
+
+The ``app`` object is a wrapper around your application, with many
+methods to make testing convenient. Here's an example test script::
+
+ def test_myapp():
+ res = app.get('/view', params={'id': 10})
+ # We just got /view?id=10
+ res.mustcontain('Item 10')
+        res = app.post('/view',
+                       params={'id': 10, 'name': 'New item name'})
+ # The app does POST-and-redirect...
+ res = res.follow()
+ assert res.request.url == '/view?id=10'
+ res.mustcontain('New item name')
+ res.mustcontain('Item updated')
+
+The methods of the ``app`` object (a ``paste.tests.fixture.TestApp``
+object):
+
+``get(url, params={}, headers={}, status=None)``:
+ Gets the URL. URLs are based in the root of your application; no
+ domains are allowed. Parameters can be given as a dictionary, or
+ included directly in the ``url``. Headers can also be added.
+
+ This tests that the status is a ``200 OK`` or a redirect header,
+ unless you pass in a ``status``. A status of ``"*"`` will never
+ fail; or you can assert a specific status (like ``500``).
+
+ Also, if any errors are written to the error stream this will
+ raise an error.
+
+``post(url, params={}, headers={}, status=None, upload_files=())``:
+ POSTS to the URL. Like GET, except also allows for uploading
+ files. The uploaded files are a list of ``(field_name, filename,
+ file_content)``.
+
+ If you don't want to do a urlencoded post body, you can put a
+ ``content-type`` header in your header, and pass the body in as a
+ string with ``params``.
+
+The response object:
+
+``header(header_name, [default])``:
+ Returns the named header. It's an error if there is more than one
+ matching header. If you don't provide a default, it is an error
+ if there is no matching header.
+
+``all_headers(header_name)``:
+ Returns a list of all matching headers.
+
+``follow(**kw)``:
+ Follows the redirect, returning the new response. It is an error
+ if this response wasn't a redirect. Any keyword arguments are
+ passed to ``app.get`` (e.g., ``status``).
+
+``x in res``:
+ Returns True if the string is found in the response. Whitespace
+ is normalized for this test.
+
+``mustcontain(*strings)``:
+ Raises an error if any of the strings are not found in the
+ response.
+
+``showbrowser()``:
+ Opens the HTML response in a browser; useful for debugging.
+
+``str(res)``:
+ Gives a slightly-compacted version of the response.
+
+``click(description=None, linkid=None, href=None, anchor=None, index=None, verbose=False)``:
+ Clicks the described link (`see docstring for more
+ <./class-paste.fixture.TestResponse.html#click>`_)
+
+``forms``:
+ Return a dictionary of forms; you can use both indexes (refer to
+ the forms in order) or the string ids of forms (if you've given
+ them ids) to identify the form. See `Form Submissions <#form-submissions>`_ for
+ more on the form objects.
+
+Request objects:
+
+``url``:
+ The url requested.
+
+``environ``:
+ The environment used for the request.
+
+``full_url``:
+ The url with query string.
+
+Form Submissions
+================
+
+You can fill out and submit forms from your tests. First you get the
+form::
+
+ res = testapp.get('/entry_form')
+ form = res.forms[0]
+
+Then you fill it in fields::
+
+ # when there's one unambiguous name field:
+ form['name'] = 'Bob'
+ # Enter something into the first field named 'age'
+ form.set('age', '45', index=1)
+
+Finally you submit::
+
+ # Submit with no particular submit button pressed:
+ form.submit()
+ # Or submit a button:
+ form.submit('submit_button_name')
+
+Framework Hooks
+===============
+
+Frameworks can detect that they are in a testing environment by the
+presence (and truth) of the WSGI environmental variable
+``"paste.testing"``.
+
+More generally, frameworks can detect that something (possibly a test
+fixture) is ready to catch unexpected errors by the presence and truth
+of ``"paste.throw_errors"`` (this is sometimes set outside of testing
+fixtures too, when an error-handling middleware is in place).
+
+Frameworks that want to expose the inner structure of the request may
+use ``"paste.testing_variables"``. This will be a dictionary -- any
+values put into that dictionary will become attributes of the response
+object. So if you do ``env["paste.testing_variables"]['template'] =
+template_name`` in your framework, then ``response.template`` will be
+``template_name``.
diff --git a/docs/url-parsing-with-wsgi.txt b/docs/url-parsing-with-wsgi.txt
new file mode 100644
index 0000000..856971f
--- /dev/null
+++ b/docs/url-parsing-with-wsgi.txt
@@ -0,0 +1,304 @@
+URL Parsing With WSGI And Paste
++++++++++++++++++++++++++++++++
+
+:author: Ian Bicking <ianb@colorstudy.com>
+:revision: $Rev$
+:date: $LastChangedDate$
+
+.. contents::
+
+Introduction and Audience
+=========================
+
+This document is intended for web framework authors and integrators,
+and people who want to understand the internal architecture of Paste.
+
+.. include:: include/contact.txt
+
+URL Parsing
+===========
+
+.. note::
+
+ Sometimes people use "URL", and sometimes "URI". I think URLs are
+ a subset of URIs. But in practice you'll almost never see URIs
+ that aren't URLs, and certainly not in Paste. URIs that aren't
+ URLs are abstract Identifiers, that cannot necessarily be used to
+ Locate the resource. This document is *all* about locating.
+
+Most generally, URL parsing is about taking a URL and determining what
+"resource" the URL refers to. "Resource" is a rather vague term,
+intentionally. It's really just a metaphor -- in reality there aren't
+any "resources" in HTTP; there are only requests and responses.
+
+In Paste, everything is about WSGI. But that can seem too fancy.
+There are four core things involved: the *request* (personified in the
+WSGI environment), the *response* (personified in the
+``start_response`` callback and the return iterator), the WSGI
+application, and the server that calls that application. The
+application and request are objects, while the server and response are
+really more like actions than concrete objects.
+
+In this context, URL parsing is about mapping a URL to an
+*application* and a *request*. The request actually gets modified as
+it moves through different parts of the system. Two dictionary keys
+in particular relate to URLs -- ``SCRIPT_NAME`` and ``PATH_INFO`` --
+but any part of the environment can be modified as it is passed
+through the system.
+
+Dispatching
+===========
+
+.. note::
+
+ WSGI isn't object oriented? Well, if you look at it, you'll notice
+ there's no objects except built-in types, so it shouldn't be a
+ surprise. Additionally, the interface and promises of the objects
+ we do see are very minimal. An application doesn't have any
+ interface except one method -- ``__call__`` -- and that method
+ *does* things, it doesn't give any other information.
+
+Because WSGI is action-oriented, rather than object-oriented, it's
+more important what we *do*. "Finding" an application is probably an
+intermediate step, but "running" the application is our ultimate goal,
+and the only real judge of success. An application that isn't run is
+useless to us, because it doesn't have any other useful methods.
+
+So what we're really doing is *dispatching* -- we're handing the
+request and responsibility for the response off to another object
+(another actor, really). In the process we can actually retain some
+control -- we can capture and transform the response, and we can
+modify the request -- but that's not what the typical URL resolver will
+do.
+
+Motivations
+===========
+
+The most obvious kind of URL parsing is finding a WSGI application.
+
+Typically when a framework first supports WSGI or is integrated into
+Paste, it is "monolithic" with respect to URLs. That is, you define
+(in Paste, or maybe in Apache) a "root" URL, and everything under that
+goes into the framework. What the framework does internally, Paste
+does not know -- it probably finds internal objects to dispatch to,
+but the framework is opaque to Paste. Not just to Paste, but to
+any code that isn't in that framework.
+
+That means that we can't mix code from multiple frameworks, or as
+easily share services, or use WSGI middleware that doesn't apply to
+the entire framework/application.
+
+An example of someplace we might want to use an "application" that
+isn't part of the framework would be uploading large files. It's
+possible to keep track of upload progress, and report that back to the
+user, but no framework typically is capable of this. This is usually
+because the POST request is completely read and parsed before it
+invokes any application code.
+
+This is resolvable in WSGI -- a WSGI application can provide its own
+code to read and parse the POST request, and simultaneously report
+progress (usually in a way that *another* WSGI application/request can
+read and report to the user on that progress). This is an example
+where you want to allow "foreign" applications to be intermingled with
+framework application code.
+
+Finding Applications
+====================
+
+OK, enough theory. How does a URL parser work? Well, it is a WSGI
+application, and a WSGI server, in the typical "WSGI middleware"
+style. Except that it determines which application it will serve
+for each request.
+
+Let's consider Paste's ``URLParser`` (in ``paste.urlparser``). This
+class takes a directory name as its only required argument, and
+instances are WSGI applications.
+
+When a request comes in, the parser looks at ``PATH_INFO`` to see
+what's left to parse. ``SCRIPT_NAME`` represents where we are *now*;
+it's the part of the URL that has been parsed.
+
+There's a couple special cases:
+
+The empty string:
+
+ URLParser serves directories. When ``PATH_INFO`` is empty, that
+  means we got a request with no trailing ``/``, like say ``/blog``.
+ If URLParser serves the ``blog`` directory, then this won't do --
+ the user is requesting the ``blog`` *page*. We have to redirect
+ them to ``/blog/``.
+
+A single ``/``:
+
+ So, we got a trailing ``/``. This means we need to serve the
+ "index" page. In URLParser, this is some file named ``index``,
+ though that's really an implementation detail. You could create
+ an index dynamically (like Apache's file listings), or whatever.
+
+Otherwise we get a string like ``/path...``. Note that ``PATH_INFO``
+*must* start with a ``/``, or it must be empty.
+
+URLParser pulls off the first part of the path. E.g., if
+``PATH_INFO`` is ``/blog/edit/285``, then the first part is ``blog``.
+It appends this to ``SCRIPT_NAME``, and strips it off ``PATH_INFO``
+(which becomes ``/edit/285``).
+
+It then searches for a file that matches "blog". In URLParser, this
+means it looks for a filename which matches that name (ignoring the
+extension). It then uses the type of that file (determined by
+extension) to create a WSGI application.
+
+One case is that the file is a directory. In that case, the
+application is *another* URLParser instance, this time with the new
+directory.
+
+URLParser actually allows per-extension "plugins" -- these are just
+functions that get a filename, and produce a WSGI application. One of
+these is ``make_py`` -- this function imports the module, and looks
+for special symbols; if it finds a symbol ``application``, it assumes
+this is a WSGI application that is ready to accept the request. If it
+finds a symbol that matches the name of the module (e.g., ``edit``),
+then it assumes that is an application *factory*, meaning that when
+you call it with no arguments you get a WSGI application.
+
+Another function takes "unknown" files (files for which no better
+constructor exists) and creates an application that simply responds
+with the contents of that file (and the appropriate ``Content-Type``).
+
+In any case, ``URLParser`` delegates as soon as it can. It doesn't
+parse the entire path -- it just finds the *next* application, which
+in turn may delegate to yet another application.
+
+Here's a very simple implementation of URLParser::
+
+ class URLParser(object):
+ def __init__(self, dir):
+ self.dir = dir
+ def __call__(self, environ, start_response):
+ segment = wsgilib.path_info_pop(environ)
+ if segment is None: # No trailing /
+ # do a redirect...
+ for filename in os.listdir(self.dir):
+ if os.path.splitext(filename)[0] == segment:
+ return self.serve_application(
+ environ, start_response, filename)
+ # do a 404 Not Found
+ def serve_application(self, environ, start_response, filename):
+ basename, ext = os.path.splitext(filename)
+ filename = os.path.join(self.dir, filename)
+ if os.path.isdir(filename):
+ return URLParser(filename)(environ, start_response)
+ elif ext == '.py':
+ module = import_module(filename)
+ if hasattr(module, 'application'):
+ return module.application(environ, start_response)
+ elif hasattr(module, basename):
+ return getattr(module, basename)(
+ environ, start_response)
+ else:
+ return wsgilib.send_file(filename)
+
+Modifying The Request
+=====================
+
+Well, URLParser is one kind of parser. But others are possible, and
+aren't too hard to write.
+
+Lets imagine a URL like ``/2004/05/01/edit``. It's likely that
+``/2004/05/01`` doesn't point to anything on file, but is really more
+of a "variable" that gets passed to ``edit``. So we can pull them off
+and put them somewhere. This is a good place for a WSGI extension.
+Lets put them in ``environ["app.url_date"]``.
+
+We'll pass one other application in -- once we get the date (if any)
+we need to pass the request onto an application that can actually
+handle it. This "application" might be a URLParser or similar system
+(that figures out what ``/edit`` means).
+
+::
+
+ class GrabDate(object):
+ def __init__(self, subapp):
+ self.subapp = subapp
+ def __call__(self, environ, start_response):
+ date_parts = []
+ while len(date_parts) < 3:
+ first, rest = wsgilib.path_info_split(environ['PATH_INFO'])
+ try:
+ date_parts.append(int(first))
+ wsgilib.path_info_pop(environ)
+ except (ValueError, TypeError):
+ break
+ environ['app.date_parts'] = date_parts
+ return self.subapp(environ, start_response)
+
+This is really like traditional "middleware", in that it sits between
+the server and just one application.
+
+Assuming you put this class in the ``myapp.grabdate`` module, you
+could install it by adding this to your configuration::
+
+ middleware.append('myapp.grabdate.GrabDate')
+
+Object Publishing
+=================
+
+Besides looking in the filesystem, "object publishing" is another
+popular way to do URL parsing. This is pretty easy to implement as
+well -- it usually just means use ``getattr`` with the popped
+segments. But we'll implement a rough approximation of `Quixote's
+<http://www.mems-exchange.org/software/quixote/>`_ URL parsing::
+
+ class ObjectApp(object):
+ def __init__(self, obj):
+ self.obj = obj
+ def __call__(self, environ, start_response):
+ next = wsgilib.path_info_pop(environ)
+            if next is None:
+                # This is the object, let's serve it...
+                return self.publish(self.obj, environ, start_response)
+            next = next or '_q_index' # the default index method
+            if next in self.obj._q_exports and getattr(self.obj, next, None):
+                return ObjectApp(getattr(self.obj, next))(
+                    environ, start_response)
+            next_obj = self.obj._q_traverse(next)
+ if not next_obj:
+ # Do a 404
+ return ObjectApp(next_obj)(environ, start_response)
+
+ def publish(self, obj, environ, start_response):
+ if callable(obj):
+ output = str(obj())
+ else:
+ output = str(obj)
+ start_response('200 OK', [('Content-type', 'text/html')])
+ return [output]
+
+The ``publish`` method is a little weak, and functions like
+``_q_traverse`` aren't passed interesting information about the
+request, but this is only a rough approximation of the framework.
+Things to note:
+
+* The object has standard attributes and methods -- ``_q_exports``
+ (attributes that are public to the web) and ``_q_traverse``
+ (a way of overriding the traversal without having an attribute for
+ each possible path segment).
+
+* The object isn't rendered until the path is completely consumed
+ (when ``next`` is ``None``). This means ``_q_traverse`` has to
+ consume extra segments of the path. In this version ``_q_traverse``
+ is only given the next piece of the path; Quixote gives it the
+ entire path (as a list of segments).
+
+* ``publish`` is really a small and lame way to turn a Quixote object
+ into a WSGI application. For any serious framework you'd want to do
+ a better job than what I do here.
+
+* It would be even better if you used something like `Adaptation
+ <http://www.python.org/peps/pep-0246.html>`_ to convert objects into
+ applications. This would include removing the explicit creation of
+ new ``ObjectApp`` instances, which could also be a kind of fall-back
+ adaptation.
+
+Anyway, this example is less complete, but maybe it will get you
+thinking.
diff --git a/docs/web/default-site.css b/docs/web/default-site.css
new file mode 100644
index 0000000..a7819c8
--- /dev/null
+++ b/docs/web/default-site.css
@@ -0,0 +1,382 @@
+/*
+:Author: David Goodger, Ian Bicking
+:Contact: ianb@colorstudy.com
+:date: $Date: 2003/11/01 20:35:45 $
+:version: $Revision: 1.3 $
+:copyright: This stylesheet has been placed in the public domain.
+
+A modification of the default cascading style sheet (v.1.3) for the
+HTML output of Docutils.
+*/
+
+em, i {
+ /* Typically serif fonts have much nicer italics */
+ font-family: Times New Roman, Times, serif;
+}
+
+ul li {
+ list-style-type: circle;
+}
+
+a.toc-backref {
+ text-decoration: none;
+ color: black;
+}
+
+a.toc-backref:hover {
+ background-color: inherit;
+}
+
+h1 a:hover, h2 a:hover, h3 a:hover, h4 a:hover, h5 a:hover, h6:hover {
+ background-color: inherit;
+}
+
+cite {
+ font-style: normal;
+ font-family: monospace;
+ font-weight: bold;
+}
+
+dd {
+ margin-bottom: 0.5em;
+}
+
+div.abstract {
+ margin: 2em 5em;
+}
+
+div.abstract p.topic-title {
+ font-weight: bold;
+ text-align: center;
+}
+
+div.attention, div.caution, div.danger, div.error, div.hint,
+div.important, div.note, div.tip, div.warning {
+ background-color: #ccc;
+ width: 40%;
+ border: medium outset;
+ padding: 3px;
+ float: right
+}
+
+div.attention p.admonition-title, div.caution p.admonition-title,
+div.danger p.admonition-title, div.error p.admonition-title,
+div.warning p.admonition-title {
+ color: #c00;
+ font-weight: bold;
+ font-family: sans-serif;
+ text-align: center;
+ background-color: #999;
+ display: block;
+ margin: 0;
+}
+
+div.hint p.admonition-title, div.important p.admonition-title,
+div.note p.admonition-title, div.tip p.admonition-title {
+ font-weight: bold;
+ font-family: sans-serif;
+ text-align: center;
+ background-color: #999;
+ display: block;
+ margin: 0;
+}
+
+div.dedication {
+ margin: 2em 5em;
+ text-align: center;
+ font-style: italic;
+}
+
+div.dedication p.topic-title {
+ font-weight: bold;
+ font-style: normal;
+}
+
+div.figure {
+ margin-left: 2em;
+}
+
+div.footer, div.header {
+ font-size: smaller;
+}
+
+div.system-messages {
+ margin: 5em;
+}
+
+div.system-messages h1 {
+ color: red;
+}
+
+div.system-message {
+ border: medium outset;
+ padding: 1em;
+}
+
+div.system-message p.system-message-title {
+ color: red;
+ font-weight: bold;
+}
+
+div.topic {
+ margin: 2em;
+}
+
+h3 a.toc-backref, h4 a.toc-backref, h5 a.toc-backref,
+h6 a.toc-backref {
+ color: #000;
+}
+
+h1.title {
+ text-align: center;
+}
+
+h2.subtitle {
+ text-align: center;
+}
+
+hr {
+ width: 75%;
+}
+
+ol.simple, ul.simple {
+ margin-bottom: 1em;
+}
+
+ul.contents li {
+ list-style: none;
+ margin: 0;
+ padding: 0;
+}
+
+ul.contents {
+ position: fixed;
+ top: 0px;
+ right: 0px;
+ background-color: #fd9;
+ border-left: 1px solid #f70;
+ border-bottom: 1px solid #f70;
+ width: 200px;
+ padding: 0;
+ margin: 0;
+}
+
+ul.contents a {
+ padding: 2px 1em 2px 1em;
+ display: block;
+ text-decoration: none;
+ color: #400;
+}
+
+ul.contents a:hover {
+ background-color: #f80;
+ color: #fff;
+}
+
+ul.contents li.header {
+ padding: 5px 1px 3px 0;
+ font-weight: bold;
+}
+
+ol.arabic {
+ list-style: decimal;
+}
+
+ol.loweralpha {
+ list-style: lower-alpha;
+}
+
+ol.upperalpha {
+ list-style: upper-alpha;
+}
+
+ol.lowerroman {
+ list-style: lower-roman;
+}
+
+ol.upperroman {
+ list-style: upper-roman;
+}
+
+p.caption {
+ font-style: italic;
+}
+
+p.credits {
+ font-style: italic;
+ font-size: smaller;
+}
+
+p.first {
+ margin-top: 0;
+}
+
+p.label {
+ white-space: nowrap;
+}
+
+p.topic-title {
+ font-weight: bold;
+}
+
+pre.address {
+ margin-bottom: 0;
+ margin-top: 0;
+ font-family: serif;
+ font-size: 100%;
+}
+
+pre.line-block {
+ font-family: serif;
+ font-size: 100%;
+}
+
+pre.literal-block, pre.doctest-block {
+ margin-left: 2em;
+ margin-right: 2em;
+ background-color: #eee;
+ border: thin black solid;
+ padding: 5px;
+}
+
+span.classifier {
+ font-family: sans-serif;
+ font-style: oblique;
+}
+
+span.classifier-delimiter {
+ font-family: sans-serif;
+ font-weight: bold;
+}
+
+span.interpreted {
+ font-family: sans-serif;
+}
+
+span.option-argument {
+ font-style: italic;
+}
+
+span.pre {
+ white-space: pre;
+}
+
+span.problematic {
+ color: red;
+}
+
+table {
+ margin-top: 0.5em;
+ margin-bottom: 0.5em;
+}
+
+table.citation {
+ border-left: solid thin gray;
+ padding-left: 0.5ex
+}
+
+table.docinfo {
+}
+
+table.footnote {
+ border-left: solid thin black;
+ padding-left: 0.5ex;
+}
+
+td, th {
+ padding-left: 0.5em;
+ padding-right: 0.5em;
+ vertical-align: top;
+}
+
+td > p:first-child, th > p:first-child {
+ margin-top: 0em;
+}
+
+th.docinfo-name, th.field-name {
+ font-weight: bold;
+ text-align: left;
+ white-space: nowrap;
+}
+
+h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt {
+ font-size: 100%;
+}
+
+code, tt {
+ color: #006;
+}
+
+ul.auto-toc {
+ list-style-type: none;
+}
+
+/*****************************************
+ * Doctest embedded examples
+ *****************************************/
+
+span.doctest-url {
+ background-color: #eee;
+ border-top: 2px outset #666;
+ border-left: 2px outset #666;
+ border-right: 2px outset #666;
+ padding: 0.25em;
+}
+
+div.doctest-example {
+ border: outset 5px #666;
+ background-color: #eee;
+ font-family: default;
+ padding: 0.5em;
+}
+
+div.doctest-example h1 {
+ background-color: inherit;
+ border: none;
+ color: inherit;
+ font-family: default;
+}
+
+div.doctest-example tt {
+ color: inherit;
+}
+
+div.doctest-status {
+ background-color: #060;
+ color: #fff;
+}
+
+span.doctest-header {
+ background-color: #ccc;
+ font-family: monospace;
+}
+
+pre.doctest-errors {
+ border: none;
+ background-color: #333;
+ color: #600;
+}
+
+div.source-code {
+ background-color: #000;
+ border: inset #999 3px;
+ overflow: auto;
+}
+
+pre.source-code {
+ background-color: #000;
+ border: inset #999 3px;
+ overflow: auto;
+ font-family: monospace;
+ color: #fff;
+}
+
+span.source-filename {
+ background-color: #000;
+ border-top: 2px outset #999;
+ border-left: 2px outset #999;
+ border-right: 2px outset #999;
+ padding: 0.25em;
+ color: #fff
+}
+
diff --git a/docs/web/site.js b/docs/web/site.js
new file mode 100644
index 0000000..ed23575
--- /dev/null
+++ b/docs/web/site.js
@@ -0,0 +1,69 @@
+function setup_dropdowns() {
+ var els = document.getElementsByTagName('UL');
+ for (var i = 0; i < els.length; i++) {
+ var el = els[i];
+ if (el.className.search(/\bcontents\b/) > -1) {
+ enable_dropdown(el);
+ }
+ }
+}
+
+function enable_dropdown(el) {
+ var title = el.getElementsByTagName('LI')[0];
+ var plus_minus = document.createTextNode(' [-]');
+ if (title.childNodes[0].tagName != 'A') {
+ anchor = document.createElement('A');
+ while (title.childNodes.length) {
+ anchor.appendChild(title.childNodes[0]);
+ }
+ anchor.setAttribute('href', '#');
+ anchor.style.padding = '1px';
+ title.appendChild(anchor);
+ } else {
+ anchor = title.childNodes[0];
+ }
+ anchor.appendChild(plus_minus);
+ function show_hide() {
+ if (el.sub_hidden) {
+ set_sub_li(el, '');
+ anchor.removeChild(plus_minus);
+ plus_minus = document.createTextNode(' [-]');
+ anchor.appendChild(plus_minus);
+ } else {
+ set_sub_li(el, 'none');
+ anchor.removeChild(plus_minus);
+ plus_minus = document.createTextNode(' [+]');
+ anchor.appendChild(plus_minus);
+ }
+ el.sub_hidden = ! el.sub_hidden;
+ return false;
+ }
+ anchor.onclick = show_hide;
+ show_hide();
+}
+
+function set_sub_li(list, display) {
+ var sub = list.getElementsByTagName('LI');
+ for (var i = 1; i < sub.length; i++) {
+ sub[i].style.display = display;
+ }
+}
+
+function add_onload(func) {
+ if (window.onload) {
+ var old_onload = window.onload;
+ function new_onload() {
+ old_onload();
+ func();
+ }
+ window.onload = new_onload;
+ } else {
+ window.onload = func;
+ }
+}
+
+add_onload(setup_dropdowns);
+
+
+
+
diff --git a/docs/web/style.css b/docs/web/style.css
new file mode 100644
index 0000000..c358643
--- /dev/null
+++ b/docs/web/style.css
@@ -0,0 +1,90 @@
+body {
+ font-family: Helvetica,Arial,sans-serif;
+ margin: 0;
+ background-color: #fff;
+ color: #000;
+}
+
+i, em {
+ font-family: Times New Roman,Times,serif;
+}
+
+a:link {
+ color: #730;
+}
+
+a:visited {
+ color: #402;
+}
+
+a:hover {
+ background-color: #fd8;
+}
+
+div#header {
+ display: block;
+ background-color: #930;
+ color: #fd6;
+ border-bottom: 3px solid #f70;
+ padding: 3px;
+ font-size: 30px;
+}
+
+div#header h1 {
+ padding: 0;
+ margin: 0;
+ font-size: 1.5em;
+}
+
+div#nav {
+ float: left;
+ background-color: #fd9;
+ border: 1px solid #f70;
+ margin-right: 1em;
+ border-bottom: 1px solid #f70;
+ width: 200px;
+}
+
+div#nav ul {
+ padding: 0;
+ margin: 0;
+}
+
+div#nav li {
+ list-style: none;
+ margin: 0;
+}
+
+div#nav ul li ul li a {
+ padding: 2px 2em 2px 2em;
+ font-weight: normal;
+}
+
+div#nav a {
+ display: block;
+ padding: 2px 1em 2px 1em;
+ text-decoration: none;
+ color: #400;
+ font-weight: bold;
+}
+
+div#nav a:hover {
+ background-color: #f80;
+ color: #fff;
+}
+
+/* If I ever do menus that show the "current" page, that would be
+ by marking some "links" as selected */
+div#nav span.selected {
+ display: block;
+ font-weight: bold;
+ padding: 2px 1em 2px 1em;
+}
+
+div#body {
+ padding: 1em;
+}
+
+h1.page-title {
+ margin-top: 0;
+} \ No newline at end of file
diff --git a/paste/__init__.py b/paste/__init__.py
new file mode 100644
index 0000000..4e2d638
--- /dev/null
+++ b/paste/__init__.py
@@ -0,0 +1,17 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+try:
+ import pkg_resources
+ pkg_resources.declare_namespace(__name__)
+except ImportError:
+ # don't prevent use of paste if pkg_resources isn't installed
+ from pkgutil import extend_path
+ __path__ = extend_path(__path__, __name__)
+
+try:
+ import modulefinder
+except ImportError:
+ pass
+else:
+ for p in __path__:
+ modulefinder.AddPackagePath(__name__, p)
diff --git a/paste/auth/__init__.py b/paste/auth/__init__.py
new file mode 100644
index 0000000..186e2ef
--- /dev/null
+++ b/paste/auth/__init__.py
@@ -0,0 +1,9 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Package for authentication/identification of requests.
+
+The objective of this package is to provide single-focused middleware
+components that implement a particular specification. Integration of
+the components into a usable system is up to a higher-level framework.
+"""
diff --git a/paste/auth/auth_tkt.py b/paste/auth/auth_tkt.py
new file mode 100644
index 0000000..da8ddbd
--- /dev/null
+++ b/paste/auth/auth_tkt.py
@@ -0,0 +1,429 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+##########################################################################
+#
+# Copyright (c) 2005 Imaginary Landscape LLC and Contributors.
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+##########################################################################
+"""
+Implementation of cookie signing as done in `mod_auth_tkt
+<http://www.openfusion.com.au/labs/mod_auth_tkt/>`_.
+
+mod_auth_tkt is an Apache module that looks for these signed cookies
+and sets ``REMOTE_USER``, ``REMOTE_USER_TOKENS`` (a comma-separated
+list of groups) and ``REMOTE_USER_DATA`` (arbitrary string data).
+
+This module is an alternative to the ``paste.auth.cookie`` module;
+its primary benefit is compatibility with mod_auth_tkt, which in turn
+makes it possible to use the same authentication process with
+non-Python code run under Apache.
+"""
+
+import time as time_mod
+try:
+ import hashlib
+except ImportError:
+ # mimic hashlib (will work for md5, fail for secure hashes)
+ import md5 as hashlib
+try:
+ from http.cookies import SimpleCookie
+except ImportError:
+ # Python 2
+ from Cookie import SimpleCookie
+from paste import request
+from urllib import quote as url_quote
+from urllib import unquote as url_unquote
+
+DEFAULT_DIGEST = hashlib.md5
+
+
+class AuthTicket(object):
+
+ """
+ This class represents an authentication token. You must pass in
+ the shared secret, the userid, and the IP address. Optionally you
+ can include tokens (a list of strings, representing role names),
+ 'user_data', which is arbitrary data available for your own use in
+ later scripts. Lastly, you can override the timestamp, cookie name,
+ whether to secure the cookie and the digest algorithm (for details
+ look at ``AuthTKTMiddleware``).
+
+ Once you provide all the arguments, use .cookie_value() to
+ generate the appropriate authentication ticket. .cookie()
+ generates a Cookie object, the str() of which is the complete
+ cookie header to be sent.
+
+ CGI usage::
+
+ token = auth_tkt.AuthTick('sharedsecret', 'username',
+ os.environ['REMOTE_ADDR'], tokens=['admin'])
+ print('Status: 200 OK')
+ print('Content-type: text/html')
+ print(token.cookie())
+ print("")
+ ... redirect HTML ...
+
+ Webware usage::
+
+ token = auth_tkt.AuthTick('sharedsecret', 'username',
+ self.request().environ()['REMOTE_ADDR'], tokens=['admin'])
+ self.response().setCookie('auth_tkt', token.cookie_value())
+
+ Be careful not to do an HTTP redirect after login; use meta
+ refresh or Javascript -- some browsers have bugs where cookies
+ aren't saved when set on a redirect.
+ """
+
+ def __init__(self, secret, userid, ip, tokens=(), user_data='',
+ time=None, cookie_name='auth_tkt',
+ secure=False, digest_algo=DEFAULT_DIGEST):
+ self.secret = secret
+ self.userid = userid
+ self.ip = ip
+ if not isinstance(tokens, basestring):
+ tokens = ','.join(tokens)
+ self.tokens = tokens
+ self.user_data = user_data
+ if time is None:
+ self.time = time_mod.time()
+ else:
+ self.time = time
+ self.cookie_name = cookie_name
+ self.secure = secure
+ if isinstance(digest_algo, str):
+ # correct specification of digest from hashlib or fail
+ self.digest_algo = getattr(hashlib, digest_algo)
+ else:
+ self.digest_algo = digest_algo
+
+ def digest(self):
+ return calculate_digest(
+ self.ip, self.time, self.secret, self.userid, self.tokens,
+ self.user_data, self.digest_algo)
+
+ def cookie_value(self):
+ v = '%s%08x%s!' % (self.digest(), int(self.time), url_quote(self.userid))
+ if self.tokens:
+ v += self.tokens + '!'
+ v += self.user_data
+ return v
+
+ def cookie(self):
+ c = SimpleCookie()
+ c[self.cookie_name] = self.cookie_value().encode('base64').strip().replace('\n', '')
+ c[self.cookie_name]['path'] = '/'
+ if self.secure:
+ c[self.cookie_name]['secure'] = 'true'
+ return c
+
+
+class BadTicket(Exception):
+ """
+ Exception raised when a ticket can't be parsed. If we get
+ far enough to determine what the expected digest should have
+ been, expected is set. This should not be shown by default,
+ but can be useful for debugging.
+ """
+ def __init__(self, msg, expected=None):
+ self.expected = expected
+ Exception.__init__(self, msg)
+
+
+def parse_ticket(secret, ticket, ip, digest_algo=DEFAULT_DIGEST):
+ """
+ Parse the ticket, returning (timestamp, userid, tokens, user_data).
+
+ If the ticket cannot be parsed, ``BadTicket`` will be raised with
+ an explanation.
+ """
+ if isinstance(digest_algo, str):
+ # correct specification of digest from hashlib or fail
+ digest_algo = getattr(hashlib, digest_algo)
+ digest_hexa_size = digest_algo().digest_size * 2
+ ticket = ticket.strip('"')
+ digest = ticket[:digest_hexa_size]
+ try:
+ timestamp = int(ticket[digest_hexa_size:digest_hexa_size + 8], 16)
+ except ValueError as e:
+ raise BadTicket('Timestamp is not a hex integer: %s' % e)
+ try:
+ userid, data = ticket[digest_hexa_size + 8:].split('!', 1)
+ except ValueError:
+ raise BadTicket('userid is not followed by !')
+ userid = url_unquote(userid)
+ if '!' in data:
+ tokens, user_data = data.split('!', 1)
+ else:
+ # @@: Is this the right order?
+ tokens = ''
+ user_data = data
+
+ expected = calculate_digest(ip, timestamp, secret,
+ userid, tokens, user_data,
+ digest_algo)
+
+ if expected != digest:
+ raise BadTicket('Digest signature is not correct',
+ expected=(expected, digest))
+
+ tokens = tokens.split(',')
+
+ return (timestamp, userid, tokens, user_data)
+
+
+# @@: Digest object constructor compatible with named ones in hashlib only
+def calculate_digest(ip, timestamp, secret, userid, tokens, user_data,
+ digest_algo):
+ secret = maybe_encode(secret)
+ userid = maybe_encode(userid)
+ tokens = maybe_encode(tokens)
+ user_data = maybe_encode(user_data)
+ digest0 = digest_algo(
+ encode_ip_timestamp(ip, timestamp) + secret + userid + '\0'
+ + tokens + '\0' + user_data).hexdigest()
+ digest = digest_algo(digest0 + secret).hexdigest()
+ return digest
+
+
+def encode_ip_timestamp(ip, timestamp):
+ ip_chars = ''.join(map(chr, map(int, ip.split('.'))))
+ t = int(timestamp)
+ ts = ((t & 0xff000000) >> 24,
+ (t & 0xff0000) >> 16,
+ (t & 0xff00) >> 8,
+ t & 0xff)
+ ts_chars = ''.join(map(chr, ts))
+ return ip_chars + ts_chars
+
+
+def maybe_encode(s, encoding='utf8'):
+ if isinstance(s, unicode):
+ s = s.encode(encoding)
+ return s
+
+
+class AuthTKTMiddleware(object):
+
+ """
+ Middleware that checks for signed cookies that match what
+ `mod_auth_tkt <http://www.openfusion.com.au/labs/mod_auth_tkt/>`_
+ looks for (if you have mod_auth_tkt installed, you don't need this
+ middleware, since Apache will set the environmental variables for
+ you).
+
+ Arguments:
+
+ ``secret``:
+ A secret that should be shared by any instances of this application.
+ If this app is served from more than one machine, they should all
+ have the same secret.
+
+ ``cookie_name``:
+ The name of the cookie to read and write from. Default ``auth_tkt``.
+
+ ``secure``:
+ If the cookie should be set as 'secure' (only sent over SSL) and if
+ the login must be over SSL. (Defaults to False)
+
+ ``httponly``:
+ If the cookie should be marked as HttpOnly, which means that it's
+ not accessible to JavaScript. (Defaults to False)
+
+ ``include_ip``:
+ If the cookie should include the user's IP address. If so, then
+ if they change IPs their cookie will be invalid.
+
+ ``logout_path``:
+ The path under this middleware that should signify a logout. The
+ page will be shown as usual, but the user will also be logged out
+ when they visit this page.
+
+ ``digest_algo``:
+ Digest algorithm specified as a name of the algorithm provided by
+ ``hashlib`` or as a compatible digest object constructor.
+ Defaults to ``md5``, as in mod_auth_tkt. The others currently
+ compatible with mod_auth_tkt are ``sha256`` and ``sha512``.
+
+ If used with mod_auth_tkt, then these settings (except logout_path) should
+ match the analogous Apache configuration settings.
+
+ This also adds two functions to the request:
+
+ ``environ['paste.auth_tkt.set_user'](userid, tokens='', user_data='')``
+
+ This sets a cookie that logs the user in. ``tokens`` is a
+ string (comma-separated groups) or a list of strings.
+ ``user_data`` is a string for your own use.
+
+ ``environ['paste.auth_tkt.logout_user']()``
+
+ Logs out the user.
+ """
+
+ def __init__(self, app, secret, cookie_name='auth_tkt', secure=False,
+ include_ip=True, logout_path=None, httponly=False,
+ no_domain_cookie=True, current_domain_cookie=True,
+ wildcard_cookie=True, digest_algo=DEFAULT_DIGEST):
+ self.app = app
+ self.secret = secret
+ self.cookie_name = cookie_name
+ self.secure = secure
+ self.httponly = httponly
+ self.include_ip = include_ip
+ self.logout_path = logout_path
+ self.no_domain_cookie = no_domain_cookie
+ self.current_domain_cookie = current_domain_cookie
+ self.wildcard_cookie = wildcard_cookie
+ if isinstance(digest_algo, str):
+ # correct specification of digest from hashlib or fail
+ self.digest_algo = getattr(hashlib, digest_algo)
+ else:
+ self.digest_algo = digest_algo
+
+ def __call__(self, environ, start_response):
+ cookies = request.get_cookies(environ)
+ if self.cookie_name in cookies:
+ cookie_value = cookies[self.cookie_name].value
+ else:
+ cookie_value = ''
+ if cookie_value:
+ if self.include_ip:
+ remote_addr = environ['REMOTE_ADDR']
+ else:
+ # mod_auth_tkt uses this dummy value when IP is not
+ # checked:
+ remote_addr = '0.0.0.0'
+ # @@: This should handle bad signatures better:
+ # Also, timeouts should cause cookie refresh
+ try:
+ timestamp, userid, tokens, user_data = parse_ticket(
+ self.secret, cookie_value, remote_addr, self.digest_algo)
+ tokens = ','.join(tokens)
+ environ['REMOTE_USER'] = userid
+ if environ.get('REMOTE_USER_TOKENS'):
+ # We want to add tokens/roles to what's there:
+ tokens = environ['REMOTE_USER_TOKENS'] + ',' + tokens
+ environ['REMOTE_USER_TOKENS'] = tokens
+ environ['REMOTE_USER_DATA'] = user_data
+ environ['AUTH_TYPE'] = 'cookie'
+ except BadTicket:
+ # bad credentials, just ignore without logging the user
+ # in or anything
+ pass
+ set_cookies = []
+
+ def set_user(userid, tokens='', user_data=''):
+ set_cookies.extend(self.set_user_cookie(
+ environ, userid, tokens, user_data))
+
+ def logout_user():
+ set_cookies.extend(self.logout_user_cookie(environ))
+
+ environ['paste.auth_tkt.set_user'] = set_user
+ environ['paste.auth_tkt.logout_user'] = logout_user
+ if self.logout_path and environ.get('PATH_INFO') == self.logout_path:
+ logout_user()
+
+ def cookie_setting_start_response(status, headers, exc_info=None):
+ headers.extend(set_cookies)
+ return start_response(status, headers, exc_info)
+
+ return self.app(environ, cookie_setting_start_response)
+
+ def set_user_cookie(self, environ, userid, tokens, user_data):
+ if not isinstance(tokens, basestring):
+ tokens = ','.join(tokens)
+ if self.include_ip:
+ remote_addr = environ['REMOTE_ADDR']
+ else:
+ remote_addr = '0.0.0.0'
+ ticket = AuthTicket(
+ self.secret,
+ userid,
+ remote_addr,
+ tokens=tokens,
+ user_data=user_data,
+ cookie_name=self.cookie_name,
+ secure=self.secure)
+ # @@: Should we set REMOTE_USER etc in the current
+ # environment right now as well?
+ cur_domain = environ.get('HTTP_HOST', environ.get('SERVER_NAME'))
+ wild_domain = '.' + cur_domain
+
+ cookie_options = ""
+ if self.secure:
+ cookie_options += "; secure"
+ if self.httponly:
+ cookie_options += "; HttpOnly"
+
+ cookies = []
+ if self.no_domain_cookie:
+ cookies.append(('Set-Cookie', '%s=%s; Path=/%s' % (
+ self.cookie_name, ticket.cookie_value(), cookie_options)))
+ if self.current_domain_cookie:
+ cookies.append(('Set-Cookie', '%s=%s; Path=/; Domain=%s%s' % (
+ self.cookie_name, ticket.cookie_value(), cur_domain,
+ cookie_options)))
+ if self.wildcard_cookie:
+ cookies.append(('Set-Cookie', '%s=%s; Path=/; Domain=%s%s' % (
+ self.cookie_name, ticket.cookie_value(), wild_domain,
+ cookie_options)))
+
+ return cookies
+
+ def logout_user_cookie(self, environ):
+ cur_domain = environ.get('HTTP_HOST', environ.get('SERVER_NAME'))
+ wild_domain = '.' + cur_domain
+ expires = 'Sat, 01-Jan-2000 12:00:00 GMT'
+ cookies = [
+ ('Set-Cookie', '%s=""; Expires="%s"; Path=/' % (self.cookie_name, expires)),
+ ('Set-Cookie', '%s=""; Expires="%s"; Path=/; Domain=%s' %
+ (self.cookie_name, expires, cur_domain)),
+ ('Set-Cookie', '%s=""; Expires="%s"; Path=/; Domain=%s' %
+ (self.cookie_name, expires, wild_domain)),
+ ]
+ return cookies
+
+
+def make_auth_tkt_middleware(
+ app,
+ global_conf,
+ secret=None,
+ cookie_name='auth_tkt',
+ secure=False,
+ include_ip=True,
+ logout_path=None):
+ """
+ Creates the `AuthTKTMiddleware
+ <class-paste.auth.auth_tkt.AuthTKTMiddleware.html>`_.
+
+ ``secret`` is required, but can be set globally or locally.
+ """
+ from paste.deploy.converters import asbool
+ secure = asbool(secure)
+ include_ip = asbool(include_ip)
+ if secret is None:
+ secret = global_conf.get('secret')
+ if not secret:
+ raise ValueError(
+ "You must provide a 'secret' (in global or local configuration)")
+ return AuthTKTMiddleware(
+ app, secret, cookie_name, secure, include_ip, logout_path or None)
diff --git a/paste/auth/basic.py b/paste/auth/basic.py
new file mode 100644
index 0000000..24d1731
--- /dev/null
+++ b/paste/auth/basic.py
@@ -0,0 +1,122 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+Basic HTTP/1.0 Authentication
+
+This module implements ``Basic`` authentication as described in
+HTTP/1.0 specification [1]_ . Do not use this module unless you
+are using SSL or need to work with very out-dated clients, instead
+use ``digest`` authentication.
+
+>>> from paste.wsgilib import dump_environ
+>>> from paste.httpserver import serve
+>>> # from paste.auth.basic import AuthBasicHandler
+>>> realm = 'Test Realm'
+>>> def authfunc(environ, username, password):
+... return username == password
+>>> serve(AuthBasicHandler(dump_environ, realm, authfunc))
+serving on...
+
+.. [1] http://www.w3.org/Protocols/HTTP/1.0/draft-ietf-http-spec.html#BasicAA
+"""
+from paste.httpexceptions import HTTPUnauthorized
+from paste.httpheaders import *
+
+class AuthBasicAuthenticator(object):
+ """
+ implements ``Basic`` authentication details
+ """
+ type = 'basic'
+ def __init__(self, realm, authfunc):
+ self.realm = realm
+ self.authfunc = authfunc
+
+ def build_authentication(self):
+ head = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % self.realm)
+ return HTTPUnauthorized(headers=head)
+
+ def authenticate(self, environ):
+ authorization = AUTHORIZATION(environ)
+ if not authorization:
+ return self.build_authentication()
+ (authmeth, auth) = authorization.split(' ', 1)
+ if 'basic' != authmeth.lower():
+ return self.build_authentication()
+ auth = auth.strip().decode('base64')
+ username, password = auth.split(':', 1)
+ if self.authfunc(environ, username, password):
+ return username
+ return self.build_authentication()
+
+ __call__ = authenticate
+
+class AuthBasicHandler(object):
+ """
+ HTTP/1.0 ``Basic`` authentication middleware
+
+ Parameters:
+
+ ``application``
+
+ The application object is called only upon successful
+ authentication, and can assume ``environ['REMOTE_USER']``
+ is set. If the ``REMOTE_USER`` is already set, this
+ middleware is simply pass-through.
+
+ ``realm``
+
+ This is an identifier for the authority that is requesting
+ authorization. It is shown to the user and should be unique
+ within the domain it is being used.
+
+ ``authfunc``
+
+ This is a mandatory user-defined function which takes a
+ ``environ``, ``username`` and ``password`` for its first
+ three arguments. It should return ``True`` if the user is
+ authenticated.
+
+ """
+ def __init__(self, application, realm, authfunc):
+ self.application = application
+ self.authenticate = AuthBasicAuthenticator(realm, authfunc)
+
+ def __call__(self, environ, start_response):
+ username = REMOTE_USER(environ)
+ if not username:
+ result = self.authenticate(environ)
+ if isinstance(result, str):
+ AUTH_TYPE.update(environ, 'basic')
+ REMOTE_USER.update(environ, result)
+ else:
+ return result.wsgi_application(environ, start_response)
+ return self.application(environ, start_response)
+
+middleware = AuthBasicHandler
+
+__all__ = ['AuthBasicHandler']
+
+def make_basic(app, global_conf, realm, authfunc, **kw):
+ """
+ Grant access via basic authentication
+
+ Config looks like this::
+
+ [filter:grant]
+ use = egg:Paste#auth_basic
+ realm=myrealm
+ authfunc=somepackage.somemodule:somefunction
+
+ """
+ from paste.util.import_string import eval_import
+ import types
+ authfunc = eval_import(authfunc)
+ assert isinstance(authfunc, types.FunctionType), "authfunc must resolve to a function"
+ return AuthBasicHandler(app, realm, authfunc)
+
+
+if "__main__" == __name__:
+ import doctest
+ doctest.testmod(optionflags=doctest.ELLIPSIS)
diff --git a/paste/auth/cas.py b/paste/auth/cas.py
new file mode 100644
index 0000000..44e4e98
--- /dev/null
+++ b/paste/auth/cas.py
@@ -0,0 +1,99 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+CAS 1.0 Authentication
+
+The Central Authentication System is a straight-forward single sign-on
+mechanism developed by Yale University's ITS department. It has since
+enjoyed widespread success and is deployed at many major universities
+and some corporations.
+
+ https://clearinghouse.ja-sig.org/wiki/display/CAS/Home
+ http://www.yale.edu/tp/auth/usingcasatyale.html
+
+This implementation has the goal of maintaining current path arguments
+passed to the system so that it can be used as middleware at any stage
+of processing. It has the secondary goal of allowing for other
+authentication methods to be used concurrently.
+"""
+from six.moves.urllib.parse import urlencode
+from paste.request import construct_url
+from paste.httpexceptions import HTTPSeeOther, HTTPForbidden
+
+class CASLoginFailure(HTTPForbidden):
+ """ The exception raised if the authority returns 'no' """
+
+class CASAuthenticate(HTTPSeeOther):
+ """ The exception raised to authenticate the user """
+
+def AuthCASHandler(application, authority):
+ """
+ middleware to implement CAS 1.0 authentication
+
+ There are several possible outcomes:
+
+ 0. If the REMOTE_USER environment variable is already populated;
+ then this middleware is a no-op, and the request is passed along
+ to the application.
+
+ 1. If a query argument 'ticket' is found, then an attempt to
+ validate said ticket /w the authentication service done. If the
+ ticket is not validated; an 403 'Forbidden' exception is raised.
+ Otherwise, the REMOTE_USER variable is set with the NetID that
+ was validated and AUTH_TYPE is set to "cas".
+
+ 2. Otherwise, a 303 'See Other' is returned to the client directing
+ them to login using the CAS service. After logon, the service
+ will send them back to this same URL, only with a 'ticket' query
+ argument.
+
+ Parameters:
+
+ ``authority``
+
+ This is a fully-qualified URL to a CAS 1.0 service. The URL
+ should end with a '/' and have the 'login' and 'validate'
+ sub-paths as described in the CAS 1.0 documentation.
+
+ """
+ assert authority.endswith("/") and authority.startswith("http")
+ def cas_application(environ, start_response):
+ username = environ.get('REMOTE_USER','')
+ if username:
+ return application(environ, start_response)
+ qs = environ.get('QUERY_STRING','').split("&")
+ if qs and qs[-1].startswith("ticket="):
+ # assume a response from the authority
+ ticket = qs.pop().split("=", 1)[1]
+ environ['QUERY_STRING'] = "&".join(qs)
+ service = construct_url(environ)
+ args = urlencode(
+ {'service': service,'ticket': ticket})
+ requrl = authority + "validate?" + args
+ result = urlopen(requrl).read().split("\n")
+ if 'yes' == result[0]:
+ environ['REMOTE_USER'] = result[1]
+ environ['AUTH_TYPE'] = 'cas'
+ return application(environ, start_response)
+ exce = CASLoginFailure()
+ else:
+ service = construct_url(environ)
+ args = urlencode({'service': service})
+ location = authority + "login?" + args
+ exce = CASAuthenticate(location)
+ return exce.wsgi_application(environ, start_response)
+ return cas_application
+
+middleware = AuthCASHandler
+
+__all__ = ['CASLoginFailure', 'CASAuthenticate', 'AuthCASHandler' ]
+
+if '__main__' == __name__:
+ authority = "https://secure.its.yale.edu/cas/servlet/"
+ from paste.wsgilib import dump_environ
+ from paste.httpserver import serve
+ from paste.httpexceptions import *
+ serve(HTTPExceptionHandler(
+ AuthCASHandler(dump_environ, authority)))
diff --git a/paste/auth/cookie.py b/paste/auth/cookie.py
new file mode 100644
index 0000000..8f11d1b
--- /dev/null
+++ b/paste/auth/cookie.py
@@ -0,0 +1,405 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+Cookie "Saved" Authentication
+
+This authentication middleware saves the current REMOTE_USER,
+REMOTE_SESSION, and any other environment variables specified in a
+cookie so that it can be retrieved during the next request without
+requiring re-authentication. This uses a session cookie on the client
+side (so it goes away when the user closes their window) and does
+server-side expiration.
+
+Following is a very simple example where a form is presented asking for
+a user name (no actual checking), and dummy session identifier (perhaps
+corresponding to a database session id) is stored in the cookie.
+
+::
+
+ >>> from paste.httpserver import serve
+ >>> from paste.fileapp import DataApp
+ >>> from paste.httpexceptions import *
+ >>> from paste.auth.cookie import AuthCookieHandler
+ >>> from paste.wsgilib import parse_querystring
+ >>> def testapp(environ, start_response):
+ ... user = dict(parse_querystring(environ)).get('user','')
+ ... if user:
+ ... environ['REMOTE_USER'] = user
+ ... environ['REMOTE_SESSION'] = 'a-session-id'
+ ... if environ.get('REMOTE_USER'):
+ ... page = '<html><body>Welcome %s (%s)</body></html>'
+ ... page %= (environ['REMOTE_USER'], environ['REMOTE_SESSION'])
+ ... else:
+ ... page = ('<html><body><form><input name="user" />'
+ ... '<input type="submit" /></form></body></html>')
+ ... return DataApp(page, content_type="text/html")(
+ ... environ, start_response)
+ >>> serve(AuthCookieHandler(testapp))
+ serving on...
+
+"""
+
+import hmac, base64, random, six, time, warnings
+try:
+ from hashlib import sha1
+except ImportError:
+ # NOTE: We have to use the callable with hashlib (hashlib.sha1),
+ # otherwise hmac only accepts the sha module object itself
+ import sha as sha1
+from paste.request import get_cookies
+
+def make_time(value):
+ """Format an epoch *value* as YYYYMMDDHHMM in UTC (cookie timestamp)."""
+ return time.strftime("%Y%m%d%H%M", time.gmtime(value))
+# fixed sizes let AuthCookieSigner.auth() slice the cookie back apart
+# into signature / timestamp / content
+_signature_size = len(hmac.new(b'x', b'x', sha1).digest())
+_header_size = _signature_size + len(make_time(time.time()))
+
+# @@: Should this be using urllib.quote?
+# build encode/decode functions to safely pack away values
+# backslash is escaped first so that decode (applied in reverse order)
+# round-trips correctly
+_encode = [('\\', '\\x5c'), ('"', '\\x22'),
+ ('=', '\\x3d'), (';', '\\x3b')]
+_decode = [(v, k) for (k, v) in _encode]
+_decode.reverse()
+def encode(s, sublist = _encode):
+ """Escape characters that would break the ``k=v;k=v`` cookie packing."""
+ return six.moves.reduce((lambda a, b: a.replace(b[0], b[1])), sublist, str(s))
+decode = lambda s: encode(s, _decode)
+
+class CookieTooLarge(RuntimeError):
+ def __init__(self, content, cookie):
+ RuntimeError.__init__("Signed cookie exceeds maximum size of 4096")
+ self.content = content
+ self.cookie = cookie
+
+_all_chars = ''.join([chr(x) for x in range(0, 255)])
+# NOTE(review): range(0, 255) stops at chr(254), so chr(255) is never in the
+# pool -- presumably unintentional, though harmless for entropy.  Confirm.
+def new_secret():
+ """ returns a 64 byte secret """
+ # random.sample draws 64 *distinct* characters from the pool
+ # NOTE(review): random.sample() on a str was removed in Python 3.11,
+ # and the random module is not cryptographically secure -- the
+ # ``secrets`` module would be preferable.  Confirm supported versions.
+ secret = ''.join(random.sample(_all_chars, 64))
+ if six.PY3:
+ secret = secret.encode('utf8')
+ return secret
+
+class AuthCookieSigner(object):
+ """
+ save/restore ``environ`` entries via digitally signed cookie
+
+ This class converts content into a timed and digitally signed
+ cookie, as well as having the facility to reverse this procedure.
+ If the cookie, after the content is encoded and signed exceeds the
+ maximum length (4096), then CookieTooLarge exception is raised.
+
+ The timeout of the cookie is handled on the server side for a few
+ reasons. First, if a 'Expires' directive is added to a cookie, then
+ the cookie becomes persistent (lasting even after the browser window
+ has closed). Second, the user's clock may be wrong (perhaps
+ intentionally). The timeout is specified in minutes; and expiration
+ date returned is rounded to one second.
+
+ Constructor Arguments:
+
+ ``secret``
+
+ This is a secret key if you want to synchronize your keys so
+ that the cookie will be good across a cluster of computers.
+ It is recommended via the HMAC specification (RFC 2104) that
+ the secret key be 64 bytes since this is the block size of
+ the hashing. If you do not provide a secret key, a random
+ one is generated each time you create the handler; this
+ should be sufficient for most cases.
+
+ ``timeout``
+
+ This is the time (in minutes) from which the cookie is set
+ to expire. Note that on each request a new (replacement)
+ cookie is sent, hence this is effectively a session timeout
+ parameter for your entire cluster. If you do not provide a
+ timeout, it is set at 30 minutes.
+
+ ``maxlen``
+
+ This is the maximum size of the *signed* cookie; hence the
+ actual content signed will be somewhat less. If the cookie
+ goes over this size, a ``CookieTooLarge`` exception is
+ raised so that unexpected handling of cookies on the client
+ side are avoided. By default this is set at 4k (4096 bytes),
+ which is the standard cookie size limit.
+
+ """
+ def __init__(self, secret = None, timeout = None, maxlen = None):
+ self.timeout = timeout or 30
+ # reject strings early: a str timeout would silently break the
+ # minute arithmetic in sign() below
+ if isinstance(timeout, six.string_types):
+ raise ValueError(
+ "Timeout must be a number (minutes), not a string (%r)"
+ % timeout)
+ self.maxlen = maxlen or 4096
+ self.secret = secret or new_secret()
+
+ def sign(self, content):
+ """
+ Sign the content returning a valid cookie (that does not
+ need to be escaped and quoted). The expiration of this
+ cookie is handled server-side in the auth() function.
+ """
+ # cookie layout: HMAC-SHA1 signature | YYYYMMDDHHMM expiry | content
+ timestamp = make_time(time.time() + 60*self.timeout)
+ if six.PY3:
+ content = content.encode('utf8')
+ timestamp = timestamp.encode('utf8')
+ # NOTE(review): base64.encodestring was removed in Python 3.9;
+ # base64.encodebytes is the modern spelling -- confirm the range
+ # of Python versions this must support.
+ cookie = base64.encodestring(
+ hmac.new(self.secret, content, sha1).digest() +
+ timestamp +
+ content)
+ # make the base64 value cookie-safe ('/' and '=' are significant in
+ # cookie headers) and strip the newlines encodestring inserts
+ cookie = cookie.replace(b"/", b"_").replace(b"=", b"~")
+ cookie = cookie.replace(b'\n', b'').replace(b'\r', b'')
+ if len(cookie) > self.maxlen:
+ raise CookieTooLarge(content, cookie)
+ return cookie
+
+ def auth(self, cookie):
+ """
+ Authenticate the cookie using the signature, verify that it
+ has not expired; and return the cookie's content
+
+ Returns None (implicitly) when the signature does not match
+ or the timestamp has passed.
+ """
+ decode = base64.decodestring(
+ cookie.replace("_", "/").replace("~", "="))
+ # slice apart using the fixed sizes computed at import time
+ signature = decode[:_signature_size]
+ expires = decode[_signature_size:_header_size]
+ content = decode[_header_size:]
+ # NOTE(review): '==' is not a constant-time comparison;
+ # hmac.compare_digest would resist timing attacks -- confirm.
+ if signature == hmac.new(self.secret, content, sha1).digest():
+ if int(expires) > int(make_time(time.time())):
+ return content
+ else:
+ # This is the normal case of an expired cookie; just
+ # don't bother doing anything here.
+ pass
+ else:
+ # This case can happen if the server is restarted with a
+ # different secret; or if the user's IP address changed
+ # due to a proxy. However, it could also be a break-in
+ # attempt -- so should it be reported?
+ pass
+
+class AuthCookieEnviron(list):
+ """
+ a list of environment keys to be saved via cookie
+
+ An instance of this object, found at ``environ['paste.auth.cookie']``
+ lists the `environ` keys that were restored from or will be added
+ to the digially signed cookie. This object can be accessed from an
+ `environ` variable by using this module's name.
+ """
+ def __init__(self, handler, scanlist):
+ list.__init__(self, scanlist)
+ self.handler = handler
+ def append(self, value):
+ if value in self:
+ return
+ list.append(self, str(value))
+
+class AuthCookieHandler(object):
+ """
+ the actual handler that should be put in your middleware stack
+
+ This middleware uses cookies to stash-away a previously authenticated
+ user (and perhaps other variables) so that re-authentication is not
+ needed. This does not implement sessions; and therefore N servers
+ can be synchronized to accept the same saved authentication if they
+ all use the same cookie_name and secret.
+
+ By default, this handler scans the `environ` for the REMOTE_USER
+ and REMOTE_SESSION key; if found, it is stored. It can be
+ configured to scan other `environ` keys as well -- but be careful
+ not to exceed 2-3k (so that the encoded and signed cookie does not
+ exceed 4k). You can ask it to handle other environment variables
+ by doing:
+
+ ``environ['paste.auth.cookie'].append('your.environ.variable')``
+
+
+ Constructor Arguments:
+
+ ``application``
+
+ This is the wrapped application which will have access to
+ the ``environ['REMOTE_USER']`` restored by this middleware.
+
+ ``cookie_name``
+
+ The name of the cookie used to store this content, by default
+ it is ``PASTE_AUTH_COOKIE``.
+
+ ``scanlist``
+
+ This is the initial set of ``environ`` keys to
+ save/restore to the signed cookie. By default is consists
+ only of ``REMOTE_USER`` and ``REMOTE_SESSION``; any tuple
+ or list of environment keys will work. However, be
+ careful, as the total saved size is limited to around 3k.
+
+ ``signer``
+
+ This is the signer object used to create the actual cookie
+ values, by default, it is ``AuthCookieSigner`` and is passed
+ the remaining arguments to this function: ``secret``,
+ ``timeout``, and ``maxlen``.
+
+ At this time, each cookie is individually signed. To store more
+ than the 4k of data; it is possible to sub-class this object to
+ provide different ``environ_name`` and ``cookie_name``
+ """
+ environ_name = 'paste.auth.cookie'
+ cookie_name = 'PASTE_AUTH_COOKIE'
+ signer_class = AuthCookieSigner
+ environ_class = AuthCookieEnviron
+
+ def __init__(self, application, cookie_name=None, scanlist=None,
+ signer=None, secret=None, timeout=None, maxlen=None):
+ if not signer:
+ signer = self.signer_class(secret, timeout, maxlen)
+ self.signer = signer
+ self.scanlist = scanlist or ('REMOTE_USER','REMOTE_SESSION')
+ self.application = application
+ self.cookie_name = cookie_name or self.cookie_name
+
+ def __call__(self, environ, start_response):
+ # guard against double-wrapping: two instances would fight over
+ # the same cookie
+ if self.environ_name in environ:
+ raise AssertionError("AuthCookie already installed!")
+ scanlist = self.environ_class(self, self.scanlist)
+ jar = get_cookies(environ)
+ if self.cookie_name in jar:
+ # auth() returns the packed content, or None if the signature
+ # failed or the cookie expired
+ content = self.signer.auth(jar[self.cookie_name].value)
+ if content:
+ # restore each saved k=v pair into environ, never
+ # clobbering values set by earlier middleware
+ for pair in content.split(";"):
+ (k, v) = pair.split("=")
+ k = decode(k)
+ if k not in scanlist:
+ scanlist.append(k)
+ if k in environ:
+ continue
+ environ[k] = decode(v)
+ if 'REMOTE_USER' == k:
+ environ['AUTH_TYPE'] = 'cookie'
+ environ[self.environ_name] = scanlist
+ if "paste.httpexceptions" in environ:
+ warnings.warn("Since paste.httpexceptions is hooked in your "
+ "processing chain before paste.auth.cookie, if an "
+ "HTTPRedirection is raised, the cookies this module sets "
+ "will not be included in your response.\n")
+
+ def response_hook(status, response_headers, exc_info=None):
+ """
+ Scan the environment for keys specified in the scanlist,
+ pack up their values, signs the content and issues a cookie.
+ """
+ scanlist = environ.get(self.environ_name)
+ assert scanlist and isinstance(scanlist, self.environ_class)
+ content = []
+ for k in scanlist:
+ v = environ.get(k)
+ if v is not None:
+ if type(v) is not str:
+ raise ValueError(
+ "The value of the environmental variable %r "
+ "is not a str (only str is allowed; got %r)"
+ % (k, v))
+ content.append("%s=%s" % (encode(k), encode(v)))
+ if content:
+ content = ";".join(content)
+ content = self.signer.sign(content)
+ if six.PY3:
+ content = content.decode('utf8')
+ # session cookie (no Expires): it dies with the browser
+ # window; actual expiry is enforced server-side by the signer
+ cookie = '%s=%s; Path=/;' % (self.cookie_name, content)
+ if 'https' == environ['wsgi.url_scheme']:
+ cookie += ' secure;'
+ response_headers.append(('Set-Cookie', cookie))
+ return start_response(status, response_headers, exc_info)
+ return self.application(environ, response_hook)
+
+middleware = AuthCookieHandler
+
+# Paste Deploy entry point:
+def make_auth_cookie(
+ app, global_conf,
+ # Should this get picked up from global_conf somehow?:
+ cookie_name='PASTE_AUTH_COOKIE',
+ scanlist=('REMOTE_USER', 'REMOTE_SESSION'),
+ # signer cannot be set
+ secret=None,
+ timeout=30,
+ maxlen=4096):
+ """
+ This middleware uses cookies to stash-away a previously
+ authenticated user (and perhaps other variables) so that
+ re-authentication is not needed. This does not implement
+ sessions; and therefore N servers can be syncronized to accept the
+ same saved authentication if they all use the same cookie_name and
+ secret.
+
+ By default, this handler scans the `environ` for the REMOTE_USER
+ and REMOTE_SESSION key; if found, it is stored. It can be
+ configured to scan other `environ` keys as well -- but be careful
+ not to exceed 2-3k (so that the encoded and signed cookie does not
+ exceed 4k). You can ask it to handle other environment variables
+ by doing:
+
+ ``environ['paste.auth.cookie'].append('your.environ.variable')``
+
+ Configuration:
+
+ ``cookie_name``
+
+ The name of the cookie used to store this content, by
+ default it is ``PASTE_AUTH_COOKIE``.
+
+ ``scanlist``
+
+ This is the initial set of ``environ`` keys to
+ save/restore to the signed cookie. By default is consists
+ only of ``REMOTE_USER`` and ``REMOTE_SESSION``; any
+ space-separated list of environment keys will work.
+ However, be careful, as the total saved size is limited to
+ around 3k.
+
+ ``secret``
+
+ The secret that will be used to sign the cookies. If you
+ don't provide one (and none is set globally) then a random
+ secret will be created. Each time the server is restarted
+ a new secret will then be created and all cookies will
+ become invalid! This can be any string value.
+
+ ``timeout``
+
+ The time to keep the cookie, expressed in minutes. This
+ is handled server-side, so a new cookie with a new timeout
+ is added to every response.
+
+ ``maxlen``
+
+ The maximum length of the cookie that is sent (default 4k,
+ which is a typical browser maximum)
+
+ """
+ # config files hand us a space-separated string; normalize to a list
+ if isinstance(scanlist, six.string_types):
+ scanlist = scanlist.split()
+ # fall back to a cluster-wide secret from the [DEFAULT] section
+ if secret is None and global_conf.get('secret'):
+ secret = global_conf['secret']
+ # re-raise int() failures with a config-oriented message
+ try:
+ timeout = int(timeout)
+ except ValueError:
+ raise ValueError('Bad value for timeout (must be int): %r'
+ % timeout)
+ try:
+ maxlen = int(maxlen)
+ except ValueError:
+ raise ValueError('Bad value for maxlen (must be int): %r'
+ % maxlen)
+ return AuthCookieHandler(
+ app, cookie_name=cookie_name, scanlist=scanlist,
+ secret=secret, timeout=timeout, maxlen=maxlen)
+
+__all__ = ['AuthCookieHandler', 'AuthCookieSigner', 'AuthCookieEnviron']
+
+if "__main__" == __name__:
+ import doctest
+ doctest.testmod(optionflags=doctest.ELLIPSIS)
+
diff --git a/paste/auth/digest.py b/paste/auth/digest.py
new file mode 100644
index 0000000..553bd88
--- /dev/null
+++ b/paste/auth/digest.py
@@ -0,0 +1,254 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+Digest HTTP/1.1 Authentication
+
+This module implements ``Digest`` authentication as described by
+RFC 2617 [1]_ .
+
+Basically, you just put this module before your application, and it
+takes care of requesting and handling authentication requests. This
+module has been tested with several common browsers "out-in-the-wild".
+
+>>> from paste.wsgilib import dump_environ
+>>> from paste.httpserver import serve
+>>> # from paste.auth.digest import digest_password, AuthDigestHandler
+>>> realm = 'Test Realm'
+>>> def authfunc(environ, realm, username):
+... return digest_password(realm, username, username)
+>>> serve(AuthDigestHandler(dump_environ, realm, authfunc))
+serving on...
+
+This code has not been audited by a security expert, please use with
+caution (or better yet, report security holes). At this time, this
+implementation does not provide for further challenges, nor does it
+support Authentication-Info header. It also uses md5, and an option
+to use sha would be a good thing.
+
+.. [1] http://www.faqs.org/rfcs/rfc2617.html
+"""
+from paste.httpexceptions import HTTPUnauthorized
+from paste.httpheaders import *
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import md5
+import time, random
+from six.moves.urllib.parse import quote as url_quote
+import six
+
+def _split_auth_string(auth_string):
+ """ split a digest auth string into individual key=value strings """
+ prev = None
+ for item in auth_string.split(","):
+ try:
+ if prev.count('"') == 1:
+ prev = "%s,%s" % (prev, item)
+ continue
+ except AttributeError:
+ if prev == None:
+ prev = item
+ continue
+ else:
+ raise StopIteration
+ yield prev.strip()
+ prev = item
+
+ yield prev.strip()
+ return
+
+def _auth_to_kv_pairs(auth_string):
+ """ split a digest auth string into key, value pairs """
+ for item in _split_auth_string(auth_string):
+ (k, v) = item.split("=", 1)
+ if v.startswith('"') and len(v) > 1 and v.endswith('"'):
+ v = v[1:-1]
+ yield (k, v)
+
+def digest_password(realm, username, password):
+ """ construct the appropriate hashcode needed for HTTP digest """
+ content = "%s:%s:%s" % (username, realm, password)
+ if six.PY3:
+ content = content.encode('utf8')
+ return md5(content).hexdigest()
+
+class AuthDigestAuthenticator(object):
+ """ implementation of RFC 2617 - HTTP Digest Authentication """
+ def __init__(self, realm, authfunc):
+ # maps nonce -> highest client nonce-count seen (None until used)
+ # NOTE(review): entries are only removed on failed auth, so this
+ # dict can grow without bound on a busy server -- confirm.
+ self.nonce = {} # list to prevent replay attacks
+ self.authfunc = authfunc
+ self.realm = realm
+
+ def build_authentication(self, stale = ''):
+ """ builds the authentication error """
+ # nonce and opaque are fresh, hard-to-guess md5 hex digests
+ content = "%s:%s" % (time.time(), random.random())
+ if six.PY3:
+ content = content.encode('utf-8')
+ nonce = md5(content).hexdigest()
+
+ content = "%s:%s" % (time.time(), random.random())
+ if six.PY3:
+ content = content.encode('utf-8')
+ opaque = md5(content).hexdigest()
+
+ # register the nonce; None marks it as not yet used by a client
+ self.nonce[nonce] = None
+ parts = {'realm': self.realm, 'qop': 'auth',
+ 'nonce': nonce, 'opaque': opaque }
+ if stale:
+ parts['stale'] = 'true'
+ head = ", ".join(['%s="%s"' % (k, v) for (k, v) in parts.items()])
+ head = [("WWW-Authenticate", 'Digest %s' % head)]
+ return HTTPUnauthorized(headers=head)
+
+ def compute(self, ha1, username, response, method,
+ path, nonce, nc, cnonce, qop):
+ """ computes the authentication, raises error if unsuccessful """
+ if not ha1:
+ return self.build_authentication()
+ # HA2 = md5(method:uri), RFC 2617 section 3.2.2.3
+ content = '%s:%s' % (method, path)
+ if six.PY3:
+ content = content.encode('utf8')
+ ha2 = md5(content).hexdigest()
+ # expected response digest, with or without qop (RFC 2617 3.2.2.1)
+ if qop:
+ chk = "%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2)
+ else:
+ chk = "%s:%s:%s" % (ha1, nonce, ha2)
+ if six.PY3:
+ chk = chk.encode('utf8')
+ if response != md5(chk).hexdigest():
+ if nonce in self.nonce:
+ del self.nonce[nonce]
+ return self.build_authentication()
+ # replay protection: the client's nonce-count must strictly increase
+ pnc = self.nonce.get(nonce,'00000000')
+ if pnc is not None and nc <= pnc:
+ if nonce in self.nonce:
+ del self.nonce[nonce]
+ return self.build_authentication(stale = True)
+ self.nonce[nonce] = nc
+ return username
+
+ def authenticate(self, environ):
+ """ This function takes a WSGI environment and authenticates
+ the request returning authenticated user or error.
+ """
+ method = REQUEST_METHOD(environ)
+ fullpath = url_quote(SCRIPT_NAME(environ)) + url_quote(PATH_INFO(environ))
+ authorization = AUTHORIZATION(environ)
+ if not authorization:
+ return self.build_authentication()
+ (authmeth, auth) = authorization.split(" ", 1)
+ if 'digest' != authmeth.lower():
+ return self.build_authentication()
+ amap = dict(_auth_to_kv_pairs(auth))
+ try:
+ username = amap['username']
+ authpath = amap['uri']
+ nonce = amap['nonce']
+ realm = amap['realm']
+ response = amap['response']
+ assert authpath.split("?", 1)[0] in fullpath
+ assert realm == self.realm
+ qop = amap.get('qop', '')
+ cnonce = amap.get('cnonce', '')
+ nc = amap.get('nc', '00000000')
+ if qop:
+ assert 'auth' == qop
+ assert nonce and nc
+ except:
+ # NOTE(review): bare except treats any malformed header as an
+ # auth failure, but also hides real bugs -- consider narrowing
+ # to (KeyError, AssertionError, ValueError).
+ return self.build_authentication()
+ ha1 = self.authfunc(environ, realm, username)
+ return self.compute(ha1, username, response, method, authpath,
+ nonce, nc, cnonce, qop)
+
+ __call__ = authenticate
+
+class AuthDigestHandler(object):
+ """
+ middleware for HTTP Digest authentication (RFC 2617)
+
+ This component follows the procedure below:
+
+ 0. If the REMOTE_USER environment variable is already populated;
+ then this middleware is a no-op, and the request is passed
+ along to the application.
+
+ 1. If the HTTP_AUTHORIZATION header was not provided or specifies
+ an algorithm other than ``digest``, then a HTTPUnauthorized
+ response is generated with the challenge.
+
+ 2. If the response is malformed or if the user's credentials
+ do not pass muster, another HTTPUnauthorized is raised.
+
+ 3. If all goes well, and the user's credentials pass; then
+ REMOTE_USER environment variable is filled in and the
+ AUTH_TYPE is listed as 'digest'.
+
+ Parameters:
+
+ ``application``
+
+ The application object is called only upon successful
+ authentication, and can assume ``environ['REMOTE_USER']``
+ is set. If the ``REMOTE_USER`` is already set, this
+ middleware is simply pass-through.
+
+ ``realm``
+
+ This is a identifier for the authority that is requesting
+ authorization. It is shown to the user and should be unique
+ within the domain it is being used.
+
+ ``authfunc``
+
+ This is a callback function which performs the actual
+ authentication; the signature of this callback is:
+
+ authfunc(environ, realm, username) -> hashcode
+
+ This module provides a 'digest_password' helper function
+ which can help construct the hashcode; it is recommended
+ that the hashcode is stored in a database, not the user's
+ actual password (since you only need the hashcode).
+ """
+ def __init__(self, application, realm, authfunc):
+ self.authenticate = AuthDigestAuthenticator(realm, authfunc)
+ self.application = application
+
+ def __call__(self, environ, start_response):
+ username = REMOTE_USER(environ)
+ if not username:
+ result = self.authenticate(environ)
+ # authenticate() returns the username (str) on success, or an
+ # HTTPUnauthorized exception object to be rendered otherwise
+ if isinstance(result, str):
+ AUTH_TYPE.update(environ,'digest')
+ REMOTE_USER.update(environ, result)
+ else:
+ return result.wsgi_application(environ, start_response)
+ return self.application(environ, start_response)
+
+middleware = AuthDigestHandler
+
+__all__ = ['digest_password', 'AuthDigestHandler' ]
+
+def make_digest(app, global_conf, realm, authfunc, **kw):
+ """
+ Grant access via digest authentication
+
+ Config looks like this::
+
+ [filter:grant]
+ use = egg:Paste#auth_digest
+ realm=myrealm
+ authfunc=somepackage.somemodule:somefunction
+
+ """
+ from paste.util.import_string import eval_import
+ import types
+ # resolve the dotted-name string from the config into a callable
+ authfunc = eval_import(authfunc)
+ # NOTE(review): assert is stripped under ``python -O``; raising
+ # ValueError would validate the configuration unconditionally.
+ assert isinstance(authfunc, types.FunctionType), "authfunc must resolve to a function"
+ return AuthDigestHandler(app, realm, authfunc)
+
+if "__main__" == __name__:
+ import doctest
+ doctest.testmod(optionflags=doctest.ELLIPSIS)
diff --git a/paste/auth/form.py b/paste/auth/form.py
new file mode 100644
index 0000000..9be82a2
--- /dev/null
+++ b/paste/auth/form.py
@@ -0,0 +1,149 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+Authentication via HTML Form
+
+This is a very simple HTML form login screen that asks for the username
+and password. This middleware component requires that an authorization
+function taking the name and password, and that it be placed in your
+application stack. This class does not include any session management
+code or way to save the user's authorization; however, it is easy enough
+to put ``paste.auth.cookie`` in your application stack.
+
+>>> from paste.wsgilib import dump_environ
+>>> from paste.httpserver import serve
+>>> from paste.auth.cookie import AuthCookieHandler
+>>> from paste.auth.form import AuthFormHandler
+>>> def authfunc(environ, username, password):
+... return username == password
+>>> serve(AuthCookieHandler(
+... AuthFormHandler(dump_environ, authfunc)))
+serving on...
+
+"""
+from paste.request import construct_url, parse_formvars
+
+TEMPLATE = """\
+<html>
+ <head><title>Please Login!</title></head>
+ <body>
+ <h1>Please Login</h1>
+ <form action="%s" method="post">
+ <dl>
+ <dt>Username:</dt>
+ <dd><input type="text" name="username"></dd>
+ <dt>Password:</dt>
+ <dd><input type="password" name="password"></dd>
+ </dl>
+ <input type="submit" name="authform" />
+ <hr />
+ </form>
+ </body>
+</html>
+"""
+
+class AuthFormHandler(object):
+ """
+ HTML-based login middleware
+
+ This causes a HTML form to be returned if ``REMOTE_USER`` is
+ not found in the ``environ``. If the form is returned, the
+ ``username`` and ``password`` combination are given to a
+ user-supplied authentication function, ``authfunc``. If this
+ is successful, then application processing continues.
+
+ Parameters:
+
+ ``application``
+
+ The application object is called only upon successful
+ authentication, and can assume ``environ['REMOTE_USER']``
+ is set. If the ``REMOTE_USER`` is already set, this
+ middleware is simply pass-through.
+
+ ``authfunc``
+
+ This is a mandatory user-defined function which takes a
+ ``environ``, ``username`` and ``password`` for its first
+ three arguments. It should return ``True`` if the user is
+ authenticated.
+
+ ``template``
+
+ This is an optional (a default is provided) HTML
+ fragment that takes exactly one ``%s`` substution
+ argument; which *must* be used for the form's ``action``
+ to ensure that this middleware component does not alter
+ the current path. The HTML form must use ``POST`` and
+ have two input names: ``username`` and ``password``.
+
+ Since the authentication form is submitted (via ``POST``)
+ neither the ``PATH_INFO`` nor the ``QUERY_STRING`` are accessed,
+ and hence the current path remains _unaltered_ through the
+ entire authentication process. If authentication succeeds, the
+ ``REQUEST_METHOD`` is converted from a ``POST`` to a ``GET``,
+ so that a redirect is unnecessary (unlike most form auth
+ implementations)
+ """
+
+ def __init__(self, application, authfunc, template=None):
+ self.application = application
+ self.authfunc = authfunc
+ self.template = template or TEMPLATE
+
+ def __call__(self, environ, start_response):
+ username = environ.get('REMOTE_USER','')
+ if username:
+ return self.application(environ, start_response)
+
+ if 'POST' == environ['REQUEST_METHOD']:
+ formvars = parse_formvars(environ, include_get_vars=False)
+ username = formvars.get('username')
+ password = formvars.get('password')
+ if username and password:
+ if self.authfunc(environ, username, password):
+ environ['AUTH_TYPE'] = 'form'
+ environ['REMOTE_USER'] = username
+ environ['REQUEST_METHOD'] = 'GET'
+ environ['CONTENT_LENGTH'] = ''
+ environ['CONTENT_TYPE'] = ''
+ del environ['paste.parsed_formvars']
+ return self.application(environ, start_response)
+
+ content = self.template % construct_url(environ)
+ start_response("200 OK", [('Content-Type', 'text/html'),
+ ('Content-Length', str(len(content)))])
+ return [content]
+
+middleware = AuthFormHandler
+
+__all__ = ['AuthFormHandler']
+
+def make_form(app, global_conf, realm, authfunc, **kw):
+ """
+ Grant access via form authentication
+
+ Config looks like this::
+
+ [filter:grant]
+ use = egg:Paste#auth_form
+ realm=myrealm
+ authfunc=somepackage.somemodule:somefunction
+
+ """
+ # NOTE(review): ``realm`` is accepted but unused here -- presumably
+ # kept for signature parity with make_digest.  Confirm.
+ from paste.util.import_string import eval_import
+ import types
+ # resolve dotted-name strings from the config into live objects
+ authfunc = eval_import(authfunc)
+ # NOTE(review): assert is stripped under ``python -O``; raising
+ # ValueError would validate the configuration unconditionally.
+ assert isinstance(authfunc, types.FunctionType), "authfunc must resolve to a function"
+ template = kw.get('template')
+ if template is not None:
+ template = eval_import(template)
+ assert isinstance(template, str), "template must resolve to a string"
+
+ return AuthFormHandler(app, authfunc, template)
+
+if "__main__" == __name__:
+ import doctest
+ doctest.testmod(optionflags=doctest.ELLIPSIS)
diff --git a/paste/auth/grantip.py b/paste/auth/grantip.py
new file mode 100644
index 0000000..3fe6e1c
--- /dev/null
+++ b/paste/auth/grantip.py
@@ -0,0 +1,114 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Grant roles and logins based on IP address.
+"""
+import six
+from paste.util import ip4
+
+class GrantIPMiddleware(object):
+
+ """
+ On each request, ``ip_map`` is checked against ``REMOTE_ADDR``
+ and logins and roles are assigned based on that.
+
+ ``ip_map`` is a map of {ip_mask: (username, roles)}. Either
+ ``username`` or ``roles`` may be None. Roles may also be prefixed
+ with ``-``, like ``'-system'`` meaning that role should be
+ revoked. ``'__remove__'`` for a username will remove the username.
+
+ If ``clobber_username`` is true (default) then any user
+ specification will override the current value of ``REMOTE_USER``.
+ ``'__remove__'`` will always clobber the username.
+
+ ``ip_mask`` is something that `paste.util.ip4:IP4Range
+ <class-paste.util.ip4.IP4Range.html>`_ can parse. Simple IP
+ addresses, IP/mask, ip<->ip ranges, and hostnames are allowed.
+ """
+
+ def __init__(self, app, ip_map, clobber_username=True):
+ self.app = app
+ self.ip_map = []
+ for key, value in ip_map.items():
+ self.ip_map.append((ip4.IP4Range(key),
+ self._convert_user_role(value[0], value[1])))
+ self.clobber_username = clobber_username
+
+ def _convert_user_role(self, username, roles):
+ if roles and isinstance(roles, six.string_types):
+ roles = roles.split(',')
+ return (username, roles)
+
+ def __call__(self, environ, start_response):
+ addr = ip4.ip2int(environ['REMOTE_ADDR'], False)
+ remove_user = False
+ add_roles = []
+ for range, (username, roles) in self.ip_map:
+ if addr in range:
+ if roles:
+ add_roles.extend(roles)
+ if username == '__remove__':
+ remove_user = True
+ elif username:
+ if (not environ.get('REMOTE_USER')
+ or self.clobber_username):
+ environ['REMOTE_USER'] = username
+ if (remove_user and 'REMOTE_USER' in environ):
+ del environ['REMOTE_USER']
+ if roles:
+ self._set_roles(environ, add_roles)
+ return self.app(environ, start_response)
+
+ def _set_roles(self, environ, roles):
+ cur_roles = environ.get('REMOTE_USER_TOKENS', '').split(',')
+ # Get rid of empty roles:
+ cur_roles = list(filter(None, cur_roles))
+ remove_roles = []
+ for role in roles:
+ if role.startswith('-'):
+ remove_roles.append(role[1:])
+ else:
+ if role not in cur_roles:
+ cur_roles.append(role)
+ for role in remove_roles:
+ if role in cur_roles:
+ cur_roles.remove(role)
+ environ['REMOTE_USER_TOKENS'] = ','.join(cur_roles)
+
+
+def make_grantip(app, global_conf, clobber_username=False, **kw):
+ """
+ Grant roles or usernames based on IP addresses.
+
+ Config looks like this::
+
+ [filter:grant]
+ use = egg:Paste#grantip
+ clobber_username = true
+ # Give localhost system role (no username):
+ 127.0.0.1 = -:system
+ # Give everyone in 192.168.0.* editor role:
+ 192.168.0.0/24 = -:editor
+ # Give one IP the username joe:
+ 192.168.0.7 = joe
+ # And one IP is should not be logged in:
+ 192.168.0.10 = __remove__:-editor
+
+ """
+ from paste.deploy.converters import asbool
+ clobber_username = asbool(clobber_username)
+ ip_map = {}
+ for key, value in kw.items():
+ if ':' in value:
+ username, role = value.split(':', 1)
+ else:
+ username = value
+ role = ''
+ if username == '-':
+ username = ''
+ if role == '-':
+ role = ''
+ ip_map[key] = value
+ return GrantIPMiddleware(app, ip_map, clobber_username)
+
+
diff --git a/paste/auth/multi.py b/paste/auth/multi.py
new file mode 100644
index 0000000..b378fa6
--- /dev/null
+++ b/paste/auth/multi.py
@@ -0,0 +1,79 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+Authentication via Multiple Methods
+
+In some environments, the choice of authentication method to be used
+depends upon the environment and is not "fixed". This middleware allows
+N authentication methods to be registered along with a goodness function
+which determines which method should be used. The following example
+demonstrates how to use both form and digest authentication in a server
+stack; by default it uses form-based authentication unless
+``*authmeth=digest`` is specified as a query argument.
+
+>>> from paste.auth import form, cookie, digest, multi
+>>> from paste.wsgilib import dump_environ
+>>> from paste.httpserver import serve
+>>>
+>>> multi = multi.MultiHandler(dump_environ)
+>>> def authfunc(environ, realm, user):
+... return digest.digest_password(realm, user, user)
+>>> multi.add_method('digest', digest.middleware, "Test Realm", authfunc)
+>>> multi.set_query_argument('digest')
+>>>
+>>> def authfunc(environ, username, password):
+... return username == password
+>>> multi.add_method('form', form.middleware, authfunc)
+>>> multi.set_default('form')
+>>> serve(cookie.middleware(multi))
+serving on...
+
+"""
+
+class MultiHandler(object):
+ """
+ Multiple Authentication Handler
+
+ This middleware provides two orthogonal facilities:
+
+ - a manner to register any number of authentication middlewares
+
+ - a mechanism to register predicates which cause one of the
+ registered middlewares to be used depending upon the request
+
+ If none of the predicates returns True, then the application is
+ invoked directly without middleware
+ """
+ def __init__(self, application):
+ self.application = application
+ self.default = application
+ self.binding = {}
+ self.predicate = []
+ def add_method(self, name, factory, *args, **kwargs):
+ self.binding[name] = factory(self.application, *args, **kwargs)
+ def add_predicate(self, name, checker):
+ self.predicate.append((checker, self.binding[name]))
+ def set_default(self, name):
+ """ set default authentication method """
+ self.default = self.binding[name]
+ def set_query_argument(self, name, key = '*authmeth', value = None):
+ """ choose authentication method based on a query argument """
+ lookfor = "%s=%s" % (key, value or name)
+ self.add_predicate(name,
+ lambda environ: lookfor in environ.get('QUERY_STRING',''))
+ def __call__(self, environ, start_response):
+ for (checker, binding) in self.predicate:
+ if checker(environ):
+ return binding(environ, start_response)
+ return self.default(environ, start_response)
+
+middleware = MultiHandler
+
+__all__ = ['MultiHandler']
+
+if "__main__" == __name__:
+ import doctest
+ doctest.testmod(optionflags=doctest.ELLIPSIS)
+
diff --git a/paste/auth/open_id.py b/paste/auth/open_id.py
new file mode 100644
index 0000000..f79f7f8
--- /dev/null
+++ b/paste/auth/open_id.py
@@ -0,0 +1,413 @@
+# (c) 2005 Ben Bangert
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+OpenID Authentication (Consumer)
+
+OpenID is a distributed authentication system for single sign-on originally
+developed at/for LiveJournal.com.
+
+ http://openid.net/
+
+URL. You can have multiple identities in the same way you can have multiple
+URLs. All OpenID does is provide a way to prove that you own a URL (identity).
+And it does this without passing around your password, your email address, or
+anything you don't want it to. There's no profile exchange component at all:
+your profile is your identity URL, but recipients of your identity can then
+learn more about you from any public, semantically interesting documents
+linked thereunder (FOAF, RSS, Atom, vCARD, etc.).
+
+``Note``: paste.auth.openid requires installation of the Python-OpenID
+libraries::
+
+ http://www.openidenabled.com/
+
+This module is based highly off the consumer.py that Python OpenID comes with.
+
+Using the OpenID Middleware
+===========================
+
+Using the OpenID middleware is fairly easy, the most minimal example using the
+basic login form that's included::
+
+ # Add to your wsgi app creation
+ from paste.auth import open_id
+
+ wsgi_app = open_id.middleware(wsgi_app, '/somewhere/to/store/openid/data')
+
+You will now have the OpenID form available at /oid on your site. Logging in will
+verify that the login worked.
+
+A more complete login should involve having the OpenID middleware load your own
+login page after verifying the OpenID URL so that you can retain the login
+information in your webapp (session, cookies, etc.)::
+
+ wsgi_app = open_id.middleware(wsgi_app, '/somewhere/to/store/openid/data',
+ login_redirect='/your/login/code')
+
+Your login code should then be configured to retrieve 'paste.auth.open_id' for
+the users OpenID URL. If this key does not exist, the user has not logged in.
+
+Once the login is retrieved, it should be saved in your webapp, and the user
+should be redirected to wherever they would normally go after a successful
+login.
+"""
+
+__all__ = ['AuthOpenIDHandler']
+
+import cgi
+import urlparse
+import re
+import six
+
+import paste.request
+from paste import httpexceptions
+
+def quoteattr(s):
+ qs = cgi.escape(s, 1)
+ return '"%s"' % (qs,)
+
+# You may need to manually add the openid package into your
+# python path if you don't have it installed with your system python.
+# If so, uncomment the line below, and change the path where you have
+# Python-OpenID.
+# sys.path.append('/path/to/openid/')
+
+from openid.store import filestore
+from openid.consumer import consumer
+from openid.oidutil import appendArgs
+
+class AuthOpenIDHandler(object):
+ """
+ This middleware implements OpenID Consumer behavior to authenticate a
+ URL against an OpenID Server.
+ """
+
+ def __init__(self, app, data_store_path, auth_prefix='/oid',
+ login_redirect=None, catch_401=False,
+ url_to_username=None):
+ """
+ Initialize the OpenID middleware
+
+ ``app``
+ Your WSGI app to call
+
+ ``data_store_path``
+ Directory to store crypto data in for use with OpenID servers.
+
+ ``auth_prefix``
+ Location for authentication process/verification
+
+ ``login_redirect``
+ Location to load after successful process of login
+
+ ``catch_401``
+ If true, then any 401 responses will turn into open ID login
+ requirements.
+
+ ``url_to_username``
+ A function called like ``url_to_username(environ, url)``, which should
+ return a string username. If not given, the URL will be the username.
+ """
+ store = filestore.FileOpenIDStore(data_store_path)
+ self.oidconsumer = consumer.OpenIDConsumer(store)
+
+ self.app = app
+ self.auth_prefix = auth_prefix
+ self.data_store_path = data_store_path
+ self.login_redirect = login_redirect
+ self.catch_401 = catch_401
+ self.url_to_username = url_to_username
+
+ def __call__(self, environ, start_response):
+ if environ['PATH_INFO'].startswith(self.auth_prefix):
+ # Let's load everything into a request dict to pass around easier
+ request = dict(environ=environ, start=start_response, body=[])
+ request['base_url'] = paste.request.construct_url(environ, with_path_info=False,
+ with_query_string=False)
+
+ path = re.sub(self.auth_prefix, '', environ['PATH_INFO'])
+ request['parsed_uri'] = urlparse.urlparse(path)
+ request['query'] = dict(paste.request.parse_querystring(environ))
+
+ path = request['parsed_uri'][2]
+ if path == '/' or not path:
+ return self.render(request)
+ elif path == '/verify':
+ return self.do_verify(request)
+ elif path == '/process':
+ return self.do_process(request)
+ else:
+ return self.not_found(request)
+ else:
+ if self.catch_401:
+ return self.catch_401_app_call(environ, start_response)
+ return self.app(environ, start_response)
+
+ def catch_401_app_call(self, environ, start_response):
+ """
+ Call the application, and redirect if the app returns a 401 response
+ """
+ was_401 = []
+ def replacement_start_response(status, headers, exc_info=None):
+ if int(status.split(None, 1)) == 401:
+ # @@: Do I need to append something to go back to where we
+ # came from?
+ was_401.append(1)
+ def dummy_writer(v):
+ pass
+ return dummy_writer
+ else:
+ return start_response(status, headers, exc_info)
+ app_iter = self.app(environ, replacement_start_response)
+ if was_401:
+ try:
+ list(app_iter)
+ finally:
+ if hasattr(app_iter, 'close'):
+ app_iter.close()
+ redir_url = paste.request.construct_url(environ, with_path_info=False,
+ with_query_string=False)
+ exc = httpexceptions.HTTPTemporaryRedirect(redir_url)
+ return exc.wsgi_application(environ, start_response)
+ else:
+ return app_iter
+
+ def do_verify(self, request):
+ """Process the form submission, initiating OpenID verification.
+ """
+
+ # First, make sure that the user entered something
+ openid_url = request['query'].get('openid_url')
+ if not openid_url:
+ return self.render(request, 'Enter an identity URL to verify.',
+ css_class='error', form_contents=openid_url)
+
+ oidconsumer = self.oidconsumer
+
+ # Then, ask the library to begin the authorization.
+ # Here we find out the identity server that will verify the
+ # user's identity, and get a token that allows us to
+ # communicate securely with the identity server.
+ status, info = oidconsumer.beginAuth(openid_url)
+
+ # If the URL was unusable (either because of network
+ # conditions, a server error, or that the response returned
+ # was not an OpenID identity page), the library will return
+ # an error code. Let the user know that that URL is unusable.
+ if status in [consumer.HTTP_FAILURE, consumer.PARSE_ERROR]:
+ if status == consumer.HTTP_FAILURE:
+ fmt = 'Failed to retrieve <q>%s</q>'
+ else:
+ fmt = 'Could not find OpenID information in <q>%s</q>'
+
+ message = fmt % (cgi.escape(openid_url),)
+ return self.render(request, message, css_class='error', form_contents=openid_url)
+ elif status == consumer.SUCCESS:
+ # The URL was a valid identity URL. Now we construct a URL
+ # that will get us to process the server response. We will
+ # need the token from the beginAuth call when processing
+ # the response. A cookie or a session object could be used
+ # to accomplish this, but for simplicity here we just add
+ # it as a query parameter of the return-to URL.
+ return_to = self.build_url(request, 'process', token=info.token)
+
+ # Now ask the library for the URL to redirect the user to
+ # his OpenID server. It is required for security that the
+ # return_to URL must be under the specified trust_root. We
+ # just use the base_url for this server as a trust root.
+ redirect_url = oidconsumer.constructRedirect(
+ info, return_to, trust_root=request['base_url'])
+
+ # Send the redirect response
+ return self.redirect(request, redirect_url)
+ else:
+ assert False, 'Not reached'
+
+ def do_process(self, request):
+ """Handle the redirect from the OpenID server.
+ """
+ oidconsumer = self.oidconsumer
+
+ # retrieve the token from the environment (in this case, the URL)
+ token = request['query'].get('token', '')
+
+ # Ask the library to check the response that the server sent
+ # us. Status is a code indicating the response type. info is
+ # either None or a string containing more information about
+ # the return type.
+ status, info = oidconsumer.completeAuth(token, request['query'])
+
+ css_class = 'error'
+ openid_url = None
+ if status == consumer.FAILURE and info:
+ # In the case of failure, if info is non-None, it is the
+ # URL that we were verifying. We include it in the error
+ # message to help the user figure out what happened.
+ openid_url = info
+ fmt = "Verification of %s failed."
+ message = fmt % (cgi.escape(openid_url),)
+ elif status == consumer.SUCCESS:
+ # Success means that the transaction completed without
+ # error. If info is None, it means that the user cancelled
+ # the verification.
+ css_class = 'alert'
+ if info:
+ # This is a successful verification attempt. If this
+ # was a real application, we would do our login,
+ # comment posting, etc. here.
+ openid_url = info
+ if self.url_to_username:
+ username = self.url_to_username(request['environ'], openid_url)
+ else:
+ username = openid_url
+ if 'paste.auth_tkt.set_user' in request['environ']:
+ request['environ']['paste.auth_tkt.set_user'](username)
+ if not self.login_redirect:
+ fmt = ("If you had supplied a login redirect path, you would have "
+ "been redirected there. "
+ "You have successfully verified %s as your identity.")
+ message = fmt % (cgi.escape(openid_url),)
+ else:
+ # @@: This stuff doesn't make sense to me; why not a remote redirect?
+ request['environ']['paste.auth.open_id'] = openid_url
+ request['environ']['PATH_INFO'] = self.login_redirect
+ return self.app(request['environ'], request['start'])
+ #exc = httpexceptions.HTTPTemporaryRedirect(self.login_redirect)
+ #return exc.wsgi_application(request['environ'], request['start'])
+ else:
+ # cancelled
+ message = 'Verification cancelled'
+ else:
+ # Either we don't understand the code or there is no
+ # openid_url included with the error. Give a generic
+ # failure message. The library should supply debug
+ # information in a log.
+ message = 'Verification failed.'
+
+ return self.render(request, message, css_class, openid_url)
+
+ def build_url(self, request, action, **query):
+ """Build a URL relative to the server base_url, with the given
+ query parameters added."""
+ base = urlparse.urljoin(request['base_url'], self.auth_prefix + '/' + action)
+ return appendArgs(base, query)
+
+ def redirect(self, request, redirect_url):
+ """Send a redirect response to the given URL to the browser."""
+ response_headers = [('Content-type', 'text/plain'),
+ ('Location', redirect_url)]
+ request['start']('302 REDIRECT', response_headers)
+ return ["Redirecting to %s" % redirect_url]
+
+ def not_found(self, request):
+ """Render a page with a 404 return code and a message."""
+ fmt = 'The path <q>%s</q> was not understood by this server.'
+ msg = fmt % (request['parsed_uri'],)
+ openid_url = request['query'].get('openid_url')
+ return self.render(request, msg, 'error', openid_url, status='404 Not Found')
+
+ def render(self, request, message=None, css_class='alert', form_contents=None,
+ status='200 OK', title="Python OpenID Consumer"):
+ """Render a page."""
+ response_headers = [('Content-type', 'text/html')]
+ request['start'](str(status), response_headers)
+
+ self.page_header(request, title)
+ if message:
+ request['body'].append("<div class='%s'>" % (css_class,))
+ request['body'].append(message)
+ request['body'].append("</div>")
+ self.page_footer(request, form_contents)
+ return request['body']
+
+ def page_header(self, request, title):
+ """Render the page header"""
+ request['body'].append('''\
+<html>
+ <head><title>%s</title></head>
+ <style type="text/css">
+ * {
+ font-family: verdana,sans-serif;
+ }
+ body {
+ width: 50em;
+ margin: 1em;
+ }
+ div {
+ padding: .5em;
+ }
+ table {
+ margin: none;
+ padding: none;
+ }
+ .alert {
+ border: 1px solid #e7dc2b;
+ background: #fff888;
+ }
+ .error {
+ border: 1px solid #ff0000;
+ background: #ffaaaa;
+ }
+ #verify-form {
+ border: 1px solid #777777;
+ background: #dddddd;
+ margin-top: 1em;
+ padding-bottom: 0em;
+ }
+ </style>
+ <body>
+ <h1>%s</h1>
+ <p>
+ This example consumer uses the <a
+ href="http://openid.schtuff.com/">Python OpenID</a> library. It
+ just verifies that the URL that you enter is your identity URL.
+ </p>
+''' % (title, title))
+
+ def page_footer(self, request, form_contents):
+ """Render the page footer"""
+ if not form_contents:
+ form_contents = ''
+
+ request['body'].append('''\
+ <div id="verify-form">
+ <form method="get" action=%s>
+ Identity&nbsp;URL:
+ <input type="text" name="openid_url" value=%s />
+ <input type="submit" value="Verify" />
+ </form>
+ </div>
+ </body>
+</html>
+''' % (quoteattr(self.build_url(request, 'verify')), quoteattr(form_contents)))
+
+
+middleware = AuthOpenIDHandler
+
+def make_open_id_middleware(
+ app,
+ global_conf,
+ # Should this default to something, or inherit something from global_conf?:
+ data_store_path,
+ auth_prefix='/oid',
+ login_redirect=None,
+ catch_401=False,
+ url_to_username=None,
+ apply_auth_tkt=False,
+ auth_tkt_logout_path=None):
+ from paste.deploy.converters import asbool
+ from paste.util import import_string
+ catch_401 = asbool(catch_401)
+ if url_to_username and isinstance(url_to_username, six.string_types):
+ url_to_username = import_string.eval_import(url_to_username)
+ apply_auth_tkt = asbool(apply_auth_tkt)
+ new_app = AuthOpenIDHandler(
+ app, data_store_path=data_store_path, auth_prefix=auth_prefix,
+ login_redirect=login_redirect, catch_401=catch_401,
+ url_to_username=url_to_username or None)
+ if apply_auth_tkt:
+ from paste.auth import auth_tkt
+ new_app = auth_tkt.make_auth_tkt_middleware(
+ new_app, global_conf, logout_path=auth_tkt_logout_path)
+ return new_app
diff --git a/paste/cascade.py b/paste/cascade.py
new file mode 100644
index 0000000..8207ae3
--- /dev/null
+++ b/paste/cascade.py
@@ -0,0 +1,133 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Cascades through several applications, so long as applications
+return ``404 Not Found``.
+"""
+from paste import httpexceptions
+from paste.util import converters
+import tempfile
+from cStringIO import StringIO
+
+__all__ = ['Cascade']
+
+def make_cascade(loader, global_conf, catch='404', **local_conf):
+ """
+ Entry point for Paste Deploy configuration
+
+ Expects configuration like::
+
+ [composit:cascade]
+ use = egg:Paste#cascade
+ # all start with 'app' and are sorted alphabetically
+ app1 = foo
+ app2 = bar
+ ...
+ catch = 404 500 ...
+ """
+ catch = map(int, converters.aslist(catch))
+ apps = []
+ for name, value in local_conf.items():
+ if not name.startswith('app'):
+ raise ValueError(
+ "Bad configuration key %r (=%r); all configuration keys "
+ "must start with 'app'"
+ % (name, value))
+ app = loader.get_app(value, global_conf=global_conf)
+ apps.append((name, app))
+ apps.sort()
+ apps = [app for name, app in apps]
+ return Cascade(apps, catch=catch)
+
+class Cascade(object):
+
+ """
+ Passed a list of applications, ``Cascade`` will try each of them
+ in turn. If one returns a status code listed in ``catch`` (by
+ default just ``404 Not Found``) then the next application is
+ tried.
+
+ If all applications fail, then the last application's failure
+ response is used.
+
+ Instances of this class are WSGI applications.
+ """
+
+ def __init__(self, applications, catch=(404,)):
+ self.apps = applications
+ self.catch_codes = {}
+ self.catch_exceptions = []
+ for error in catch:
+ if isinstance(error, str):
+ error = int(error.split(None, 1)[0])
+ if isinstance(error, httpexceptions.HTTPException):
+ exc = error
+ code = error.code
+ else:
+ exc = httpexceptions.get_exception(error)
+ code = error
+ self.catch_codes[code] = exc
+ self.catch_exceptions.append(exc)
+ self.catch_exceptions = tuple(self.catch_exceptions)
+
+ def __call__(self, environ, start_response):
+ """
+ WSGI application interface
+ """
+ failed = []
+ def repl_start_response(status, headers, exc_info=None):
+ code = int(status.split(None, 1)[0])
+ if code in self.catch_codes:
+ failed.append(None)
+ return _consuming_writer
+ return start_response(status, headers, exc_info)
+
+ try:
+ length = int(environ.get('CONTENT_LENGTH', 0) or 0)
+ except ValueError:
+ length = 0
+ if length > 0:
+ # We have to copy wsgi.input
+ copy_wsgi_input = True
+ if length > 4096 or length < 0:
+ f = tempfile.TemporaryFile()
+ if length < 0:
+ f.write(environ['wsgi.input'].read())
+ else:
+ copy_len = length
+ while copy_len > 0:
+ chunk = environ['wsgi.input'].read(min(copy_len, 4096))
+ if not chunk:
+ raise IOError("Request body truncated")
+ f.write(chunk)
+ copy_len -= len(chunk)
+ f.seek(0)
+ else:
+ f = StringIO(environ['wsgi.input'].read(length))
+ environ['wsgi.input'] = f
+ else:
+ copy_wsgi_input = False
+ for app in self.apps[:-1]:
+ environ_copy = environ.copy()
+ if copy_wsgi_input:
+ environ_copy['wsgi.input'].seek(0)
+ failed = []
+ try:
+ v = app(environ_copy, repl_start_response)
+ if not failed:
+ return v
+ else:
+ if hasattr(v, 'close'):
+ # Exhaust the iterator first:
+ list(v)
+ # then close:
+ v.close()
+ except self.catch_exceptions:
+ pass
+ if copy_wsgi_input:
+ environ['wsgi.input'].seek(0)
+ return self.apps[-1](environ, start_response)
+
+def _consuming_writer(s):
+ pass
diff --git a/paste/cgiapp.py b/paste/cgiapp.py
new file mode 100644
index 0000000..e5a62f4
--- /dev/null
+++ b/paste/cgiapp.py
@@ -0,0 +1,280 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Application that runs a CGI script.
+"""
+import os
+import sys
+import subprocess
+from six.moves.urllib.parse import quote
+try:
+ import select
+except ImportError:
+ select = None
+import six
+
+from paste.util import converters
+
+__all__ = ['CGIError', 'CGIApplication']
+
+class CGIError(Exception):
+ """
+ Raised when the CGI script can't be found or doesn't
+ act like a proper CGI script.
+ """
+
+class CGIApplication(object):
+
+ """
+ This object acts as a proxy to a CGI application. You pass in the
+ script path (``script``), an optional path to search for the
+ script (if the name isn't absolute) (``path``). If you don't give
+ a path, then ``$PATH`` will be used.
+ """
+
+ def __init__(self,
+ global_conf,
+ script,
+ path=None,
+ include_os_environ=True,
+ query_string=None):
+ if global_conf:
+ raise NotImplemented(
+ "global_conf is no longer supported for CGIApplication "
+ "(use make_cgi_application); please pass None instead")
+ self.script_filename = script
+ if path is None:
+ path = os.environ.get('PATH', '').split(':')
+ self.path = path
+ if '?' in script:
+ assert query_string is None, (
+ "You cannot have '?' in your script name (%r) and also "
+ "give a query_string (%r)" % (script, query_string))
+ script, query_string = script.split('?', 1)
+ if os.path.abspath(script) != script:
+ # relative path
+ for path_dir in self.path:
+ if os.path.exists(os.path.join(path_dir, script)):
+ self.script = os.path.join(path_dir, script)
+ break
+ else:
+ raise CGIError(
+ "Script %r not found in path %r"
+ % (script, self.path))
+ else:
+ self.script = script
+ self.include_os_environ = include_os_environ
+ self.query_string = query_string
+
+ def __call__(self, environ, start_response):
+ if 'REQUEST_URI' not in environ:
+ environ['REQUEST_URI'] = (
+ quote(environ.get('SCRIPT_NAME', ''))
+ + quote(environ.get('PATH_INFO', '')))
+ if self.include_os_environ:
+ cgi_environ = os.environ.copy()
+ else:
+ cgi_environ = {}
+ for name in environ:
+ # Should unicode values be encoded?
+ if (name.upper() == name
+ and isinstance(environ[name], str)):
+ cgi_environ[name] = environ[name]
+ if self.query_string is not None:
+ old = cgi_environ.get('QUERY_STRING', '')
+ if old:
+ old += '&'
+ cgi_environ['QUERY_STRING'] = old + self.query_string
+ cgi_environ['SCRIPT_FILENAME'] = self.script
+ proc = subprocess.Popen(
+ [self.script],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=cgi_environ,
+ cwd=os.path.dirname(self.script),
+ )
+ writer = CGIWriter(environ, start_response)
+ if select and sys.platform != 'win32':
+ proc_communicate(
+ proc,
+ stdin=StdinReader.from_environ(environ),
+ stdout=writer,
+ stderr=environ['wsgi.errors'])
+ else:
+ stdout, stderr = proc.communicate(StdinReader.from_environ(environ).read())
+ if stderr:
+ environ['wsgi.errors'].write(stderr)
+ writer.write(stdout)
+ if not writer.headers_finished:
+ start_response(writer.status, writer.headers)
+ return []
+
+class CGIWriter(object):
+
+ def __init__(self, environ, start_response):
+ self.environ = environ
+ self.start_response = start_response
+ self.status = '200 OK'
+ self.headers = []
+ self.headers_finished = False
+ self.writer = None
+ self.buffer = b''
+
+ def write(self, data):
+ if self.headers_finished:
+ self.writer(data)
+ return
+ self.buffer += data
+ while b'\n' in self.buffer:
+ if b'\r\n' in self.buffer and self.buffer.find(b'\r\n') < self.buffer.find(b'\n'):
+ line1, self.buffer = self.buffer.split(b'\r\n', 1)
+ else:
+ line1, self.buffer = self.buffer.split(b'\n', 1)
+ if not line1:
+ self.headers_finished = True
+ self.writer = self.start_response(
+ self.status, self.headers)
+ self.writer(self.buffer)
+ del self.buffer
+ del self.headers
+ del self.status
+ break
+ elif b':' not in line1:
+ raise CGIError(
+ "Bad header line: %r" % line1)
+ else:
+ name, value = line1.split(b':', 1)
+ value = value.lstrip()
+ name = name.strip()
+ if six.PY3:
+ name = name.decode('utf8')
+ value = value.decode('utf8')
+ if name.lower() == 'status':
+ if ' ' not in value:
+ # WSGI requires this space, sometimes CGI scripts don't set it:
+ value = '%s General' % value
+ self.status = value
+ else:
+ self.headers.append((name, value))
+
+class StdinReader(object):
+
+ def __init__(self, stdin, content_length):
+ self.stdin = stdin
+ self.content_length = content_length
+
+ @classmethod
+ def from_environ(cls, environ):
+ length = environ.get('CONTENT_LENGTH')
+ if length:
+ length = int(length)
+ else:
+ length = 0
+ return cls(environ['wsgi.input'], length)
+
+ def read(self, size=None):
+ if not self.content_length:
+ return b''
+ if size is None:
+ text = self.stdin.read(self.content_length)
+ else:
+ text = self.stdin.read(min(self.content_length, size))
+ self.content_length -= len(text)
+ return text
+
+def proc_communicate(proc, stdin=None, stdout=None, stderr=None):
+ """
+ Run the given process, piping input/output/errors to the given
+ file-like objects (which need not be actual file objects, unlike
+ the arguments passed to Popen). Wait for process to terminate.
+
+ Note: this is taken from the posix version of
+ subprocess.Popen.communicate, but made more general through the
+ use of file-like objects.
+ """
+ read_set = []
+ write_set = []
+ input_buffer = b''
+ trans_nl = proc.universal_newlines and hasattr(open, 'newlines')
+
+ if proc.stdin:
+ # Flush stdio buffer. This might block, if the user has
+ # been writing to .stdin in an uncontrolled fashion.
+ proc.stdin.flush()
+ if input:
+ write_set.append(proc.stdin)
+ else:
+ proc.stdin.close()
+ else:
+ assert stdin is None
+ if proc.stdout:
+ read_set.append(proc.stdout)
+ else:
+ assert stdout is None
+ if proc.stderr:
+ read_set.append(proc.stderr)
+ else:
+ assert stderr is None
+
+ while read_set or write_set:
+ rlist, wlist, xlist = select.select(read_set, write_set, [])
+
+ if proc.stdin in wlist:
+ # When select has indicated that the file is writable,
+ # we can write up to PIPE_BUF bytes without risk
+ # blocking. POSIX defines PIPE_BUF >= 512
+ next, input_buffer = input_buffer, b''
+ next_len = 512-len(next)
+ if next_len:
+ next += stdin.read(next_len)
+ if not next:
+ proc.stdin.close()
+ write_set.remove(proc.stdin)
+ else:
+ bytes_written = os.write(proc.stdin.fileno(), next)
+ if bytes_written < len(next):
+ input_buffer = next[bytes_written:]
+
+ if proc.stdout in rlist:
+ data = os.read(proc.stdout.fileno(), 1024)
+ if data == b"":
+ proc.stdout.close()
+ read_set.remove(proc.stdout)
+ if trans_nl:
+ data = proc._translate_newlines(data)
+ stdout.write(data)
+
+ if proc.stderr in rlist:
+ data = os.read(proc.stderr.fileno(), 1024)
+ if data == b"":
+ proc.stderr.close()
+ read_set.remove(proc.stderr)
+ if trans_nl:
+ data = proc._translate_newlines(data)
+ stderr.write(data)
+
+ try:
+ proc.wait()
+ except OSError as e:
+ if e.errno != 10:
+ raise
+
+def make_cgi_application(global_conf, script, path=None, include_os_environ=None,
+ query_string=None):
+ """
+ Paste Deploy interface for :class:`CGIApplication`
+
+ This object acts as a proxy to a CGI application. You pass in the
+ script path (``script``), an optional path to search for the
+ script (if the name isn't absolute) (``path``). If you don't give
+ a path, then ``$PATH`` will be used.
+ """
+ if path is None:
+ path = global_conf.get('path') or global_conf.get('PATH')
+ include_os_environ = converters.asbool(include_os_environ)
+ return CGIApplication(
+ None,
+ script, path=path, include_os_environ=include_os_environ,
+ query_string=query_string)
diff --git a/paste/cgitb_catcher.py b/paste/cgitb_catcher.py
new file mode 100644
index 0000000..f88ffb8
--- /dev/null
+++ b/paste/cgitb_catcher.py
@@ -0,0 +1,121 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+WSGI middleware
+
+Captures any exceptions and prints a pretty report. See the `cgitb
+documentation <http://python.org/doc/current/lib/module-cgitb.html>`_
+for more.
+"""
+
+import cgitb
+import six
+from six.moves import cStringIO as StringIO
+import sys
+
+from paste.util import converters
+
+class NoDefault(object):
+ pass
+
+class CgitbMiddleware(object):
+
+ def __init__(self, app,
+ global_conf=None,
+ display=NoDefault,
+ logdir=None,
+ context=5,
+ format="html"):
+ self.app = app
+ if global_conf is None:
+ global_conf = {}
+ if display is NoDefault:
+ display = global_conf.get('debug')
+ if isinstance(display, six.string_types):
+ display = converters.asbool(display)
+ self.display = display
+ self.logdir = logdir
+ self.context = int(context)
+ self.format = format
+
+ def __call__(self, environ, start_response):
+ try:
+ app_iter = self.app(environ, start_response)
+ return self.catching_iter(app_iter, environ)
+ except:
+ exc_info = sys.exc_info()
+ start_response('500 Internal Server Error',
+ [('content-type', 'text/html')],
+ exc_info)
+ response = self.exception_handler(exc_info, environ)
+ if six.PY3:
+ response = response.encode('utf8')
+ return [response]
+
+ def catching_iter(self, app_iter, environ):
+ if not app_iter:
+ raise StopIteration
+ error_on_close = False
+ try:
+ for v in app_iter:
+ yield v
+ if hasattr(app_iter, 'close'):
+ error_on_close = True
+ app_iter.close()
+ except:
+ response = self.exception_handler(sys.exc_info(), environ)
+ if not error_on_close and hasattr(app_iter, 'close'):
+ try:
+ app_iter.close()
+ except:
+ close_response = self.exception_handler(
+ sys.exc_info(), environ)
+ response += (
+ '<hr noshade>Error in .close():<br>%s'
+ % close_response)
+ if six.PY3:
+ response = response.encode('utf8')
+ yield response
+
+ def exception_handler(self, exc_info, environ):
+ dummy_file = StringIO()
+ hook = cgitb.Hook(file=dummy_file,
+ display=self.display,
+ logdir=self.logdir,
+ context=self.context,
+ format=self.format)
+ hook(*exc_info)
+ return dummy_file.getvalue()
+
+def make_cgitb_middleware(app, global_conf,
+ display=NoDefault,
+ logdir=None,
+ context=5,
+ format='html'):
+ """
+ Wraps the application in the ``cgitb`` (standard library)
+ error catcher.
+
+ display:
+ If true (or debug is set in the global configuration)
+ then the traceback will be displayed in the browser
+
+ logdir:
+ Writes logs of all errors in that directory
+
+ context:
+ Number of lines of context to show around each line of
+ source code
+ """
+ from paste.deploy.converters import asbool
+ if display is not NoDefault:
+ display = asbool(display)
+ if 'debug' in global_conf:
+ global_conf['debug'] = asbool(global_conf['debug'])
+ return CgitbMiddleware(
+ app, global_conf=global_conf,
+ display=display,
+ logdir=logdir,
+ context=context,
+ format=format)
diff --git a/paste/config.py b/paste/config.py
new file mode 100644
index 0000000..c531579
--- /dev/null
+++ b/paste/config.py
@@ -0,0 +1,120 @@
+# (c) 2006 Ian Bicking, Philip Jenvey and contributors
+# Written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""Paste Configuration Middleware and Objects"""
+from paste.registry import RegistryManager, StackedObjectProxy
+
+__all__ = ['DispatchingConfig', 'CONFIG', 'ConfigMiddleware']
+
class DispatchingConfig(StackedObjectProxy):
    """
    This is a configuration object that can be used globally,
    imported, have references held onto.  The configuration may differ
    by thread (or may not).

    Specific configurations are registered (and deregistered) either
    for the process or for threads.
    """
    # @@: What should happen when someone tries to add this
    # configuration to itself?  Probably the conf should become
    # resolved, and get rid of this delegation wrapper

    def __init__(self, name='DispatchingConfig'):
        super(DispatchingConfig, self).__init__(name=name)
        # Assign through __dict__ so the attribute lands on this wrapper
        # itself rather than being forwarded to the proxied configuration.
        # NOTE(review): assumes StackedObjectProxy forwards __setattr__ --
        # confirm against paste.registry.
        self.__dict__['_process_configs'] = []

    def push_thread_config(self, conf):
        """
        Make ``conf`` the active configuration for this thread.
        Thread-local configuration always overrides process-wide
        configuration.

        This should be used like::

            conf = make_conf()
            dispatching_config.push_thread_config(conf)
            try:
                ... do stuff ...
            finally:
                dispatching_config.pop_thread_config(conf)
        """
        self._push_object(conf)

    def pop_thread_config(self, conf=None):
        """
        Remove a thread-local configuration.  If ``conf`` is given,
        it is checked against the popped configuration and an error
        is emitted if they don't match.
        """
        self._pop_object(conf)

    def push_process_config(self, conf):
        """
        Like push_thread_config, but applies the configuration to
        the entire process.
        """
        self._process_configs.append(conf)

    def pop_process_config(self, conf=None):
        """Remove the most recent process-wide configuration, verifying
        it against ``conf`` when given."""
        self._pop_from(self._process_configs, conf)

    def _pop_from(self, lst, conf):
        # Pop the top of ``lst``; if the caller said which object it
        # expects, fail loudly on a mismatch (unbalanced push/pop).
        popped = lst.pop()
        if conf is not None and popped is not conf:
            raise AssertionError(
                "The config popped (%s) is not the same as the config "
                "expected (%s)"
                % (popped, conf))

    def _current_obj(self):
        """Return the active configuration: the thread-registered one if
        any, otherwise the most recent process-wide configuration."""
        try:
            return super(DispatchingConfig, self)._current_obj()
        except TypeError:
            # No thread-local configuration registered; fall back to the
            # process-wide stack.
            if self._process_configs:
                return self._process_configs[-1]
            raise AttributeError(
                "No configuration has been registered for this process "
                "or thread")
    # Convenience aliases for the active configuration.
    current = current_conf = _current_obj
+
# Global configuration proxy; import this to read the active configuration.
CONFIG = DispatchingConfig()

# Sentinel meaning "no config key was present in the environ".
no_config = object()
class ConfigMiddleware(RegistryManager):
    """
    A WSGI middleware that adds a ``paste.config`` key (by default)
    to the request environment, as well as registering the
    configuration temporarily (for the length of the request) with
    ``paste.config.CONFIG`` (or any other ``DispatchingConfig``
    object).
    """

    def __init__(self, application, config, dispatching_config=CONFIG,
                 environ_key='paste.config'):
        """
        This delegates all requests to `application`, adding a *copy*
        of the configuration `config`.
        """
        def register_config(environ, start_response):
            # Remember any config a surrounding ConfigMiddleware already
            # put in the environ, so it can be restored afterwards.
            popped_config = environ.get(environ_key, no_config)
            current_config = environ[environ_key] = config.copy()
            environ['paste.registry'].register(dispatching_config,
                                               current_config)

            try:
                app_iter = application(environ, start_response)
            finally:
                # Restore the outer config (or remove ours entirely).
                if popped_config is no_config:
                    environ.pop(environ_key, None)
                else:
                    environ[environ_key] = popped_config
            return app_iter

        # Bug fix: the original used ``super(self.__class__, self)``,
        # which recurses infinitely as soon as this class is subclassed.
        # Name the class explicitly.
        super(ConfigMiddleware, self).__init__(register_config)
+
def make_config_filter(app, global_conf, **local_conf):
    """Paste Deploy filter factory: wrap ``app`` in ConfigMiddleware.

    The middleware's configuration is the global configuration updated
    with any local overrides.
    """
    merged = dict(global_conf)
    merged.update(local_conf)
    return ConfigMiddleware(app, merged)
+
+make_config_middleware = ConfigMiddleware.__doc__
diff --git a/paste/cowbell/__init__.py b/paste/cowbell/__init__.py
new file mode 100644
index 0000000..5a0d22d
--- /dev/null
+++ b/paste/cowbell/__init__.py
@@ -0,0 +1,104 @@
+# Cowbell images: http://commons.wikimedia.org/wiki/Image:Cowbell-1.jpg
+import os
+import re
+from paste.fileapp import FileApp
+from paste.response import header_value, remove_header
+
+SOUND = "http://www.c-eye.net/eyeon/WalkenWAVS/explorestudiospace.wav"
+
class MoreCowbell(object):
    """WSGI middleware that overlays a randomly appearing/disappearing
    cowbell image (with accompanying JavaScript) on every text/html page
    served by the wrapped application."""

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        path_info = environ.get('PATH_INFO', '')
        script_name = environ.get('SCRIPT_NAME', '')
        # Serve the two bell images ourselves from /.cowbell/.
        for filename in ['bell-ascending.png', 'bell-descending.png']:
            if path_info == '/.cowbell/'+ filename:
                app = FileApp(os.path.join(os.path.dirname(__file__), filename))
                return app(environ, start_response)
        type = []       # non-empty iff the response is text/html
        body = []       # buffered body chunks for an HTML response
        def repl_start_response(status, headers, exc_info=None):
            ct = header_value(headers, 'content-type')
            if ct and ct.startswith('text/html'):
                # HTML response: buffer the body (the returned write
                # callable is body.append) so it can be rewritten below;
                # the length will change, so drop Content-Length.
                type.append(ct)
                remove_header(headers, 'content-length')
                start_response(status, headers, exc_info)
                return body.append
            return start_response(status, headers, exc_info)
        app_iter = self.app(environ, repl_start_response)
        if type:
            # Got text/html: inject the script into <head> and the image
            # containers into <body>.
            body.extend(app_iter)
            body = ''.join(body)
            body = insert_head(body, self.javascript.replace('__SCRIPT_NAME__', script_name))
            body = insert_body(body, self.resources.replace('__SCRIPT_NAME__', script_name))
            return [body]
        else:
            return app_iter

    # JavaScript that cycles the bell through hidden -> ascending ->
    # descending at random positions; __SCRIPT_NAME__ is substituted per
    # request.
    javascript = '''\
<script type="text/javascript">
var cowbellState = 'hidden';
var lastCowbellPosition = null;
function showSomewhere() {
    var sec, el;
    if (cowbellState == 'hidden') {
        el = document.getElementById('cowbell-ascending');
        lastCowbellPosition = [parseInt(Math.random()*(window.innerWidth-200)),
                               parseInt(Math.random()*(window.innerHeight-200))];
        el.style.left = lastCowbellPosition[0] + 'px';
        el.style.top = lastCowbellPosition[1] + 'px';
        el.style.display = '';
        cowbellState = 'ascending';
        sec = 1;
    } else if (cowbellState == 'ascending') {
        document.getElementById('cowbell-ascending').style.display = 'none';
        el = document.getElementById('cowbell-descending');
        el.style.left = lastCowbellPosition[0] + 'px';
        el.style.top = lastCowbellPosition[1] + 'px';
        el.style.display = '';
        cowbellState = 'descending';
        sec = 1;
    } else {
        document.getElementById('cowbell-descending').style.display = 'none';
        cowbellState = 'hidden';
        sec = Math.random()*20;
    }
    setTimeout(showSomewhere, sec*1000);
}
setTimeout(showSomewhere, Math.random()*20*1000);
</script>
'''

    # HTML containers for the two bell images, injected before </body>.
    resources = '''\
<div id="cowbell-ascending" style="display: none; position: fixed">
<img src="__SCRIPT_NAME__/.cowbell/bell-ascending.png">
</div>
<div id="cowbell-descending" style="display: none; position: fixed">
<img src="__SCRIPT_NAME__/.cowbell/bell-descending.png">
</div>
'''
+
def insert_head(body, text):
    """Insert ``text`` into ``body`` immediately before the closing
    ``</head>`` tag; prepend it when no ``</head>`` is present.
    """
    end_head = re.search(r'</head>', body, re.I)
    if end_head:
        # Bug fix: the original spliced body[:start] + text + body[end:],
        # silently deleting the </head> tag; keep the tag intact.
        return body[:end_head.start()] + text + body[end_head.start():]
    else:
        return text + body
+
def insert_body(body, text):
    """Insert ``text`` into ``body`` immediately before the closing
    ``</body>`` tag; append it when no ``</body>`` is present.
    """
    end_body = re.search(r'</body>', body, re.I)
    if end_body:
        # Bug fix: the original spliced body[:start] + text + body[end:],
        # silently deleting the </body> tag; keep the tag intact.
        return body[:end_body.start()] + text + body[end_body.start():]
    else:
        return body + text
+
def make_cowbell(global_conf, app):
    """Paste Deploy app factory wrapping ``app`` in MoreCowbell."""
    return MoreCowbell(app)
+
if __name__ == '__main__':
    # Demo: serve a trivial page wrapped in the cowbell middleware.
    from paste.debug.debugapp import SimpleApplication
    app = MoreCowbell(SimpleApplication())
    from paste.httpserver import serve
    serve(app)
diff --git a/paste/cowbell/bell-ascending.png b/paste/cowbell/bell-ascending.png
new file mode 100644
index 0000000..42f33db
--- /dev/null
+++ b/paste/cowbell/bell-ascending.png
Binary files differ
diff --git a/paste/cowbell/bell-descending.png b/paste/cowbell/bell-descending.png
new file mode 100644
index 0000000..dac8012
--- /dev/null
+++ b/paste/cowbell/bell-descending.png
Binary files differ
diff --git a/paste/debug/__init__.py b/paste/debug/__init__.py
new file mode 100644
index 0000000..daef7cc
--- /dev/null
+++ b/paste/debug/__init__.py
@@ -0,0 +1,5 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Package for debugging and development tools
+"""
diff --git a/paste/debug/debugapp.py b/paste/debug/debugapp.py
new file mode 100755
index 0000000..f752c36
--- /dev/null
+++ b/paste/debug/debugapp.py
@@ -0,0 +1,79 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+Various Applications for Debugging/Testing Purposes
+"""
+
+import time
+__all__ = ['SimpleApplication', 'SlowConsumer']
+
+
class SimpleApplication(object):
    """
    Produces a simple web page
    """

    def __call__(self, environ, start_response):
        page = b"<html><body>simple</body></html>"
        headers = [('Content-Type', 'text/html'),
                   ('Content-Length', str(len(page)))]
        start_response("200 OK", headers)
        return [page]
+
class SlowConsumer(object):
    """
    Consumes an upload slowly...

    NOTE: This should use the iterator form of ``wsgi.input``,
    but it isn't implemented in paste.httpserver.
    """
    def __init__(self, chunk_size=4096, delay=1, progress=True):
        self.chunk_size = chunk_size
        self.delay = delay
        # Bug fix: the original hard-coded ``self.progress = True``,
        # silently ignoring the constructor argument.
        self.progress = progress

    def __call__(self, environ, start_response):
        size = 0
        total = environ.get('CONTENT_LENGTH')
        if total:
            remaining = int(total)
            while remaining > 0:
                if self.progress:
                    print("%s of %s remaining" % (remaining, total))
                # Bug fix: honor the configured chunk size; the original
                # hard-coded 4096 regardless of ``chunk_size``.
                chunk = environ['wsgi.input'].read(
                    min(remaining, self.chunk_size))
                if not chunk:
                    break
                size += len(chunk)
                remaining -= len(chunk)
                if self.delay:
                    time.sleep(self.delay)
            body = "<html><body>%d bytes</body></html>" % size
        else:
            body = ('<html><body>\n'
                    '<form method="post" enctype="multipart/form-data">\n'
                    '<input type="file" name="file">\n'
                    '<input type="submit" >\n'
                    '</form></body></html>\n')
            print("bingles")
        # WSGI requires header values to be strings; the original passed
        # the raw int for Content-Length.
        start_response("200 OK", [('Content-Type', 'text/html'),
                                  ('Content-Length', str(len(body)))])
        return [body]
+
def make_test_app(global_conf):
    # Paste Deploy app factory for SimpleApplication; the docstring is
    # copied from the class below.
    return SimpleApplication()

make_test_app.__doc__ = SimpleApplication.__doc__
+
def make_slow_app(global_conf, chunk_size=4096, delay=1, progress=True):
    # Paste Deploy app factory for SlowConsumer; config-file values arrive
    # as strings and are coerced here.  Docstring copied from the class.
    from paste.deploy.converters import asbool
    return SlowConsumer(
        chunk_size=int(chunk_size),
        delay=int(delay),
        progress=asbool(progress))

make_slow_app.__doc__ = SlowConsumer.__doc__
diff --git a/paste/debug/doctest_webapp.py b/paste/debug/doctest_webapp.py
new file mode 100755
index 0000000..ffcfaa7
--- /dev/null
+++ b/paste/debug/doctest_webapp.py
@@ -0,0 +1,432 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+#!/usr/bin/env python2.4
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+These are functions for use when doctest-testing a document.
+"""
+
+import subprocess
+import doctest
+import os
+import sys
+import shutil
+import re
+import cgi
+import rfc822
+from cStringIO import StringIO
+from paste.util import PySourceColor
+
+
# Absolute path of this module; ``paste_parent`` is the directory three
# levels up (the checkout containing the ``paste`` package), used when
# building PATH/PYTHONPATH for subprocesses in _make_env().
here = os.path.abspath(__file__)
paste_parent = os.path.dirname(
    os.path.dirname(os.path.dirname(here)))
+
def run(command):
    """Run a shell command and print its cleaned-up output, if any."""
    output = run_raw(command)
    if output:
        print(output)
+
def run_raw(command):
    """
    Runs the string command, returns any output.
    """
    proc = subprocess.Popen(command, shell=True,
                            stderr=subprocess.STDOUT,
                            stdout=subprocess.PIPE, env=_make_env())
    data = proc.stdout.read()
    proc.wait()
    # Strip every trailing newline/carriage return, then drop blank lines.
    while data.endswith('\n') or data.endswith('\r'):
        data = data[:-1]
    if not data:
        return ''
    return '\n'.join(line for line in data.splitlines() if line)
+
def run_command(command, name, and_print=False):
    """Run ``command``, record a shell transcript resource under ``name``,
    and optionally echo the output."""
    output = run_raw(command)
    transcript = '$ %s\n%s' % (command, output)
    show_file('shell-command', name, description='shell transcript',
              data=transcript)
    if and_print and output:
        print(output)
+
def _make_env():
    """Return a copy of os.environ with the Paste script directories
    appended to PATH and the checkout root appended to PYTHONPATH."""
    env = os.environ.copy()
    script_dirs = [
        os.path.join(paste_parent, 'scripts'),
        os.path.join(paste_parent, 'paste', '3rd-party',
                     'sqlobject-files', 'scripts'),
    ]
    env['PATH'] = env.get('PATH', '') + ':' + ':'.join(script_dirs)
    env['PYTHONPATH'] = env.get('PYTHONPATH', '') + ':' + paste_parent
    return env
+
def clear_dir(dir):
    """
    Clears (deletes) the given directory
    """
    # ignore_errors=True mirrors the original rmtree(dir, True).
    shutil.rmtree(dir, ignore_errors=True)
+
def ls(dir=None, recurse=False, indent=0):
    """
    Show a directory listing
    """
    base = dir or os.getcwd()
    for name in sorted(os.listdir(base)):
        full = os.path.join(base, name)
        is_dir = os.path.isdir(full)
        print(' ' * indent + (name + '/' if is_dir else name))
        if is_dir and recurse:
            ls(dir=full, recurse=True, indent=indent + 2)
+
# Module-level defaults used by show(): the WSGI app under test and the
# base URL it is served from.
default_app = None
default_url = None

def set_default_app(app, url):
    """Register the application/url that later show() calls run against."""
    global default_app
    global default_url
    default_app = app
    default_url = url
+
def resource_filename(fn):
    """
    Returns the filename of the resource -- generally in the directory
    resources/DocumentName/fn
    """
    doc = sys.testing_document_filename
    doc_dir = os.path.dirname(doc)
    doc_name = os.path.splitext(os.path.basename(doc))[0]
    return os.path.join(doc_dir, 'resources', doc_name, fn)
+
def show(path_info, example_name):
    """Serve ``path_info`` through ``paster serve --server=console`` and
    compare the rendered page against the stored resource
    ``resources/<doc>/<example_name>.html`` (writing it on first run).
    """
    fn = resource_filename(example_name + '.html')
    out = StringIO()
    assert default_app is not None, (
        "No default_app set")
    url = default_url + path_info
    out.write('<span class="doctest-url"><a href="%s">%s</a></span><br>\n'
              % (url, url))
    out.write('<div class="doctest-example">\n')
    proc = subprocess.Popen(
        # Bug fix: the original was missing a comma, so 'serve' and
        # '--server=console' were concatenated into the single argument
        # 'serve--server=console'.
        ['paster', 'serve', '--server=console', '--no-verbose',
         '--url=' + path_info],
        stderr=subprocess.PIPE,
        stdout=subprocess.PIPE,
        env=_make_env())
    stdout, errors = proc.communicate()
    stdout = StringIO(stdout)
    headers = rfc822.Message(stdout)
    content = stdout.read()
    for header, value in headers.items():
        # Hide uninteresting/volatile headers from the transcript.
        if header.lower() == 'status' and int(value.split()[0]) == 200:
            continue
        if header.lower() in ('content-type', 'content-length'):
            continue
        if (header.lower() == 'set-cookie'
                and value.startswith('_SID_')):
            continue
        out.write('<span class="doctest-header">%s: %s</span><br>\n'
                  % (header, value))
    lines = [l for l in content.splitlines() if l.strip()]
    for line in lines:
        out.write(line + '\n')
    if errors:
        out.write('<pre class="doctest-errors">%s</pre>'
                  % errors)
    out.write('</div>\n')
    result = out.getvalue()
    if not os.path.exists(fn):
        # First run: record the page as the expected output.
        f = open(fn, 'wb')
        f.write(result)
        f.close()
    else:
        f = open(fn, 'rb')
        expected = f.read()
        f.close()
        if not html_matches(expected, result):
            print('Pages did not match.  Expected from %s:' % fn)
            print('-'*60)
            print(expected)
            print('='*60)
            print('Actual output:')
            print('-'*60)
            print(result)
+
def html_matches(pattern, text):
    """Return a match object if ``text`` matches ``pattern``.

    ``...`` in the pattern acts as a wildcard, and hex literals such as
    ``0x1f`` are treated as wildcards too (object addresses vary).
    """
    regex = re.escape(pattern)
    regex = regex.replace(r'\.\.\.', '.*')
    regex = re.sub(r'0x[0-9a-f]+', '.*', regex)
    return re.search('^%s$' % regex, text)
+
def convert_docstring_string(data):
    """Normalize a docstring-embedded file body: drop a leading newline,
    turn lines consisting of a bare ``.`` into blank lines, and ensure a
    trailing newline."""
    if data.startswith('\n'):
        data = data[1:]
    converted = [('' if line.rstrip() == '.' else line)
                 for line in data.splitlines()]
    return '\n'.join(converted) + '\n'
+
def create_file(path, version, data):
    """Write docstring-normalized ``data`` to ``path`` and record the file
    as a generated resource."""
    contents = convert_docstring_string(data)
    write_data(path, contents)
    show_file(path, version)
+
def append_to_file(path, version, data):
    """Append docstring-normalized ``data`` to ``path`` and record the
    addition as a generated resource."""
    data = convert_docstring_string(data)
    with open(path, 'a') as f:
        f.write(data)
    # Appends can land within the same clock second, so a stale .pyc would
    # not look expired; remove it outright.
    if path.endswith('.py'):
        pyc_file = path + 'c'
        if os.path.exists(pyc_file):
            os.unlink(pyc_file)
    show_file(path, version, description='added to %s' % path,
              data=data)
+
def show_file(path, version, description=None, data=None):
    """Render ``path`` (or the supplied ``data``) as HTML and store it as
    a generated resource named ``<path>.<version>.gen.html``."""
    if data is None:
        with open(path, 'rb') as f:
            data = f.read()
    if os.path.splitext(path)[1] == '.py':
        # Python source gets syntax coloring; everything else is escaped.
        html = ('<div class="source-code">%s</div>'
                % PySourceColor.str2html(data, PySourceColor.dark))
    else:
        html = '<pre class="source-code">%s</pre>' % cgi.escape(data, 1)
    html = '<span class="source-filename">%s</span><br>%s' % (
        description or path, html)
    write_data(resource_filename('%s.%s.gen.html' % (path, version)),
               html)
+
def call_source_highlight(input, format):
    """Pipe ``input`` through the external ``source-highlight`` tool and
    return the highlighted HTML fragment."""
    proc = subprocess.Popen(['source-highlight', '--out-format=html',
                             '--no-doc', '--css=none',
                             '--src-lang=%s' % format], shell=False,
                            stdout=subprocess.PIPE)
    stdout, _ = proc.communicate(input)
    proc.wait()
    return stdout
+
+
def write_data(path, data):
    """Write ``data`` to ``path`` in binary mode, creating any missing
    parent directories."""
    parent = os.path.dirname(os.path.abspath(path))
    if not os.path.exists(parent):
        os.makedirs(parent)
    with open(path, 'wb') as f:
        f.write(data)
+
+
def change_file(path, changes):
    """Apply ``(change_type, line, text)`` edits to the lines of ``path``.

    ``('insert', line, text)`` inserts ``text`` before line index ``line``;
    ``('delete', start, stop)`` removes ``lines[start:stop]``.
    """
    # Bug fix: the original opened in binary mode and then joined the
    # resulting bytes lines with a str separator, which raises TypeError
    # on Python 3.  Text mode works on both Python 2 and 3 for line edits.
    f = open(os.path.abspath(path), 'r')
    lines = f.readlines()
    f.close()
    for change_type, line, text in changes:
        if change_type == 'insert':
            lines[line:line] = [text]
        elif change_type == 'delete':
            lines[line:text] = []
        else:
            assert 0, (
                "Unknown change_type: %r" % change_type)
    f = open(path, 'w')
    f.write(''.join(lines))
    f.close()
+
class LongFormDocTestParser(doctest.DocTestParser):

    """
    This parser recognizes some reST comments as commands, without
    prompts or expected output, like:

    .. run:

        do_this(...
        ...)
    """

    _EXAMPLE_RE = re.compile(r"""
        # Source consists of a PS1 line followed by zero or more PS2 lines.
        (?: (?P<source>
                (?:^(?P<indent> [ ]*) >>>    .*)    # PS1 line
                (?:\n           [ ]*  \.\.\. .*)*)  # PS2 lines
            \n?
            # Want consists of any non-blank lines that do not start with PS1.
            (?P<want> (?:(?![ ]*$)      # Not a blank line
                         (?![ ]*>>>)    # Not a line starting with PS1
                         .*$\n?         # But any other line
                      )*))
        |
        (?: # This is for longer commands that are prefixed with a reST
            # comment like '.. run:' (two colons makes that a directive).
            # These commands cannot have any output.

            (?:^\.\.[ ]*(?P<run>run):[ ]*\n) # Leading command/command
            (?:[ ]*\n)?                      # Blank line following
            (?P<runsource>
                (?:(?P<runindent> [ ]+)[^ ].*$)
                (?:\n [ ]+ .*)*)
            )
        |
        (?: # This is for shell commands

            (?P<shellsource>
                # Bug fix: the original wrote (P<shellindent> ...), which
                # is a plain group matching the literal text
                # "P<shellindent>"; the named group needs (?P<...>).
                (?:^(?P<shellindent> [ ]*) [$] .*)   # Shell line
                (?:\n       [ ]*   [>] .*)*)         # Continuation
            \n?
            # Want consists of any non-blank lines that do not start with $
            (?P<shellwant> (?:(?![ ]*$)
                              (?![ ]*[$]$)
                              .*$\n?
                           )*))
        """, re.MULTILINE | re.VERBOSE)

    def _parse_example(self, m, name, lineno):
        r"""
        Given a regular expression match from `_EXAMPLE_RE` (`m`),
        return a pair `(source, want)`, where `source` is the matched
        example's source code (with prompts and indentation stripped);
        and `want` is the example's expected output (with indentation
        stripped).

        `name` is the string's name, and `lineno` is the line number
        where the example starts; both are used for error messages.

        >>> def parseit(s):
        ...     p = LongFormDocTestParser()
        ...     return p._parse_example(p._EXAMPLE_RE.search(s), '<string>', 1)
        >>> parseit('>>> 1\n1')
        ('1', {}, '1', None)
        >>> parseit('>>> (1\n... +1)\n2')
        ('(1\n+1)', {}, '2', None)
        >>> parseit('.. run:\n\n    test1\n    test2\n')
        ('test1\ntest2', {}, '', None)
        """
        # Get the example's indentation level.
        runner = m.group('run') or ''
        indent = len(m.group('%sindent' % runner))

        # Divide source into lines; check that they're properly
        # indented; and then strip their indentation & prompts.
        source_lines = m.group('%ssource' % runner).split('\n')
        if runner:
            self._check_prefix(source_lines[1:], ' '*indent, name, lineno)
        else:
            self._check_prompt_blank(source_lines, indent, name, lineno)
            self._check_prefix(source_lines[2:], ' '*indent + '.', name, lineno)
        if runner:
            source = '\n'.join([sl[indent:] for sl in source_lines])
        else:
            source = '\n'.join([sl[indent+4:] for sl in source_lines])

        if runner:
            want = ''
            exc_msg = None
        else:
            # Divide want into lines; check that it's properly indented; and
            # then strip the indentation.  Spaces before the last newline should
            # be preserved, so plain rstrip() isn't good enough.
            want = m.group('want')
            want_lines = want.split('\n')
            if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
                del want_lines[-1]  # forget final newline & spaces after it
            self._check_prefix(want_lines, ' '*indent, name,
                               lineno + len(source_lines))
            want = '\n'.join([wl[indent:] for wl in want_lines])

            # If `want` contains a traceback message, then extract it.
            m = self._EXCEPTION_RE.match(want)
            if m:
                exc_msg = m.group('msg')
            else:
                exc_msg = None

        # Extract options from the source.
        options = self._find_options(source, name, lineno)

        return source, options, want, exc_msg


    def parse(self, string, name='<string>'):
        """
        Divide the given string into examples and intervening text,
        and return them as a list of alternating Examples and strings.
        Line numbers for the Examples are 0-based.  The optional
        argument `name` is a name identifying this string, and is only
        used for error messages.
        """
        string = string.expandtabs()
        # If all lines begin with the same indentation, then strip it.
        min_indent = self._min_indent(string)
        if min_indent > 0:
            string = '\n'.join([l[min_indent:] for l in string.split('\n')])

        output = []
        charno, lineno = 0, 0
        # Find all doctest examples in the string:
        for m in self._EXAMPLE_RE.finditer(string):
            # Add the pre-example text to `output`.
            output.append(string[charno:m.start()])
            # Update lineno (lines before this example)
            lineno += string.count('\n', charno, m.start())
            # Extract info from the regexp match.
            (source, options, want, exc_msg) = \
                self._parse_example(m, name, lineno)
            # Create an Example, and add it to the list.
            if not self._IS_BLANK_OR_COMMENT(source):
                # @@: Erg, this is the only line I need to change...
                output.append(doctest.Example(
                    source, want, exc_msg,
                    lineno=lineno,
                    indent=min_indent+len(m.group('indent') or m.group('runindent')),
                    options=options))
            # Update lineno (lines inside this example)
            lineno += string.count('\n', m.start(), m.end())
            # Update charno.
            charno = m.end()
        # Add any remaining post-example text to `output`.
        output.append(string[charno:])
        return output
+
+
+
if __name__ == '__main__':
    # 'doctest' argument: self-test this module and exit.
    if sys.argv[1:] and sys.argv[1] == 'doctest':
        doctest.testmod()
        sys.exit()
    if not paste_parent in sys.path:
        sys.path.append(paste_parent)
    # Otherwise doctest each named document with the long-form parser,
    # then render the document to HTML with docutils' rst2html.py.
    for fn in sys.argv[1:]:
        fn = os.path.abspath(fn)
        # @@: OK, ick; but this module gets loaded twice
        sys.testing_document_filename = fn
        doctest.testfile(
            fn, module_relative=False,
            optionflags=doctest.ELLIPSIS|doctest.REPORT_ONLY_FIRST_FAILURE,
            parser=LongFormDocTestParser())
        new = os.path.splitext(fn)[0] + '.html'
        assert new != fn
        os.system('rst2html.py %s > %s' % (fn, new))
diff --git a/paste/debug/fsdiff.py b/paste/debug/fsdiff.py
new file mode 100644
index 0000000..6f9ec2d
--- /dev/null
+++ b/paste/debug/fsdiff.py
@@ -0,0 +1,408 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Module to find differences over time in a filesystem
+
+Basically this takes a snapshot of a directory, then sees what changes
+were made. The contents of the files are not checked, so you can
+detect that the content was changed, but not what the old version of
+the file was.
+"""
+
+import os
+from fnmatch import fnmatch
+from datetime import datetime
+
try:
    # Python 3: UserDict lives in the collections module.  Bug fix: the
    # original used ``import collections.UserDict as IterableUserDict``,
    # which always fails (UserDict is a class, not a submodule), so the
    # Python 2 fallback ran -- and then failed too on Python 3, leaving
    # this module unimportable there.
    from collections import UserDict as IterableUserDict
except ImportError:
    # Python 2.5-2.7
    from UserDict import IterableUserDict
+import operator
+import re
+
+__all__ = ['Diff', 'Snapshot', 'File', 'Dir', 'report_expected_diffs',
+ 'show_diff']
+
class Diff(object):

    """
    Represents the difference between two snapshots
    """

    def __init__(self, before, after):
        self.before = before
        self.after = after
        self._calculate()

    def _calculate(self):
        # Classify each path as created, deleted, or updated by comparing
        # the two snapshots' file tables (keyed by relative path).
        before = self.before.data
        after = self.after.data
        self.deleted = {}
        self.updated = {}
        self.created = after.copy()
        for path, f in before.items():
            if path not in after:
                self.deleted[path] = f
                continue
            del self.created[path]
            if f.mtime < after[path].mtime:
                self.updated[path] = after[path]

    def __str__(self):
        return self.report()

    def report(self, header=True, dates=False):
        """Return a human-readable listing of created/deleted/updated
        paths, optionally with sizes suppressed for dirs and mtimes shown."""
        s = []
        if header:
            s.append('Difference in %s from %s to %s:' %
                     (self.before.base_path,
                      self.before.calculated,
                      self.after.calculated))
        for name, files, show_size in [
                ('created', self.created, True),
                ('deleted', self.deleted, True),
                ('updated', self.updated, True)]:
            if files:
                s.append('-- %s: -------------------' % name)
                # Bug fix: dict.items() returns a view on Python 3 and has
                # no .sort(); sorted() works on both 2 and 3.
                files = sorted(files.items())
                last = ''
                for path, f in files:
                    t = ' %s' % _space_prefix(last, path, indent=4,
                                              include_sep=False)
                    last = path
                    if show_size and f.size != 'N/A':
                        t += ' (%s bytes)' % f.size
                    if dates:
                        parts = []
                        if self.before.get(path):
                            parts.append(self.before[path].mtime)
                        if self.after.get(path):
                            parts.append(self.after[path].mtime)
                        t += ' (mtime: %s)' % ('->'.join(map(repr, parts)))
                    s.append(t)
        if len(s) == 1:
            s.append(' (no changes)')
        return '\n'.join(s)
+
class Snapshot(IterableUserDict):

    """
    Represents a snapshot of a set of files.  Has a dictionary-like
    interface, keyed relative to ``base_path``
    """

    def __init__(self, base_path, files=None, ignore_wildcards=(),
                 ignore_paths=(), ignore_hidden=True):
        self.base_path = base_path
        self.ignore_wildcards = ignore_wildcards
        self.ignore_hidden = ignore_hidden
        self.ignore_paths = ignore_paths
        # Timestamp of the last find_files() run (None until scanned).
        self.calculated = None
        # ``data`` is the UserDict backing store: {relative path: File|Dir}.
        self.data = files or {}
        if files is None:
            self.find_files()

    ############################################################
    ## File finding
    ############################################################

    def find_files(self):
        """
        Find all the files under the base path, and put them in
        ``self.data``
        """
        self._find_traverse('', self.data)
        self.calculated = datetime.now()

    def _ignore_file(self, fn):
        # True when ``fn`` (relative path) should be excluded: explicitly
        # listed, hidden (when ignore_hidden), or matching a wildcard.
        if fn in self.ignore_paths:
            return True
        if self.ignore_hidden and os.path.basename(fn).startswith('.'):
            return True
        for pat in self.ignore_wildcards:
            if fnmatch(fn, pat):
                return True
        return False

    def _find_traverse(self, path, result):
        # Recursive walk: record a Dir entry for each directory (except
        # the base itself) and a File entry for each file.
        full = os.path.join(self.base_path, path)
        if os.path.isdir(full):
            if path:
                # Don't actually include the base path
                result[path] = Dir(self.base_path, path)
            for fn in os.listdir(full):
                fn = os.path.join(path, fn)
                if self._ignore_file(fn):
                    continue
                self._find_traverse(fn, result)
        else:
            result[path] = File(self.base_path, path)

    def __repr__(self):
        return '<%s in %r from %r>' % (
            self.__class__.__name__, self.base_path,
            self.calculated or '(no calculation done)')

    def compare_expected(self, expected, comparison=operator.eq,
                         differ=None, not_found=None,
                         include_success=False):
        """
        Compares a dictionary of ``path: content`` to the
        found files.  Comparison is done by equality, or the
        ``comparison(actual_content, expected_content)`` function given.

        Returns dictionary of differences, keyed by path.  Each
        difference is either noted, or the output of
        ``differ(actual_content, expected_content)`` is given.

        If a file does not exist and ``not_found`` is given, then
        ``not_found(path)`` is put in.
        """
        result = {}
        for path in expected:
            # Keys are matched with leading/trailing slashes stripped.
            orig_path = path
            path = path.strip('/')
            if path not in self.data:
                if not_found:
                    msg = not_found(path)
                else:
                    msg = 'not found'
                result[path] = msg
                continue
            expected_content = expected[orig_path]
            file = self.data[path]
            actual_content = file.bytes
            if not comparison(actual_content, expected_content):
                if differ:
                    msg = differ(actual_content, expected_content)
                else:
                    # Default message: describe the size difference only.
                    if len(actual_content) < len(expected_content):
                        msg = 'differ (%i bytes smaller)' % (
                            len(expected_content) - len(actual_content))
                    elif len(actual_content) > len(expected_content):
                        msg = 'differ (%i bytes larger)' % (
                            len(actual_content) - len(expected_content))
                    else:
                        msg = 'diff (same size)'
                result[path] = msg
            elif include_success:
                result[path] = 'same!'
        return result

    def diff_to_now(self):
        """Return a Diff between this snapshot and a fresh scan."""
        return Diff(self, self.clone())

    def clone(self):
        """Return a new Snapshot of the same tree with the same filters
        (triggers a fresh find_files() scan)."""
        return self.__class__(base_path=self.base_path,
                              ignore_wildcards=self.ignore_wildcards,
                              ignore_paths=self.ignore_paths,
                              ignore_hidden=self.ignore_hidden)
+
class File(object):

    """
    Represents a single file found as the result of a command.

    Has attributes:

    ``path``:
        The path of the file, relative to the ``base_path``

    ``full``:
        The full path

    ``stat``:
        The results of ``os.stat``.  Also ``mtime`` and ``size``
        contain the ``.st_mtime`` and ``st_size`` of the stat.

    ``bytes``:
        The contents of the file.

    You may use the ``in`` operator with these objects (tested against
    the contents of the file), and the ``.mustcontain()`` method.
    """

    file = True
    dir = False

    def __init__(self, base_path, path):
        self.base_path = base_path
        self.path = path
        self.full = os.path.join(base_path, path)
        self.stat = os.stat(self.full)
        self.mtime = self.stat.st_mtime
        self.size = self.stat.st_size
        self._bytes = None

    def bytes__get(self):
        # Lazily read and cache the file contents on first access.
        if self._bytes is None:
            with open(self.full, 'rb') as fh:
                self._bytes = fh.read()
        return self._bytes
    bytes = property(bytes__get)

    def __contains__(self, s):
        return s in self.bytes

    def mustcontain(self, s):
        __tracebackhide__ = True
        contents = self.bytes
        if s not in contents:
            print('Could not find %r in:' % s)
            print(contents)
        assert s in contents

    def __repr__(self):
        return '<%s %s:%s>' % (
            self.__class__.__name__,
            self.base_path, self.path)
+
+class Dir(File):
+
+ """
+ Represents a directory created by a command.
+ """
+
+ file = False
+ dir = True
+
+ def __init__(self, base_path, path):
+ self.base_path = base_path
+ self.path = path
+ self.full = os.path.join(base_path, path)
+ self.size = 'N/A'
+ self.mtime = 'N/A'
+
+ def __repr__(self):
+ return '<%s %s:%s>' % (
+ self.__class__.__name__,
+ self.base_path, self.path)
+
+ def bytes__get(self):
+ raise NotImplementedError(
+ "Directory %r doesn't have content" % self)
+
+ bytes = property(bytes__get)
+
+
+def _space_prefix(pref, full, sep=None, indent=None, include_sep=True):
+ """
+ Anything shared by pref and full will be replaced with spaces
+ in full, and full returned.
+
+ Example::
+
+ >>> _space_prefix('/foo/bar', '/foo')
+ ' /bar'
+ """
+ if sep is None:
+ sep = os.path.sep
+ pref = pref.split(sep)
+ full = full.split(sep)
+ padding = []
+ while pref and full and pref[0] == full[0]:
+ if indent is None:
+ padding.append(' ' * (len(full[0]) + len(sep)))
+ else:
+ padding.append(' ' * indent)
+ full.pop(0)
+ pref.pop(0)
+ if padding:
+ if include_sep:
+ return ''.join(padding) + sep + sep.join(full)
+ else:
+ return ''.join(padding) + sep.join(full)
+ else:
+ return sep.join(full)
+
def report_expected_diffs(diffs, colorize=False):
    """
    Takes the output of compare_expected, and returns a string
    description of the differences.
    """
    if not diffs:
        return 'No differences'
    # Bug fix: dict.items() returns a view on Python 3 and has no .sort();
    # sorted() works on both 2 and 3.
    diffs = sorted(diffs.items())
    s = []
    last = ''
    for path, desc in diffs:
        t = _space_prefix(last, path, indent=4, include_sep=False)
        if colorize:
            t = color_line(t, 11)
        last = path
        if len(desc.splitlines()) > 1:
            # Multi-line description: indent it under the path, and color
            # diff lines (+/-) individually when colorizing.
            cur_indent = len(re.search(r'^[ ]*', t).group(0))
            desc = indent(cur_indent+2, desc)
            if colorize:
                t += '\n'
                for line in desc.splitlines():
                    if line.strip().startswith('+'):
                        line = color_line(line, 10)
                    elif line.strip().startswith('-'):
                        line = color_line(line, 9)
                    else:
                        line = color_line(line, 14)
                    t += line+'\n'
            else:
                t += '\n' + desc
        else:
            t += ' '+desc
        s.append(t)
    s.append('Files with differences: %s' % len(diffs))
    return '\n'.join(s)
+
def color_code(foreground=None, background=None):
    """Return an ANSI escape sequence selecting the given colors.

    Colors 0-7 are the standard palette (black, red, green, yellow, blue,
    magenta, cyan, white); add 8 for the high-intensity variant.  With no
    arguments, returns the reset sequence.
    """
    if foreground is None and background is None:
        return '\x1b[0m'  # reset
    codes = []
    if foreground is None:
        codes.append('[39m')
    elif foreground > 7:
        # High intensity: bold plus the bright color code.
        codes.append('[1m')
        codes.append('[%im' % (22+foreground))
    else:
        codes.append('[%im' % (30+foreground))
    codes.append('[49m' if background is None
                 else '[%im' % (40+background))
    return '\x1b' + '\x1b'.join(codes)
+
def color_line(line, foreground=None, background=None):
    """Color ``line``, leaving its leading whitespace uncolored."""
    leading = re.search(r'^(\s*)', line)
    colored = color_code(foreground, background) + line[leading.end():]
    return leading.group(1) + colored + color_code()
+
def indent(indent, text):
    """Prefix every line of ``text`` with ``indent`` spaces."""
    pad = ' ' * indent
    return '\n'.join(pad + line for line in text.splitlines())
+
def show_diff(actual_content, expected_content):
    """Return a short description of how two text bodies differ."""
    def significant_lines(text):
        return [l.strip() for l in text.splitlines() if l.strip()]
    actual_lines = significant_lines(actual_content)
    expected_lines = significant_lines(expected_content)
    if len(actual_lines) == len(expected_lines) == 1:
        return '%r not %r' % (actual_lines[0], expected_lines[0])
    if not actual_lines:
        return 'Empty; should have:\n'+expected_content
    import difflib
    return '\n'.join(difflib.ndiff(actual_lines, expected_lines))
diff --git a/paste/debug/prints.py b/paste/debug/prints.py
new file mode 100644
index 0000000..b660bfa
--- /dev/null
+++ b/paste/debug/prints.py
@@ -0,0 +1,149 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware that displays everything that is printed inline in
+application pages.
+
+Anything printed during the request will get captured and included on
+the page. It will usually be included as a floating element in the
+top right hand corner of the page. If you want to override this
+you can include a tag in your template where it will be placed::
+
+ <pre id="paste-debug-prints"></pre>
+
+You might want to include ``style="white-space: normal"``, as all the
+whitespace will be quoted, and this allows the text to wrap if
+necessary.
+
+"""
+
+from cStringIO import StringIO
+import re
+import cgi
+from paste.util import threadedprint
+from paste import wsgilib
+from paste import response
+import six
+import sys
+
+_threadedprint_installed = False
+
+__all__ = ['PrintDebugMiddleware']
+
class TeeFile(object):
    """File-like object that fans every ``write()`` out to several
    underlying files (e.g. the request's log buffer plus
    ``wsgi.errors``)."""

    def __init__(self, files):
        # Sequence of file-like objects; held by reference, not copied.
        self.files = files

    def write(self, v):
        # WSGI is picky: it wants native strings.  The original code
        # tested ``isinstance(v, unicode)``, which raises NameError on
        # Python 3 where ``unicode`` no longer exists.  Coercing
        # anything that is not already a str covers both versions
        # (on Python 2, unicode text is converted exactly as before).
        if not isinstance(v, str):
            v = str(v)
        for file in self.files:
            file.write(v)
+
class PrintDebugMiddleware(object):

    """
    This middleware captures all the printed statements, and inlines
    them in HTML pages, so that you can see all the (debug-intended)
    print statements in the page itself.

    There are two keys added to the environment to control this:
    ``environ['paste.printdebug_listeners']`` is a list of functions
    that will be called everytime something is printed.

    ``environ['paste.remove_printdebug']`` is a function that, if
    called, will disable printing of output for that request.

    If you have ``replace_stdout=True`` then stdout is replaced, not
    captured.
    """

    # HTML box the captured log is wrapped in when the page has no
    # explicit <pre id="paste-debug-prints"> placeholder.
    log_template = (
        '<pre style="width: 40%%; border: 2px solid #000; white-space: normal; '
        'background-color: #ffd; color: #000; float: right;">'
        '<b style="border-bottom: 1px solid #000">Log messages</b><br>'
        '%s</pre>')

    def __init__(self, app, global_conf=None, force_content_type=False,
                 print_wsgi_errors=True, replace_stdout=False):
        # @@: global_conf should be handled separately and only for
        # the entry point
        self.app = app
        # When true, inject the log even into non-HTML responses.
        self.force_content_type = force_content_type
        if isinstance(print_wsgi_errors, six.string_types):
            # Config-file values arrive as strings; coerce to bool.
            from paste.deploy.converters import asbool
            print_wsgi_errors = asbool(print_wsgi_errors)
        self.print_wsgi_errors = print_wsgi_errors
        self.replace_stdout = replace_stdout
        self._threaded_print_stdout = None

    def __call__(self, environ, start_response):
        global _threadedprint_installed
        if environ.get('paste.testing'):
            # In a testing environment this interception isn't
            # useful:
            return self.app(environ, start_response)
        if (not _threadedprint_installed
            or self._threaded_print_stdout is not sys.stdout):
            # Install the per-thread stdout redirector once, or again if
            # something else has replaced sys.stdout since.
            # @@: Not strictly threadsafe
            _threadedprint_installed = True
            threadedprint.install(leave_stdout=not self.replace_stdout)
            self._threaded_print_stdout = sys.stdout
        removed = []
        def remove_printdebug():
            # Flag (via the closed-over list) that output should be dropped.
            removed.append(None)
        environ['paste.remove_printdebug'] = remove_printdebug
        logged = StringIO()
        listeners = [logged]
        environ['paste.printdebug_listeners'] = listeners
        if self.print_wsgi_errors:
            listeners.append(environ['wsgi.errors'])
        replacement_stdout = TeeFile(listeners)
        threadedprint.register(replacement_stdout)
        try:
            status, headers, body = wsgilib.intercept_output(
                environ, self.app)
            if status is None:
                # Some error occurred
                status = '500 Server Error'
                headers = [('Content-type', 'text/html')]
                start_response(status, headers)
                if not body:
                    body = 'An error occurred'
            content_type = response.header_value(headers, 'content-type')
            if (removed or
                (not self.force_content_type and
                 (not content_type
                  or not content_type.startswith('text/html')))):
                # NOTE(review): replacement_stdout is a TeeFile while
                # logged is a StringIO, so this comparison is always
                # False and the fallback write never runs -- looks like
                # a bug; confirm intent.
                if replacement_stdout == logged:
                    # Then the prints will be lost, unless...
                    environ['wsgi.errors'].write(logged.getvalue())
                # NOTE(review): if status was None above, start_response
                # has already been called once; a second call without
                # exc_info violates WSGI in that path -- confirm.
                start_response(status, headers)
                return [body]
            response.remove_header(headers, 'content-length')
            body = self.add_log(body, logged.getvalue())
            start_response(status, headers)
            return [body]
        finally:
            threadedprint.deregister()

    # Where to splice the log: an explicit placeholder wins, otherwise
    # right after the opening <body> tag.
    _body_re = re.compile(r'<body[^>]*>', re.I)
    _explicit_re = re.compile(r'<pre\s*[^>]*id="paste-debug-prints".*?>',
                              re.I+re.S)

    def add_log(self, html, log):
        """Return *html* with *log* (HTML-escaped) spliced in."""
        if not log:
            return html
        text = cgi.escape(log)
        text = text.replace('\n', '<br>')
        # NOTE(review): the dump this was read from collapses runs of
        # whitespace; upstream may replace two spaces here -- confirm.
        text = text.replace(' ', '&nbsp; ')
        match = self._explicit_re.search(html)
        if not match:
            text = self.log_template % text
            match = self._body_re.search(html)
        if not match:
            return text + html
        else:
            return html[:match.end()] + text + html[match.end():]
diff --git a/paste/debug/profile.py b/paste/debug/profile.py
new file mode 100644
index 0000000..470a54a
--- /dev/null
+++ b/paste/debug/profile.py
@@ -0,0 +1,228 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware that profiles the request and displays profiling
+information at the bottom of each page.
+"""
+
+
+import sys
+import os
+import hotshot
+import hotshot.stats
+import threading
+import cgi
+import six
+import time
+from cStringIO import StringIO
+from paste import response
+
+__all__ = ['ProfileMiddleware', 'profile_decorator']
+
class ProfileMiddleware(object):

    """
    Middleware that profiles all requests.

    All HTML pages will have profiling information appended to them.
    The data is isolated to that single request, and does not include
    data from previous requests.

    This uses the ``hotshot`` module, which affects performance of the
    application. It also runs in a single-threaded mode, so it is
    only usable in development environments.
    """

    # Inline style for the appended <pre> profiling report.
    style = ('clear: both; background-color: #ff9; color: #000; '
             'border: 2px solid #000; padding: 5px;')

    def __init__(self, app, global_conf=None,
                 log_filename='profile.log.tmp',
                 limit=40):
        # NOTE(review): ``hotshot`` exists only on Python 2 (removed in
        # Python 3), so this middleware is Py2-only as written.
        self.app = app
        # Serializes requests: hotshot profiling is not thread-safe.
        self.lock = threading.Lock()
        # Temporary file the raw profile data is written to.
        self.log_filename = log_filename
        # Maximum number of stats rows shown in the report.
        self.limit = limit

    def __call__(self, environ, start_response):
        catch_response = []
        body = []
        def replace_start_response(status, headers, exc_info=None):
            # Capture status/headers for inspection, pass the call
            # through, and hand back body.append as the legacy
            # write() callable.
            catch_response.extend([status, headers])
            start_response(status, headers, exc_info)
            return body.append
        def run_app():
            # Run the wrapped app to completion inside the profiler so
            # the whole response iteration is measured.
            app_iter = self.app(environ, replace_start_response)
            try:
                body.extend(app_iter)
            finally:
                if hasattr(app_iter, 'close'):
                    app_iter.close()
        self.lock.acquire()
        try:
            prof = hotshot.Profile(self.log_filename)
            prof.addinfo('URL', environ.get('PATH_INFO', ''))
            try:
                prof.runcall(run_app)
            finally:
                prof.close()
            body = ''.join(body)
            headers = catch_response[1]
            content_type = response.header_value(headers, 'content-type')
            if content_type is None or not content_type.startswith('text/html'):
                # We can't add info to non-HTML output
                return [body]
            stats = hotshot.stats.load(self.log_filename)
            stats.strip_dirs()
            stats.sort_stats('time', 'calls')
            output = capture_output(stats.print_stats, self.limit)
            output_callers = capture_output(
                stats.print_callers, self.limit)
            body += '<pre style="%s">%s\n%s</pre>' % (
                self.style, cgi.escape(output), cgi.escape(output_callers))
            return [body]
        finally:
            self.lock.release()
+
def capture_output(func, *args, **kw):
    """Call *func* and return everything it printed to stdout.

    Not threadsafe!  (That's okay when ProfileMiddleware uses it,
    though, since it synchronizes itself.)
    """
    buffer = StringIO()
    saved, sys.stdout = sys.stdout, buffer
    try:
        func(*args, **kw)
    finally:
        sys.stdout = saved
    return buffer.getvalue()
+
def profile_decorator(**options):

    """
    Profile a single function call.

    Used around a function, like::

        @profile_decorator(options...)
        def ...

    All calls to the function will be profiled.  The options are
    all keywords, and are:

    log_file:
        The filename to log to (or ``'stdout'`` or ``'stderr'``).
        Default: stderr.
    display_limit:
        Only show the top N items, default: 20.
    sort_stats:
        A list of string-attributes to sort on.  Default
        ``('time', 'calls')``.
    strip_dirs:
        Strip directories/module names from files?  Default True.
    add_info:
        If given, this info will be added to the report (for your
        own tracking).  Default: none.
    log_filename:
        The temporary filename to log profiling data to.  Default;
        ``./profile_data.log.tmp``
    no_profile:
        If true, then don't actually profile anything.  Useful for
        conditional profiling.
    """

    if options.get('no_profile'):
        # Profiling disabled: hand the function back untouched.
        def passthrough(func):
            return func
        return passthrough

    def decorator(func):
        def replacement(*args, **kw):
            # A fresh DecoratedProfile per call keeps option handling
            # self-contained.
            return DecoratedProfile(func, **options)(*args, **kw)
        return replacement
    return decorator
+
class DecoratedProfile(object):

    # Class-level lock: profiling runs are serialized across *all*
    # decorated functions, since hotshot is not thread-safe.
    # NOTE(review): ``hotshot`` is Python 2 only (removed in Py3).
    lock = threading.Lock()

    def __init__(self, func, **options):
        self.func = func
        self.options = options

    def __call__(self, *args, **kw):
        self.lock.acquire()
        try:
            return self.profile(self.func, *args, **kw)
        finally:
            self.lock.release()

    def profile(self, func, *args, **kw):
        """Run *func* under hotshot and append a report to the log file."""
        ops = self.options
        prof_filename = ops.get('log_filename', 'profile_data.log.tmp')
        prof = hotshot.Profile(prof_filename)
        prof.addinfo('Function Call',
                     self.format_function(func, *args, **kw))
        if ops.get('add_info'):
            prof.addinfo('Extra info', ops['add_info'])
        exc_info = None
        try:
            start_time = time.time()
            try:
                result = prof.runcall(func, *args, **kw)
            except:
                # Remember the exception; it is re-raised after the
                # report has been written out below.
                exc_info = sys.exc_info()
            end_time = time.time()
        finally:
            prof.close()
        stats = hotshot.stats.load(prof_filename)
        os.unlink(prof_filename)
        if ops.get('strip_dirs', True):
            stats.strip_dirs()
        stats.sort_stats(*ops.get('sort_stats', ('time', 'calls')))
        display_limit = ops.get('display_limit', 20)
        output = capture_output(stats.print_stats, display_limit)
        output_callers = capture_output(
            stats.print_callers, display_limit)
        # Pick the destination for the textual report.
        output_file = ops.get('log_file')
        if output_file in (None, 'stderr'):
            f = sys.stderr
        elif output_file in ('-', 'stdout'):
            f = sys.stdout
        else:
            f = open(output_file, 'a')
        f.write('\n%s\n' % ('-'*60))
        f.write('Date: %s\n' % time.strftime('%c'))
        f.write('Function call: %s\n'
                % self.format_function(func, *args, **kw))
        f.write('Wall time: %0.2f seconds\n'
                % (end_time - start_time))
        f.write(output)
        f.write(output_callers)
        if output_file not in (None, '-', 'stdout', 'stderr'):
            f.close()
        if exc_info:
            # We captured an exception earlier, now we re-raise it
            six.reraise(exc_info[0], exc_info[1], exc_info[2])
        return result

    def format_function(self, func, *args, **kw):
        """Render the call as ``name(arg, kw=value)`` for the report."""
        # NOTE(review): ``map`` returns an iterator on Python 3, which
        # has no ``.extend`` -- this breaks there; should become
        # ``list(map(repr, args))`` when porting.
        args = map(repr, args)
        args.extend(
            ['%s=%r' % (k, v) for k, v in kw.items()])
        return '%s(%s)' % (func.__name__, ', '.join(args))
+
+
def make_profile_middleware(
    app, global_conf,
    log_filename='profile.log.tmp',
    limit=40):
    """
    Wrap the application in a component that will profile each
    request.  The profiling data is then appended to the output
    of each page.

    Note that this serializes all requests (i.e., removing
    concurrency).  Therefore never use this in production.
    """
    # ``limit`` may arrive as a string from a config file.
    return ProfileMiddleware(
        app, log_filename=log_filename, limit=int(limit))
diff --git a/paste/debug/testserver.py b/paste/debug/testserver.py
new file mode 100755
index 0000000..8044c7c
--- /dev/null
+++ b/paste/debug/testserver.py
@@ -0,0 +1,93 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+WSGI Test Server
+
+This builds upon paste.util.baseserver to customize it for regressions
+where using raw_interactive won't do.
+
+
+"""
+import time
+from paste.httpserver import *
+
class WSGIRegressionServer(WSGIServer):
    """
    A threaded WSGIServer for use in regression testing.  To use this
    module, call serve(application, regression=True), and then call
    server.accept() to let it handle one request.  When finished, use
    server.stop() to shutdown the server.  Note that all pending requests
    are processed before the server shuts down.
    """
    # Seconds of inactivity before serve_pending gives up.
    defaulttimeout = 10
    def __init__ (self, *args, **kwargs):
        WSGIServer.__init__(self, *args, **kwargs)
        # Lists used as cross-thread flags/counters: append/pop are
        # atomic under the GIL, so no explicit locking is needed.
        self.stopping = []
        self.pending = []
        self.timeout = self.defaulttimeout
        # this is a local connection, be quick
        self.socket.settimeout(2)
    def serve_forever(self):
        # Run the request loop in a background thread so the tester's
        # thread stays free to drive accept()/stop().
        from threading import Thread
        thread = Thread(target=self.serve_pending)
        thread.start()
    def reset_expires(self):
        # Push the inactivity deadline forward.
        if self.timeout:
            self.expires = time.time() + self.timeout
    def close_request(self, *args, **kwargs):
        # One fewer outstanding request; restart the inactivity clock.
        WSGIServer.close_request(self, *args, **kwargs)
        self.pending.pop()
        self.reset_expires()
    def serve_pending(self):
        # Background loop: handle queued requests until stop() is
        # called and the queue drains, or the deadline passes.
        self.reset_expires()
        while not self.stopping or self.pending:
            now = time.time()
            if now > self.expires and self.timeout:
                # note regression test doesn't handle exceptions in
                # threads very well; so we just print and exit
                print("\nWARNING: WSGIRegressionServer timeout exceeded\n")
                break
            if self.pending:
                self.handle_request()
            time.sleep(.1)
    def stop(self):
        """ stop the server (called from tester's thread) """
        self.stopping.append(True)
    def accept(self, count = 1):
        """ accept another request (called from tester's thread) """
        assert not self.stopping
        [self.pending.append(True) for x in range(count)]
+
def serve(application, host=None, port=None, handler=None):
    """Start a WSGIRegressionServer for *application* in the background
    and return it."""
    regression_server = WSGIRegressionServer(application, host, port, handler)
    print("serving on %s:%s" % regression_server.server_address)
    regression_server.serve_forever()
    return regression_server
+
if __name__ == '__main__':
    # Smoke test: serve paste.wsgilib.dump_environ and fetch a couple
    # of URLs through it, one accept() at a time.
    from six.moves.urllib.request import urlopen
    from paste.wsgilib import dump_environ
    server = serve(dump_environ)
    baseuri = ("http://%s:%s" % server.server_address)

    def fetch(path):
        # tell the server to humor exactly one more request
        server.accept(1)
        # not needed; but this is what you do if the server
        # may not respond in a reasonable time period
        import socket
        socket.setdefaulttimeout(5)
        # build a uri, fetch and return
        return urlopen(baseuri + path).read()

    assert "PATH_INFO: /foo" in fetch("/foo")
    assert "PATH_INFO: /womble" in fetch("/womble")

    # ok, let's make one more final request...
    server.accept(1)
    # and then schedule a stop()
    server.stop()
    # and then... fetch it...
    urlopen(baseuri)
diff --git a/paste/debug/watchthreads.py b/paste/debug/watchthreads.py
new file mode 100644
index 0000000..b06ccea
--- /dev/null
+++ b/paste/debug/watchthreads.py
@@ -0,0 +1,347 @@
+"""
+Watches the key ``paste.httpserver.thread_pool`` to see how many
+threads there are and report on any wedged threads.
+"""
+import sys
+import cgi
+import time
+import traceback
+from cStringIO import StringIO
+from thread import get_ident
+from paste import httpexceptions
+from paste.request import construct_url, parse_formvars
+from paste.util.template import HTMLTemplate, bunch
+
+page_template = HTMLTemplate('''
+<html>
+ <head>
+ <style type="text/css">
+ body {
+ font-family: sans-serif;
+ }
+ table.environ tr td {
+ border-bottom: #bbb 1px solid;
+ }
+ table.environ tr td.bottom {
+ border-bottom: none;
+ }
+ table.thread {
+ border: 1px solid #000;
+ margin-bottom: 1em;
+ }
+ table.thread tr td {
+ border-bottom: #999 1px solid;
+ padding-right: 1em;
+ }
+ table.thread tr td.bottom {
+ border-bottom: none;
+ }
+ table.thread tr.this_thread td {
+ background-color: #006;
+ color: #fff;
+ }
+ a.button {
+ background-color: #ddd;
+ border: #aaa outset 2px;
+ text-decoration: none;
+ margin-top: 10px;
+ font-size: 80%;
+ color: #000;
+ }
+ a.button:hover {
+ background-color: #eee;
+ border: #bbb outset 2px;
+ }
+ a.button:active {
+ border: #bbb inset 2px;
+ }
+ </style>
+ <title>{{title}}</title>
+ </head>
+ <body>
+ <h1>{{title}}</h1>
+ {{if kill_thread_id}}
+ <div style="background-color: #060; color: #fff;
+ border: 2px solid #000;">
+ Thread {{kill_thread_id}} killed
+ </div>
+ {{endif}}
+ <div>Pool size: {{nworkers}}
+ {{if actual_workers > nworkers}}
+ + {{actual_workers-nworkers}} extra
+ {{endif}}
+ ({{nworkers_used}} used including current request)<br>
+ idle: {{len(track_threads["idle"])}},
+ busy: {{len(track_threads["busy"])}},
+ hung: {{len(track_threads["hung"])}},
+ dying: {{len(track_threads["dying"])}},
+ zombie: {{len(track_threads["zombie"])}}</div>
+
+{{for thread in threads}}
+
+<table class="thread">
+ <tr {{if thread.thread_id == this_thread_id}}class="this_thread"{{endif}}>
+ <td>
+ <b>Thread</b>
+ {{if thread.thread_id == this_thread_id}}
+ (<i>this</i> request)
+ {{endif}}</td>
+ <td>
+ <b>{{thread.thread_id}}
+ {{if allow_kill}}
+ <form action="{{script_name}}/kill" method="POST"
+ style="display: inline">
+ <input type="hidden" name="thread_id" value="{{thread.thread_id}}">
+ <input type="submit" value="kill">
+ </form>
+ {{endif}}
+ </b>
+ </td>
+ </tr>
+ <tr>
+ <td>Time processing request</td>
+ <td>{{thread.time_html|html}}</td>
+ </tr>
+ <tr>
+ <td>URI</td>
+ <td>{{if thread.uri == 'unknown'}}
+ unknown
+ {{else}}<a href="{{thread.uri}}">{{thread.uri_short}}</a>
+ {{endif}}
+ </td>
+ <tr>
+ <td colspan="2" class="bottom">
+ <a href="#" class="button" style="width: 9em; display: block"
+ onclick="
+ var el = document.getElementById('environ-{{thread.thread_id}}');
+ if (el.style.display) {
+ el.style.display = '';
+ this.innerHTML = \'&#9662; Hide environ\';
+ } else {
+ el.style.display = 'none';
+ this.innerHTML = \'&#9656; Show environ\';
+ }
+ return false
+ ">&#9656; Show environ</a>
+
+ <div id="environ-{{thread.thread_id}}" style="display: none">
+ {{if thread.environ:}}
+ <table class="environ">
+ {{for loop, item in looper(sorted(thread.environ.items()))}}
+ {{py:key, value=item}}
+ <tr>
+ <td {{if loop.last}}class="bottom"{{endif}}>{{key}}</td>
+ <td {{if loop.last}}class="bottom"{{endif}}>{{value}}</td>
+ </tr>
+ {{endfor}}
+ </table>
+ {{else}}
+ Thread is in process of starting
+ {{endif}}
+ </div>
+
+ {{if thread.traceback}}
+ <a href="#" class="button" style="width: 9em; display: block"
+ onclick="
+ var el = document.getElementById('traceback-{{thread.thread_id}}');
+ if (el.style.display) {
+ el.style.display = '';
+ this.innerHTML = \'&#9662; Hide traceback\';
+ } else {
+ el.style.display = 'none';
+ this.innerHTML = \'&#9656; Show traceback\';
+ }
+ return false
+ ">&#9656; Show traceback</a>
+
+ <div id="traceback-{{thread.thread_id}}" style="display: none">
+ <pre class="traceback">{{thread.traceback}}</pre>
+ </div>
+ {{endif}}
+
+ </td>
+ </tr>
+</table>
+
+{{endfor}}
+
+ </body>
+</html>
+''', name='watchthreads.page_template')
+
class WatchThreads(object):

    """
    Application that watches the threads in ``paste.httpserver``,
    showing the length each thread has been working on a request.

    If allow_kill is true, then you can kill errant threads through
    this application.

    This application can expose private information (specifically in
    the environment, like cookies), so it should be protected.
    """

    def __init__(self, allow_kill=False):
        # Whether the /kill endpoint is enabled.
        self.allow_kill = allow_kill

    def __call__(self, environ, start_response):
        # Dispatch: /kill kills a worker, anything else shows the report.
        if 'paste.httpserver.thread_pool' not in environ:
            start_response('403 Forbidden', [('Content-type', 'text/plain')])
            return ['You must use the threaded Paste HTTP server to use this application']
        if environ.get('PATH_INFO') == '/kill':
            return self.kill(environ, start_response)
        else:
            return self.show(environ, start_response)

    def show(self, environ, start_response):
        """Render the worker-tracker report page."""
        start_response('200 OK', [('Content-type', 'text/html')])
        form = parse_formvars(environ)
        if form.get('kill'):
            # Redirected back here after a kill; show the banner.
            kill_thread_id = form['kill']
        else:
            kill_thread_id = None
        thread_pool = environ['paste.httpserver.thread_pool']
        nworkers = thread_pool.nworkers
        now = time.time()


        # NOTE(review): on Python 3 ``dict.items()`` returns a view
        # with no ``.sort()`` -- needs ``sorted(...)`` when porting.
        workers = thread_pool.worker_tracker.items()
        workers.sort(key=lambda v: v[1][0])
        threads = []
        for thread_id, (time_started, worker_environ) in workers:
            thread = bunch()
            threads.append(thread)
            if worker_environ:
                thread.uri = construct_url(worker_environ)
            else:
                # Worker registered but no request environ captured yet.
                thread.uri = 'unknown'
            thread.thread_id = thread_id
            thread.time_html = format_time(now-time_started)
            thread.uri_short = shorten(thread.uri)
            thread.environ = worker_environ
            thread.traceback = traceback_thread(thread_id)

        page = page_template.substitute(
            title="Thread Pool Worker Tracker",
            nworkers=nworkers,
            actual_workers=len(thread_pool.workers),
            nworkers_used=len(workers),
            script_name=environ['SCRIPT_NAME'],
            kill_thread_id=kill_thread_id,
            allow_kill=self.allow_kill,
            threads=threads,
            this_thread_id=get_ident(),
            track_threads=thread_pool.track_threads())

        return [page]

    def kill(self, environ, start_response):
        """Kill the worker named by the ``thread_id`` form field, then
        redirect back to the report page."""
        if not self.allow_kill:
            exc = httpexceptions.HTTPForbidden(
                'Killing threads has not been enabled. Shame on you '
                'for trying!')
            return exc(environ, start_response)
        vars = parse_formvars(environ)
        thread_id = int(vars['thread_id'])
        thread_pool = environ['paste.httpserver.thread_pool']
        if thread_id not in thread_pool.worker_tracker:
            exc = httpexceptions.PreconditionFailed(
                'You tried to kill thread %s, but it is not working on '
                'any requests' % thread_id)
            return exc(environ, start_response)
        thread_pool.kill_worker(thread_id)
        script_name = environ['SCRIPT_NAME'] or '/'
        exc = httpexceptions.HTTPFound(
            headers=[('Location', script_name+'?kill=%s' % thread_id)])
        return exc(environ, start_response)
+
def traceback_thread(thread_id):
    """
    Returns a plain-text traceback of the given thread, or None if it
    can't get a traceback.
    """
    if not hasattr(sys, '_current_frames'):
        # sys._current_frames() appeared in Python 2.5.
        return None
    frame = sys._current_frames().get(thread_id)
    if frame is None:
        # No such thread (or it has already exited).
        return None
    buf = StringIO()
    traceback.print_stack(frame, file=buf)
    return buf.getvalue()
+
# Environment keys never shown in the report (internal plumbing).
hide_keys = ['paste.httpserver.thread_pool']

def format_environ(environ):
    """Render *environ* as HTML rows via environ_template, skipping
    internal keys and surviving values whose repr() blows up."""
    if environ is None:
        return environ_template.substitute(
            key='---',
            value='No environment registered for this thread yet')
    rows = []
    for key, value in sorted(environ.items()):
        if key in hide_keys:
            continue
        key_html = cgi.escape(str(key))
        try:
            if key.upper() != key:
                # Non-CGI (lowercase) keys hold arbitrary objects;
                # show their repr rather than str.
                value = repr(value)
            rows.append(environ_template.substitute(
                key=key_html,
                value=cgi.escape(str(value))))
        except Exception as e:
            rows.append(environ_template.substitute(
                key=key_html,
                value='Error in <code>repr()</code>: %s' % e))
    return ''.join(rows)
+
def format_time(time_length):
    """Render *time_length* (seconds) as HTML; longer times get
    increasingly alarming styling."""
    seconds = time_length
    if seconds >= 60*60:
        # More than an hour: H:MM:SS
        text = '%i:%02i:%02i' % (int(seconds/60/60),
                                 int(seconds/60) % 60,
                                 seconds % 60)
    elif seconds >= 120:
        text = '%i:%02i' % (int(seconds/60), seconds % 60)
    elif seconds > 60:
        text = '%i sec' % seconds
    elif seconds > 1:
        text = '%0.1f sec' % seconds
    else:
        text = '%0.2f sec' % seconds
    if seconds < 5:
        return text
    if seconds < 120:
        return '<span style="color: #900">%s</span>' % text
    return '<span style="background-color: #600; color: #fff">%s</span>' % text
+
def shorten(s):
    """Abbreviate long strings, keeping the head and the tail."""
    if len(s) <= 60:
        return s
    return s[:40] + '...' + s[-10:]
+
def make_watch_threads(global_conf, allow_kill=False):
    from paste.deploy.converters import asbool
    # ``allow_kill`` may arrive as a config string; normalize to bool.
    kill_allowed = asbool(allow_kill)
    return WatchThreads(allow_kill=kill_allowed)
# Expose the application's docstring through the factory.
make_watch_threads.__doc__ = WatchThreads.__doc__
+
def make_bad_app(global_conf, pause=0):
    """
    Return a WSGI app that wedges its worker thread: it sleeps *pause*
    seconds if given, otherwise loops forever printing a heartbeat --
    useful for exercising the thread watcher.
    """
    pause = int(pause)
    def bad_app(environ, start_response):
        # ``thread`` was renamed ``_thread`` in Python 3; the bare
        # ``import thread`` raised ImportError there.
        try:
            import thread
        except ImportError:
            import _thread as thread
        if pause:
            time.sleep(pause)
        else:
            count = 0
            while 1:
                print("I'm alive %s (%s)" % (count, thread.get_ident()))
                time.sleep(10)
                count += 1
        start_response('200 OK', [('content-type', 'text/plain')])
        return ['OK, paused %s seconds' % pause]
    return bad_app
diff --git a/paste/debug/wdg_validate.py b/paste/debug/wdg_validate.py
new file mode 100644
index 0000000..225baf9
--- /dev/null
+++ b/paste/debug/wdg_validate.py
@@ -0,0 +1,118 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware that tests the validity of all generated HTML using the
+`WDG HTML Validator <http://www.htmlhelp.com/tools/validator/>`_
+"""
+
+from cStringIO import StringIO
+import subprocess
+from paste.response import header_value
+import re
+import cgi
+
+__all__ = ['WDGValidateMiddleware']
+
class WDGValidateMiddleware(object):

    """
    Middleware that checks HTML and appends messages about the validity of
    the HTML. Uses: http://www.htmlhelp.com/tools/validator/ -- interacts
    with the command line client. Use the configuration ``wdg_path`` to
    override the path (default: looks for ``validate`` in $PATH).

    To install, in your web context's __init__.py::

        def urlparser_wrap(environ, start_response, app):
            return wdg_validate.WDGValidateMiddleware(app)(
                environ, start_response)

    Or in your configuration::

        middleware.append('paste.wdg_validate.WDGValidateMiddleware')
    """

    # Validator output is spliced in just before </body> when present.
    _end_body_regex = re.compile(r'</body>', re.I)

    def __init__(self, app, global_conf=None, wdg_path='validate'):
        self.app = app
        # Path to the ``validate`` executable.
        self.wdg_path = wdg_path

    def __call__(self, environ, start_response):
        # Buffer the whole response so it can be validated as one page.
        output = StringIO()
        response = []

        def writer_start_response(status, headers, exc_info=None):
            response.extend((status, headers))
            start_response(status, headers, exc_info)
            return output.write

        app_iter = self.app(environ, writer_start_response)
        try:
            for s in app_iter:
                output.write(s)
        finally:
            if hasattr(app_iter, 'close'):
                app_iter.close()
        page = output.getvalue()
        status, headers = response
        v = header_value(headers, 'content-type') or ''
        if (not v.startswith('text/html')
            and not v.startswith('text/xhtml')
            and not v.startswith('application/xhtml')):
            # Can't validate
            # @@: Should validate CSS too... but using what?
            return [page]
        ops = []
        if v.startswith('text/xhtml+xml'):
            ops.append('--xml')
        # @@: Should capture encoding too
        html_errors = self.call_wdg_validate(
            self.wdg_path, ops, page)
        if html_errors:
            page = self.add_error(page, html_errors)[0]
            # NOTE(review): this remove() raises ValueError when the
            # response carries no Content-Length header (header_value
            # returns None) or the header uses different case -- confirm.
            headers.remove(
                ('Content-Length',
                 str(header_value(headers, 'content-length'))))
            headers.append(('Content-Length', str(len(page))))
        return [page]

    def call_wdg_validate(self, wdg_path, ops, page):
        """Run the external validator on *page* and return its output."""
        if subprocess is None:
            # Vestigial guard from when subprocess was imported
            # conditionally; the module-level import is unconditional.
            raise ValueError(
                "This middleware requires the subprocess module from "
                "Python 2.4")
        # NOTE(review): on Python 3, communicate() wants bytes here but
        # *page* is a str -- confirm when porting.
        proc = subprocess.Popen([wdg_path] + ops,
                                shell=False,
                                close_fds=True,
                                stdout=subprocess.PIPE,
                                stdin=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        stdout = proc.communicate(page)[0]
        proc.wait()
        return stdout

    def add_error(self, html_page, html_errors):
        """Splice the escaped validator report into the page as a <pre>."""
        add_text = ('<pre style="background-color: #ffd; color: #600; '
                    'border: 1px solid #000;">%s</pre>'
                    % cgi.escape(html_errors))
        match = self._end_body_regex.search(html_page)
        if match:
            return [html_page[:match.start()]
                    + add_text
                    + html_page[match.start():]]
        else:
            return [html_page + add_text]
+
def make_wdg_validate_middleware(
    app, global_conf, wdg_path='validate'):
    """
    Wraps the application in the WDG validator from
    http://www.htmlhelp.com/tools/validator/

    Validation errors are appended to the text of each page.
    You can configure this by giving the path to the validate
    executable (by default picked up from $PATH)
    """
    return WDGValidateMiddleware(app, global_conf, wdg_path=wdg_path)
diff --git a/paste/errordocument.py b/paste/errordocument.py
new file mode 100644
index 0000000..34f2d4a
--- /dev/null
+++ b/paste/errordocument.py
@@ -0,0 +1,389 @@
+# (c) 2005-2006 James Gardner <james@pythonweb.org>
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware to display error documents for certain status codes
+
+The middleware in this module can be used to intercept responses with
+specified status codes and internally forward the request to an appropriate
+URL where the content can be displayed to the user as an error document.
+"""
+
+import warnings
+import sys
+from six.moves.urllib import parse as urlparse
+from paste.recursive import ForwardRequestException, RecursiveMiddleware, RecursionLoop
+from paste.util import converters
+from paste.response import replace_header
+import six
+
+def forward(app, codes):
+    """
+    Intercepts a response with a particular status code and returns the
+    content from a specified URL instead.
+
+    The arguments are:
+
+    ``app``
+        The WSGI application or middleware chain.
+
+    ``codes``
+        A dictionary of integer status codes and the URL to be displayed
+        if the response uses that code.
+
+    For example, you might want to create a static file to display a
+    "File Not Found" message at the URL ``/error404.html`` and then use
+    ``forward`` middleware to catch all 404 status codes and display the page
+    you created. In this example ``app`` is your existing WSGI
+    application::
+
+        from paste.errordocument import forward
+        app = forward(app, codes={404:'/error404.html'})
+
+    """
+    # Validate up front so misconfiguration (e.g. string keys from a
+    # config file) fails at wrap time, not at request time.
+    for code in codes:
+        if not isinstance(code, int):
+            raise TypeError('All status codes should be type int. '
+                '%s is not valid'%repr(code))
+
+    # Mapper used by StatusBasedForward: return the configured URL for
+    # intercepted codes, None to pass the response through untouched.
+    def error_codes_mapper(code, message, environ, global_conf, codes):
+        if code in codes:
+            return codes[code]
+        else:
+            return None
+
+    #return _StatusBasedRedirect(app, error_codes_mapper, codes=codes)
+    return RecursiveMiddleware(
+        StatusBasedForward(
+            app,
+            error_codes_mapper,
+            codes=codes,
+        )
+    )
+
+class StatusKeeper(object):
+    """WSGI app that serves *url* via *app* while replaying a saved status.
+
+    Used by ``StatusBasedForward``: the body comes from the internally
+    forwarded error-document URL, but the original error ``status`` and
+    ``headers`` are sent to the client (with headers from the forwarded
+    response merged in).
+    """
+    def __init__(self, app, status, url, headers):
+        self.app = app          # application to fetch the error page from
+        self.status = status    # original error status line to preserve
+        self.url = url          # path (optionally with query string) to fetch
+        self.headers = headers  # original response headers, updated in place
+
+    def __call__(self, environ, start_response):
+        # start_response wrapper that discards the forwarded response's
+        # status and merges its headers into the saved ones; Set-Cookie
+        # may legitimately repeat, everything else replaces.
+        def keep_status_start_response(status, headers, exc_info=None):
+            for header, value in headers:
+                if header.lower() == 'set-cookie':
+                    self.headers.append((header, value))
+                else:
+                    replace_header(self.headers, header, value)
+            return start_response(self.status, self.headers, exc_info)
+        # Rewrite the request to point at the error-document URL.
+        parts = self.url.split('?')
+        environ['PATH_INFO'] = parts[0]
+        if len(parts) > 1:
+            environ['QUERY_STRING'] = parts[1]
+        else:
+            environ['QUERY_STRING'] = ''
+        #raise Exception(self.url, self.status)
+        try:
+            return self.app(environ, keep_status_start_response)
+        except RecursionLoop as e:
+            # The error document itself triggered another forward loop;
+            # log it and fall back to a minimal plain-text body.
+            line = 'Recursion error getting error page: %s\n' % e
+            if six.PY3:
+                line = line.encode('utf8')
+            environ['wsgi.errors'].write(line)
+            keep_status_start_response('500 Server Error', [('Content-type', 'text/plain')], sys.exc_info())
+            body = ('Error: %s. (Error page could not be fetched)'
+                    % self.status)
+            if six.PY3:
+                body = body.encode('utf8')
+            return [body]
+
+
+class StatusBasedForward(object):
+    """
+    Middleware that lets you test a response against a custom mapper object to
+    programmatically determine whether to internally forward to another URL and
+    if so, which URL to forward to.
+
+    If you don't need the full power of this middleware you might choose to use
+    the simpler ``forward`` middleware instead.
+
+    The arguments are:
+
+    ``app``
+        The WSGI application or middleware chain.
+
+    ``mapper``
+        A callable that takes a status code as the
+        first parameter, a message as the second, and accepts optional environ,
+        global_conf and named arguments afterwards. It should return a
+        URL to forward to or ``None`` if the code is not to be intercepted.
+
+    ``global_conf``
+        Optional default configuration from your config file. If ``debug`` is
+        set to ``true`` a message will be written to ``wsgi.errors`` on each
+        internal forward stating the URL forwarded to.
+
+    ``**params``
+        Optional, any other configuration and extra arguments you wish to
+        pass which will in turn be passed back to the custom mapper object.
+
+    Here is an example where a ``404 File Not Found`` status response would be
+    redirected to the URL ``/error?code=404&message=File%20Not%20Found``. This
+    could be useful for passing the status code and message into another
+    application to display an error document:
+
+    .. code-block:: python
+
+        from paste.errordocument import StatusBasedForward
+        from paste.recursive import RecursiveMiddleware
+        from urllib import urlencode
+
+        def error_mapper(code, message, environ, global_conf, kw):
+            if code in [404, 500]:
+                params = urlencode({'message':message, 'code':code})
+                url = '/error?%s' % params
+                return url
+            else:
+                return None
+
+        app = RecursiveMiddleware(
+            StatusBasedForward(app, mapper=error_mapper),
+        )
+
+    """
+
+    def __init__(self, app, mapper, global_conf=None, **params):
+        if global_conf is None:
+            global_conf = {}
+        # @@: global_conf shouldn't really come in here, only in a
+        # separate make_status_based_forward function
+        if global_conf:
+            self.debug = converters.asbool(global_conf.get('debug', False))
+        else:
+            self.debug = False
+        self.application = app
+        self.mapper = mapper
+        self.global_conf = global_conf
+        self.params = params
+
+    def __call__(self, environ, start_response):
+        # Closure cell: change_response records the forward target here so
+        # the outer frame can act on it after the app has been called.
+        url = []
+
+        def change_response(status, headers, exc_info=None):
+            status_code = status.split(' ')
+            try:
+                code = int(status_code[0])
+            except (ValueError, TypeError):
+                raise Exception(
+                    'StatusBasedForward middleware '
+                    'received an invalid status code %s'%repr(status_code[0])
+                )
+            message = ' '.join(status_code[1:])
+            new_url = self.mapper(
+                code,
+                message,
+                environ,
+                self.global_conf,
+                **self.params
+            )
+            if not (new_url == None or isinstance(new_url, str)):
+                raise TypeError(
+                    'Expected the url to internally '
+                    'redirect to in the StatusBasedForward mapper'
+                    'to be a string or None, not %r' % new_url)
+            if new_url:
+                url.append([new_url, status, headers])
+                # We have to allow the app to write stuff, even though
+                # we'll ignore it:
+                # ([].append is a throwaway one-argument callable that
+                # satisfies the WSGI write() contract.)
+                return [].append
+            else:
+                return start_response(status, headers, exc_info)
+
+        app_iter = self.application(environ, change_response)
+        if url:
+            # The response is being intercepted: close the original body
+            # (per the WSGI spec) and hand off to RecursiveMiddleware via
+            # ForwardRequestException, serving the page through StatusKeeper
+            # so the original status/headers are preserved.
+            if hasattr(app_iter, 'close'):
+                app_iter.close()
+
+            def factory(app):
+                return StatusKeeper(app, status=url[0][1], url=url[0][0],
+                                    headers=url[0][2])
+            raise ForwardRequestException(factory=factory)
+        else:
+            return app_iter
+
+def make_errordocument(app, global_conf, **kw):
+    """
+    Paste Deploy entry point to create a error document wrapper.
+
+    Use like::
+
+        [filter-app:main]
+        use = egg:Paste#errordocument
+        next = real-app
+        500 = /lib/msg/500.html
+        404 = /lib/msg/404.html
+    """
+    # Config keys arrive as strings; convert them to int status codes.
+    # NOTE(review): the local name ``map`` shadows the builtin.
+    map = {}
+    for status, redir_loc in kw.items():
+        try:
+            status = int(status)
+        except ValueError:
+            raise ValueError('Bad status code: %r' % status)
+        map[status] = redir_loc
+    forwarder = forward(app, map)
+    return forwarder
+
+# Names exposed to the "pudge" documentation generator.
+# NOTE(review): 'empty_error' and 'make_empty_error' are listed here but do
+# not appear to be defined in this module as added by this patch -- verify.
+__pudge_all__ = [
+    'forward',
+    'make_errordocument',
+    'empty_error',
+    'make_empty_error',
+    'StatusBasedForward',
+]
+
+
+###############################################################################
+## Deprecated
+###############################################################################
+
+def custom_forward(app, mapper, global_conf=None, **kw):
+    """
+    Deprecated; use StatusBasedForward instead.
+    """
+    warnings.warn(
+        "errordocuments.custom_forward has been deprecated; please "
+        "use errordocuments.StatusBasedForward",
+        DeprecationWarning, 2)
+    if global_conf is None:
+        global_conf = {}
+    return _StatusBasedRedirect(app, mapper, global_conf, **kw)
+
+class _StatusBasedRedirect(object):
+    """
+    Deprecated; use StatusBasedForward instead.
+    """
+    def __init__(self, app, mapper, global_conf=None, **kw):
+
+        warnings.warn(
+            "errordocuments._StatusBasedRedirect has been deprecated; please "
+            "use errordocuments.StatusBasedForward",
+            DeprecationWarning, 2)
+
+        if global_conf is None:
+            global_conf = {}
+        self.application = app
+        self.mapper = mapper
+        self.global_conf = global_conf
+        self.kw = kw
+        # Minimal HTML shown when producing the real error document fails.
+        self.fallback_template = """
+        <html>
+        <head>
+        <title>Error %(code)s</title>
+        </html>
+        <body>
+        <h1>Error %(code)s</h1>
+        <p>%(message)s</p>
+        <hr>
+        <p>
+        Additionally an error occurred trying to produce an
+        error document. A description of the error was logged
+        to <tt>wsgi.errors</tt>.
+        </p>
+        </body>
+        </html>
+        """
+
+    def __call__(self, environ, start_response):
+        # Closure cells written by change_response below.
+        url = []
+        code_message = []
+        try:
+            # start_response wrapper: ask the mapper whether this status
+            # should be redirected to an error document URL.
+            def change_response(status, headers, exc_info=None):
+                new_url = None
+                parts = status.split(' ')
+                try:
+                    code = int(parts[0])
+                except (ValueError, TypeError):
+                    raise Exception(
+                        '_StatusBasedRedirect middleware '
+                        'received an invalid status code %s'%repr(parts[0])
+                    )
+                message = ' '.join(parts[1:])
+                new_url = self.mapper(
+                    code,
+                    message,
+                    environ,
+                    self.global_conf,
+                    self.kw
+                )
+                if not (new_url == None or isinstance(new_url, str)):
+                    raise TypeError(
+                        'Expected the url to internally '
+                        'redirect to in the _StatusBasedRedirect error_mapper'
+                        'to be a string or None, not %s'%repr(new_url)
+                    )
+                if new_url:
+                    url.append(new_url)
+                code_message.append([code, message])
+                return start_response(status, headers, exc_info)
+            app_iter = self.application(environ, change_response)
+        except:
+            # Anything went wrong while producing the original response or
+            # mapping it: log to wsgi.errors and serve the fallback page.
+            try:
+                import sys
+                error = str(sys.exc_info()[1])
+            except:
+                error = ''
+            try:
+                code, message = code_message[0]
+            except:
+                code, message = ['', '']
+            environ['wsgi.errors'].write(
+                'Error occurred in _StatusBasedRedirect '
+                'intercepting the response: '+str(error)
+            )
+            return [self.fallback_template
+                    % {'message': message, 'code': code}]
+        else:
+            if url:
+                url_ = url[0]
+                new_environ = {}
+                # NOTE(review): this condition looks inverted.  As written,
+                # every key EXCEPT 'QUERY_STRING' overwrites
+                # new_environ['QUERY_STRING'], and only the original
+                # QUERY_STRING key is copied through -- presumably the intent
+                # was the opposite (copy everything, replace QUERY_STRING
+                # from url_).  Deprecated code path; left as-is, verify.
+                for k, v in environ.items():
+                    if k != 'QUERY_STRING':
+                        new_environ['QUERY_STRING'] = urlparse.urlparse(url_)[4]
+                    else:
+                        new_environ[k] = v
+                class InvalidForward(Exception):
+                    pass
+                def eat_start_response(status, headers, exc_info=None):
+                    """
+                    We don't want start_response to do anything since it
+                    has already been called
+                    """
+                    if status[:3] != '200':
+                        raise InvalidForward(
+                            "The URL %s to internally forward "
+                            "to in order to create an error document did not "
+                            "return a '200' status code." % url_
+                        )
+                forward = environ['paste.recursive.forward']
+                old_start_response = forward.start_response
+                forward.start_response = eat_start_response
+                try:
+                    app_iter = forward(url_, new_environ)
+                except InvalidForward:
+                    code, message = code_message[0]
+                    environ['wsgi.errors'].write(
+                        'Error occurred in '
+                        '_StatusBasedRedirect redirecting '
+                        'to new URL: '+str(url[0])
+                    )
+                    return [
+                        self.fallback_template%{
+                            'message':message,
+                            'code':code,
+                        }
+                    ]
+                else:
+                    # Restore the original start_response before returning
+                    # the forwarded body.
+                    forward.start_response = old_start_response
+                    return app_iter
+            else:
+                return app_iter
diff --git a/paste/evalexception/__init__.py b/paste/evalexception/__init__.py
new file mode 100644
index 0000000..a19cf85
--- /dev/null
+++ b/paste/evalexception/__init__.py
@@ -0,0 +1,7 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+An exception handler for interactive debugging
+"""
+from paste.evalexception.middleware import EvalException
+
diff --git a/paste/evalexception/evalcontext.py b/paste/evalexception/evalcontext.py
new file mode 100644
index 0000000..42f2efa
--- /dev/null
+++ b/paste/evalexception/evalcontext.py
@@ -0,0 +1,69 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+from six.moves import cStringIO as StringIO
+import traceback
+import threading
+import pdb
+import six
+import sys
+
+exec_lock = threading.Lock()
+
+class EvalContext(object):
+
+    """
+    Class that represents an interactive interface. It has its own
+    namespace. Use eval_context.exec_expr(expr) to run commands; the
+    output of those commands is returned, as are print statements.
+
+    This is essentially what doctest does, and is taken directly from
+    doctest.
+    """
+
+    def __init__(self, namespace, globs):
+        self.namespace = namespace  # locals dict for executed code
+        self.globs = globs          # globals dict for executed code
+
+    def exec_expr(self, s):
+        """Execute source string *s* and return its captured stdout.
+
+        Tracebacks from the executed code are printed into the captured
+        output instead of propagating (KeyboardInterrupt excepted).
+        A global lock serializes execution, so this is safe to call from
+        multiple threads but only one expression runs at a time.
+        """
+        out = StringIO()
+        exec_lock.acquire()
+        save_stdout = sys.stdout
+        try:
+            # Redirect any pdb interaction to the real stdout while the
+            # executed code's own output goes to the StringIO buffer.
+            # NOTE(review): pdb.set_trace is monkeypatched globally here
+            # and never restored -- verify this is intentional.
+            debugger = _OutputRedirectingPdb(save_stdout)
+            debugger.reset()
+            pdb.set_trace = debugger.set_trace
+            sys.stdout = out
+            try:
+                # "single" mode echoes expression results like the REPL.
+                code = compile(s, '<web>', "single", 0, 1)
+                six.exec_(code, self.globs, self.namespace)
+                debugger.set_continue()
+            except KeyboardInterrupt:
+                raise
+            except:
+                # Show the error to the web user rather than crashing.
+                traceback.print_exc(file=out)
+                debugger.set_continue()
+        finally:
+            sys.stdout = save_stdout
+            exec_lock.release()
+        return out.getvalue()
+
+# From doctest
+class _OutputRedirectingPdb(pdb.Pdb):
+ """
+ A specialized version of the python debugger that redirects stdout
+ to a given stream when interacting with the user. Stdout is *not*
+ redirected when traced code is executed.
+ """
+ def __init__(self, out):
+ self.__out = out
+ pdb.Pdb.__init__(self)
+
+ def trace_dispatch(self, *args):
+ # Redirect stdout to the given stream.
+ save_stdout = sys.stdout
+ sys.stdout = self.__out
+ # Call Pdb's trace dispatch method.
+ try:
+ return pdb.Pdb.trace_dispatch(self, *args)
+ finally:
+ sys.stdout = save_stdout
diff --git a/paste/evalexception/media/MochiKit.packed.js b/paste/evalexception/media/MochiKit.packed.js
new file mode 100644
index 0000000..15027d9
--- /dev/null
+++ b/paste/evalexception/media/MochiKit.packed.js
@@ -0,0 +1,7829 @@
+/***
+
+ MochiKit.MochiKit 1.4.2 : PACKED VERSION
+
+ THIS FILE IS AUTOMATICALLY GENERATED. If creating patches, please
+ diff against the source tree, not this file.
+
+ See <http://mochikit.com/> for documentation, downloads, license, etc.
+
+ (c) 2005 Bob Ippolito. All rights Reserved.
+
+***/
+
+if(typeof (dojo)!="undefined"){
+dojo.provide("MochiKit.Base");
+}
+if(typeof (MochiKit)=="undefined"){
+MochiKit={};
+}
+if(typeof (MochiKit.Base)=="undefined"){
+MochiKit.Base={};
+}
+if(typeof (MochiKit.__export__)=="undefined"){
+MochiKit.__export__=(MochiKit.__compat__||(typeof (JSAN)=="undefined"&&typeof (dojo)=="undefined"));
+}
+MochiKit.Base.VERSION="1.4.2";
+MochiKit.Base.NAME="MochiKit.Base";
+MochiKit.Base.update=function(_1,_2){
+if(_1===null||_1===undefined){
+_1={};
+}
+for(var i=1;i<arguments.length;i++){
+var o=arguments[i];
+if(typeof (o)!="undefined"&&o!==null){
+for(var k in o){
+_1[k]=o[k];
+}
+}
+}
+return _1;
+};
+MochiKit.Base.update(MochiKit.Base,{__repr__:function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+},toString:function(){
+return this.__repr__();
+},camelize:function(_6){
+var _7=_6.split("-");
+var cc=_7[0];
+for(var i=1;i<_7.length;i++){
+cc+=_7[i].charAt(0).toUpperCase()+_7[i].substring(1);
+}
+return cc;
+},counter:function(n){
+if(arguments.length===0){
+n=1;
+}
+return function(){
+return n++;
+};
+},clone:function(_b){
+var me=arguments.callee;
+if(arguments.length==1){
+me.prototype=_b;
+return new me();
+}
+},_deps:function(_d,_e){
+if(!(_d in MochiKit)){
+MochiKit[_d]={};
+}
+if(typeof (dojo)!="undefined"){
+dojo.provide("MochiKit."+_d);
+}
+for(var i=0;i<_e.length;i++){
+if(typeof (dojo)!="undefined"){
+dojo.require("MochiKit."+_e[i]);
+}
+if(typeof (JSAN)!="undefined"){
+JSAN.use("MochiKit."+_e[i],[]);
+}
+if(!(_e[i] in MochiKit)){
+throw "MochiKit."+_d+" depends on MochiKit."+_e[i]+"!";
+}
+}
+},_flattenArray:function(res,lst){
+for(var i=0;i<lst.length;i++){
+var o=lst[i];
+if(o instanceof Array){
+arguments.callee(res,o);
+}else{
+res.push(o);
+}
+}
+return res;
+},flattenArray:function(lst){
+return MochiKit.Base._flattenArray([],lst);
+},flattenArguments:function(lst){
+var res=[];
+var m=MochiKit.Base;
+var _18=m.extend(null,arguments);
+while(_18.length){
+var o=_18.shift();
+if(o&&typeof (o)=="object"&&typeof (o.length)=="number"){
+for(var i=o.length-1;i>=0;i--){
+_18.unshift(o[i]);
+}
+}else{
+res.push(o);
+}
+}
+return res;
+},extend:function(_1b,obj,_1d){
+if(!_1d){
+_1d=0;
+}
+if(obj){
+var l=obj.length;
+if(typeof (l)!="number"){
+if(typeof (MochiKit.Iter)!="undefined"){
+obj=MochiKit.Iter.list(obj);
+l=obj.length;
+}else{
+throw new TypeError("Argument not an array-like and MochiKit.Iter not present");
+}
+}
+if(!_1b){
+_1b=[];
+}
+for(var i=_1d;i<l;i++){
+_1b.push(obj[i]);
+}
+}
+return _1b;
+},updatetree:function(_20,obj){
+if(_20===null||_20===undefined){
+_20={};
+}
+for(var i=1;i<arguments.length;i++){
+var o=arguments[i];
+if(typeof (o)!="undefined"&&o!==null){
+for(var k in o){
+var v=o[k];
+if(typeof (_20[k])=="object"&&typeof (v)=="object"){
+arguments.callee(_20[k],v);
+}else{
+_20[k]=v;
+}
+}
+}
+}
+return _20;
+},setdefault:function(_26,obj){
+if(_26===null||_26===undefined){
+_26={};
+}
+for(var i=1;i<arguments.length;i++){
+var o=arguments[i];
+for(var k in o){
+if(!(k in _26)){
+_26[k]=o[k];
+}
+}
+}
+return _26;
+},keys:function(obj){
+var _2c=[];
+for(var _2d in obj){
+_2c.push(_2d);
+}
+return _2c;
+},values:function(obj){
+var _2f=[];
+for(var _30 in obj){
+_2f.push(obj[_30]);
+}
+return _2f;
+},items:function(obj){
+var _32=[];
+var e;
+for(var _34 in obj){
+var v;
+try{
+v=obj[_34];
+}
+catch(e){
+continue;
+}
+_32.push([_34,v]);
+}
+return _32;
+},_newNamedError:function(_36,_37,_38){
+_38.prototype=new MochiKit.Base.NamedError(_36.NAME+"."+_37);
+_36[_37]=_38;
+},operator:{truth:function(a){
+return !!a;
+},lognot:function(a){
+return !a;
+},identity:function(a){
+return a;
+},not:function(a){
+return ~a;
+},neg:function(a){
+return -a;
+},add:function(a,b){
+return a+b;
+},sub:function(a,b){
+return a-b;
+},div:function(a,b){
+return a/b;
+},mod:function(a,b){
+return a%b;
+},mul:function(a,b){
+return a*b;
+},and:function(a,b){
+return a&b;
+},or:function(a,b){
+return a|b;
+},xor:function(a,b){
+return a^b;
+},lshift:function(a,b){
+return a<<b;
+},rshift:function(a,b){
+return a>>b;
+},zrshift:function(a,b){
+return a>>>b;
+},eq:function(a,b){
+return a==b;
+},ne:function(a,b){
+return a!=b;
+},gt:function(a,b){
+return a>b;
+},ge:function(a,b){
+return a>=b;
+},lt:function(a,b){
+return a<b;
+},le:function(a,b){
+return a<=b;
+},seq:function(a,b){
+return a===b;
+},sne:function(a,b){
+return a!==b;
+},ceq:function(a,b){
+return MochiKit.Base.compare(a,b)===0;
+},cne:function(a,b){
+return MochiKit.Base.compare(a,b)!==0;
+},cgt:function(a,b){
+return MochiKit.Base.compare(a,b)==1;
+},cge:function(a,b){
+return MochiKit.Base.compare(a,b)!=-1;
+},clt:function(a,b){
+return MochiKit.Base.compare(a,b)==-1;
+},cle:function(a,b){
+return MochiKit.Base.compare(a,b)!=1;
+},logand:function(a,b){
+return a&&b;
+},logor:function(a,b){
+return a||b;
+},contains:function(a,b){
+return b in a;
+}},forwardCall:function(_76){
+return function(){
+return this[_76].apply(this,arguments);
+};
+},itemgetter:function(_77){
+return function(arg){
+return arg[_77];
+};
+},typeMatcher:function(){
+var _79={};
+for(var i=0;i<arguments.length;i++){
+var typ=arguments[i];
+_79[typ]=typ;
+}
+return function(){
+for(var i=0;i<arguments.length;i++){
+if(!(typeof (arguments[i]) in _79)){
+return false;
+}
+}
+return true;
+};
+},isNull:function(){
+for(var i=0;i<arguments.length;i++){
+if(arguments[i]!==null){
+return false;
+}
+}
+return true;
+},isUndefinedOrNull:function(){
+for(var i=0;i<arguments.length;i++){
+var o=arguments[i];
+if(!(typeof (o)=="undefined"||o===null)){
+return false;
+}
+}
+return true;
+},isEmpty:function(obj){
+return !MochiKit.Base.isNotEmpty.apply(this,arguments);
+},isNotEmpty:function(obj){
+for(var i=0;i<arguments.length;i++){
+var o=arguments[i];
+if(!(o&&o.length)){
+return false;
+}
+}
+return true;
+},isArrayLike:function(){
+for(var i=0;i<arguments.length;i++){
+var o=arguments[i];
+var typ=typeof (o);
+if((typ!="object"&&!(typ=="function"&&typeof (o.item)=="function"))||o===null||typeof (o.length)!="number"||o.nodeType===3||o.nodeType===4){
+return false;
+}
+}
+return true;
+},isDateLike:function(){
+for(var i=0;i<arguments.length;i++){
+var o=arguments[i];
+if(typeof (o)!="object"||o===null||typeof (o.getTime)!="function"){
+return false;
+}
+}
+return true;
+},xmap:function(fn){
+if(fn===null){
+return MochiKit.Base.extend(null,arguments,1);
+}
+var _8a=[];
+for(var i=1;i<arguments.length;i++){
+_8a.push(fn(arguments[i]));
+}
+return _8a;
+},map:function(fn,lst){
+var m=MochiKit.Base;
+var itr=MochiKit.Iter;
+var _90=m.isArrayLike;
+if(arguments.length<=2){
+if(!_90(lst)){
+if(itr){
+lst=itr.list(lst);
+if(fn===null){
+return lst;
+}
+}else{
+throw new TypeError("Argument not an array-like and MochiKit.Iter not present");
+}
+}
+if(fn===null){
+return m.extend(null,lst);
+}
+var _91=[];
+for(var i=0;i<lst.length;i++){
+_91.push(fn(lst[i]));
+}
+return _91;
+}else{
+if(fn===null){
+fn=Array;
+}
+var _93=null;
+for(i=1;i<arguments.length;i++){
+if(!_90(arguments[i])){
+if(itr){
+return itr.list(itr.imap.apply(null,arguments));
+}else{
+throw new TypeError("Argument not an array-like and MochiKit.Iter not present");
+}
+}
+var l=arguments[i].length;
+if(_93===null||_93>l){
+_93=l;
+}
+}
+_91=[];
+for(i=0;i<_93;i++){
+var _95=[];
+for(var j=1;j<arguments.length;j++){
+_95.push(arguments[j][i]);
+}
+_91.push(fn.apply(this,_95));
+}
+return _91;
+}
+},xfilter:function(fn){
+var _98=[];
+if(fn===null){
+fn=MochiKit.Base.operator.truth;
+}
+for(var i=1;i<arguments.length;i++){
+var o=arguments[i];
+if(fn(o)){
+_98.push(o);
+}
+}
+return _98;
+},filter:function(fn,lst,_9d){
+var _9e=[];
+var m=MochiKit.Base;
+if(!m.isArrayLike(lst)){
+if(MochiKit.Iter){
+lst=MochiKit.Iter.list(lst);
+}else{
+throw new TypeError("Argument not an array-like and MochiKit.Iter not present");
+}
+}
+if(fn===null){
+fn=m.operator.truth;
+}
+if(typeof (Array.prototype.filter)=="function"){
+return Array.prototype.filter.call(lst,fn,_9d);
+}else{
+if(typeof (_9d)=="undefined"||_9d===null){
+for(var i=0;i<lst.length;i++){
+var o=lst[i];
+if(fn(o)){
+_9e.push(o);
+}
+}
+}else{
+for(i=0;i<lst.length;i++){
+o=lst[i];
+if(fn.call(_9d,o)){
+_9e.push(o);
+}
+}
+}
+}
+return _9e;
+},_wrapDumbFunction:function(_a2){
+return function(){
+switch(arguments.length){
+case 0:
+return _a2();
+case 1:
+return _a2(arguments[0]);
+case 2:
+return _a2(arguments[0],arguments[1]);
+case 3:
+return _a2(arguments[0],arguments[1],arguments[2]);
+}
+var _a3=[];
+for(var i=0;i<arguments.length;i++){
+_a3.push("arguments["+i+"]");
+}
+return eval("(func("+_a3.join(",")+"))");
+};
+},methodcaller:function(_a5){
+var _a6=MochiKit.Base.extend(null,arguments,1);
+if(typeof (_a5)=="function"){
+return function(obj){
+return _a5.apply(obj,_a6);
+};
+}else{
+return function(obj){
+return obj[_a5].apply(obj,_a6);
+};
+}
+},method:function(_a9,_aa){
+var m=MochiKit.Base;
+return m.bind.apply(this,m.extend([_aa,_a9],arguments,2));
+},compose:function(f1,f2){
+var _ae=[];
+var m=MochiKit.Base;
+if(arguments.length===0){
+throw new TypeError("compose() requires at least one argument");
+}
+for(var i=0;i<arguments.length;i++){
+var fn=arguments[i];
+if(typeof (fn)!="function"){
+throw new TypeError(m.repr(fn)+" is not a function");
+}
+_ae.push(fn);
+}
+return function(){
+var _b2=arguments;
+for(var i=_ae.length-1;i>=0;i--){
+_b2=[_ae[i].apply(this,_b2)];
+}
+return _b2[0];
+};
+},bind:function(_b4,_b5){
+if(typeof (_b4)=="string"){
+_b4=_b5[_b4];
+}
+var _b6=_b4.im_func;
+var _b7=_b4.im_preargs;
+var _b8=_b4.im_self;
+var m=MochiKit.Base;
+if(typeof (_b4)=="function"&&typeof (_b4.apply)=="undefined"){
+_b4=m._wrapDumbFunction(_b4);
+}
+if(typeof (_b6)!="function"){
+_b6=_b4;
+}
+if(typeof (_b5)!="undefined"){
+_b8=_b5;
+}
+if(typeof (_b7)=="undefined"){
+_b7=[];
+}else{
+_b7=_b7.slice();
+}
+m.extend(_b7,arguments,2);
+var _ba=function(){
+var _bb=arguments;
+var me=arguments.callee;
+if(me.im_preargs.length>0){
+_bb=m.concat(me.im_preargs,_bb);
+}
+var _bd=me.im_self;
+if(!_bd){
+_bd=this;
+}
+return me.im_func.apply(_bd,_bb);
+};
+_ba.im_self=_b8;
+_ba.im_func=_b6;
+_ba.im_preargs=_b7;
+return _ba;
+},bindLate:function(_be,_bf){
+var m=MochiKit.Base;
+if(typeof (_be)!="string"){
+return m.bind.apply(this,arguments);
+}
+var _c1=m.extend([],arguments,2);
+var _c2=function(){
+var _c3=arguments;
+var me=arguments.callee;
+if(me.im_preargs.length>0){
+_c3=m.concat(me.im_preargs,_c3);
+}
+var _c5=me.im_self;
+if(!_c5){
+_c5=this;
+}
+return _c5[me.im_func].apply(_c5,_c3);
+};
+_c2.im_self=_bf;
+_c2.im_func=_be;
+_c2.im_preargs=_c1;
+return _c2;
+},bindMethods:function(_c6){
+var _c7=MochiKit.Base.bind;
+for(var k in _c6){
+var _c9=_c6[k];
+if(typeof (_c9)=="function"){
+_c6[k]=_c7(_c9,_c6);
+}
+}
+},registerComparator:function(_ca,_cb,_cc,_cd){
+MochiKit.Base.comparatorRegistry.register(_ca,_cb,_cc,_cd);
+},_primitives:{"boolean":true,"string":true,"number":true},compare:function(a,b){
+if(a==b){
+return 0;
+}
+var _d0=(typeof (a)=="undefined"||a===null);
+var _d1=(typeof (b)=="undefined"||b===null);
+if(_d0&&_d1){
+return 0;
+}else{
+if(_d0){
+return -1;
+}else{
+if(_d1){
+return 1;
+}
+}
+}
+var m=MochiKit.Base;
+var _d3=m._primitives;
+if(!(typeof (a) in _d3&&typeof (b) in _d3)){
+try{
+return m.comparatorRegistry.match(a,b);
+}
+catch(e){
+if(e!=m.NotFound){
+throw e;
+}
+}
+}
+if(a<b){
+return -1;
+}else{
+if(a>b){
+return 1;
+}
+}
+var _d4=m.repr;
+throw new TypeError(_d4(a)+" and "+_d4(b)+" can not be compared");
+},compareDateLike:function(a,b){
+return MochiKit.Base.compare(a.getTime(),b.getTime());
+},compareArrayLike:function(a,b){
+var _d9=MochiKit.Base.compare;
+var _da=a.length;
+var _db=0;
+if(_da>b.length){
+_db=1;
+_da=b.length;
+}else{
+if(_da<b.length){
+_db=-1;
+}
+}
+for(var i=0;i<_da;i++){
+var cmp=_d9(a[i],b[i]);
+if(cmp){
+return cmp;
+}
+}
+return _db;
+},registerRepr:function(_de,_df,_e0,_e1){
+MochiKit.Base.reprRegistry.register(_de,_df,_e0,_e1);
+},repr:function(o){
+if(typeof (o)=="undefined"){
+return "undefined";
+}else{
+if(o===null){
+return "null";
+}
+}
+try{
+if(typeof (o.__repr__)=="function"){
+return o.__repr__();
+}else{
+if(typeof (o.repr)=="function"&&o.repr!=arguments.callee){
+return o.repr();
+}
+}
+return MochiKit.Base.reprRegistry.match(o);
+}
+catch(e){
+if(typeof (o.NAME)=="string"&&(o.toString==Function.prototype.toString||o.toString==Object.prototype.toString)){
+return o.NAME;
+}
+}
+try{
+var _e3=(o+"");
+}
+catch(e){
+return "["+typeof (o)+"]";
+}
+if(typeof (o)=="function"){
+_e3=_e3.replace(/^\s+/,"").replace(/\s+/g," ");
+_e3=_e3.replace(/,(\S)/,", $1");
+var idx=_e3.indexOf("{");
+if(idx!=-1){
+_e3=_e3.substr(0,idx)+"{...}";
+}
+}
+return _e3;
+},reprArrayLike:function(o){
+var m=MochiKit.Base;
+return "["+m.map(m.repr,o).join(", ")+"]";
+},reprString:function(o){
+return ("\""+o.replace(/(["\\])/g,"\\$1")+"\"").replace(/[\f]/g,"\\f").replace(/[\b]/g,"\\b").replace(/[\n]/g,"\\n").replace(/[\t]/g,"\\t").replace(/[\v]/g,"\\v").replace(/[\r]/g,"\\r");
+},reprNumber:function(o){
+return o+"";
+},registerJSON:function(_e9,_ea,_eb,_ec){
+MochiKit.Base.jsonRegistry.register(_e9,_ea,_eb,_ec);
+},evalJSON:function(){
+return eval("("+MochiKit.Base._filterJSON(arguments[0])+")");
+},_filterJSON:function(s){
+var m=s.match(/^\s*\/\*(.*)\*\/\s*$/);
+if(m){
+return m[1];
+}
+return s;
+},serializeJSON:function(o){
+var _f0=typeof (o);
+if(_f0=="number"||_f0=="boolean"){
+return o+"";
+}else{
+if(o===null){
+return "null";
+}else{
+if(_f0=="string"){
+var res="";
+for(var i=0;i<o.length;i++){
+var c=o.charAt(i);
+if(c=="\""){
+res+="\\\"";
+}else{
+if(c=="\\"){
+res+="\\\\";
+}else{
+if(c=="\b"){
+res+="\\b";
+}else{
+if(c=="\f"){
+res+="\\f";
+}else{
+if(c=="\n"){
+res+="\\n";
+}else{
+if(c=="\r"){
+res+="\\r";
+}else{
+if(c=="\t"){
+res+="\\t";
+}else{
+if(o.charCodeAt(i)<=31){
+var hex=o.charCodeAt(i).toString(16);
+if(hex.length<2){
+hex="0"+hex;
+}
+res+="\\u00"+hex.toUpperCase();
+}else{
+res+=c;
+}
+}
+}
+}
+}
+}
+}
+}
+}
+return "\""+res+"\"";
+}
+}
+}
+var me=arguments.callee;
+var _f6;
+if(typeof (o.__json__)=="function"){
+_f6=o.__json__();
+if(o!==_f6){
+return me(_f6);
+}
+}
+if(typeof (o.json)=="function"){
+_f6=o.json();
+if(o!==_f6){
+return me(_f6);
+}
+}
+if(_f0!="function"&&typeof (o.length)=="number"){
+var res=[];
+for(var i=0;i<o.length;i++){
+var val=me(o[i]);
+if(typeof (val)!="string"){
+continue;
+}
+res.push(val);
+}
+return "["+res.join(", ")+"]";
+}
+var m=MochiKit.Base;
+try{
+_f6=m.jsonRegistry.match(o);
+if(o!==_f6){
+return me(_f6);
+}
+}
+catch(e){
+if(e!=m.NotFound){
+throw e;
+}
+}
+if(_f0=="undefined"){
+throw new TypeError("undefined can not be serialized as JSON");
+}
+if(_f0=="function"){
+return null;
+}
+res=[];
+for(var k in o){
+var _fa;
+if(typeof (k)=="number"){
+_fa="\""+k+"\"";
+}else{
+if(typeof (k)=="string"){
+_fa=me(k);
+}else{
+continue;
+}
+}
+val=me(o[k]);
+if(typeof (val)!="string"){
+continue;
+}
+res.push(_fa+":"+val);
+}
+return "{"+res.join(", ")+"}";
+},objEqual:function(a,b){
+return (MochiKit.Base.compare(a,b)===0);
+},arrayEqual:function(_fd,arr){
+if(_fd.length!=arr.length){
+return false;
+}
+return (MochiKit.Base.compare(_fd,arr)===0);
+},concat:function(){
+var _ff=[];
+var _100=MochiKit.Base.extend;
+for(var i=0;i<arguments.length;i++){
+_100(_ff,arguments[i]);
+}
+return _ff;
+},keyComparator:function(key){
+var m=MochiKit.Base;
+var _104=m.compare;
+if(arguments.length==1){
+return function(a,b){
+return _104(a[key],b[key]);
+};
+}
+var _107=m.extend(null,arguments);
+return function(a,b){
+var rval=0;
+for(var i=0;(rval===0)&&(i<_107.length);i++){
+var key=_107[i];
+rval=_104(a[key],b[key]);
+}
+return rval;
+};
+},reverseKeyComparator:function(key){
+var _10e=MochiKit.Base.keyComparator.apply(this,arguments);
+return function(a,b){
+return _10e(b,a);
+};
+},partial:function(func){
+var m=MochiKit.Base;
+return m.bind.apply(this,m.extend([func,undefined],arguments,1));
+},listMinMax:function(_113,lst){
+if(lst.length===0){
+return null;
+}
+var cur=lst[0];
+var _116=MochiKit.Base.compare;
+for(var i=1;i<lst.length;i++){
+var o=lst[i];
+if(_116(o,cur)==_113){
+cur=o;
+}
+}
+return cur;
+},objMax:function(){
+return MochiKit.Base.listMinMax(1,arguments);
+},objMin:function(){
+return MochiKit.Base.listMinMax(-1,arguments);
+},findIdentical:function(lst,_11a,_11b,end){
+if(typeof (end)=="undefined"||end===null){
+end=lst.length;
+}
+if(typeof (_11b)=="undefined"||_11b===null){
+_11b=0;
+}
+for(var i=_11b;i<end;i++){
+if(lst[i]===_11a){
+return i;
+}
+}
+return -1;
+},mean:function(){
+var sum=0;
+var m=MochiKit.Base;
+var args=m.extend(null,arguments);
+var _121=args.length;
+while(args.length){
+var o=args.shift();
+if(o&&typeof (o)=="object"&&typeof (o.length)=="number"){
+_121+=o.length-1;
+for(var i=o.length-1;i>=0;i--){
+sum+=o[i];
+}
+}else{
+sum+=o;
+}
+}
+if(_121<=0){
+throw new TypeError("mean() requires at least one argument");
+}
+return sum/_121;
+},median:function(){
+var data=MochiKit.Base.flattenArguments(arguments);
+if(data.length===0){
+throw new TypeError("median() requires at least one argument");
+}
+data.sort(compare);
+if(data.length%2==0){
+var _125=data.length/2;
+return (data[_125]+data[_125-1])/2;
+}else{
+return data[(data.length-1)/2];
+}
+},findValue:function(lst,_127,_128,end){
+if(typeof (end)=="undefined"||end===null){
+end=lst.length;
+}
+if(typeof (_128)=="undefined"||_128===null){
+_128=0;
+}
+var cmp=MochiKit.Base.compare;
+for(var i=_128;i<end;i++){
+if(cmp(lst[i],_127)===0){
+return i;
+}
+}
+return -1;
+},nodeWalk:function(node,_12d){
+var _12e=[node];
+var _12f=MochiKit.Base.extend;
+while(_12e.length){
+var res=_12d(_12e.shift());
+if(res){
+_12f(_12e,res);
+}
+}
+},nameFunctions:function(_131){
+var base=_131.NAME;
+if(typeof (base)=="undefined"){
+base="";
+}else{
+base=base+".";
+}
+for(var name in _131){
+var o=_131[name];
+if(typeof (o)=="function"&&typeof (o.NAME)=="undefined"){
+try{
+o.NAME=base+name;
+}
+catch(e){
+}
+}
+}
+},queryString:function(_135,_136){
+if(typeof (MochiKit.DOM)!="undefined"&&arguments.length==1&&(typeof (_135)=="string"||(typeof (_135.nodeType)!="undefined"&&_135.nodeType>0))){
+var kv=MochiKit.DOM.formContents(_135);
+_135=kv[0];
+_136=kv[1];
+}else{
+if(arguments.length==1){
+if(typeof (_135.length)=="number"&&_135.length==2){
+return arguments.callee(_135[0],_135[1]);
+}
+var o=_135;
+_135=[];
+_136=[];
+for(var k in o){
+var v=o[k];
+if(typeof (v)=="function"){
+continue;
+}else{
+if(MochiKit.Base.isArrayLike(v)){
+for(var i=0;i<v.length;i++){
+_135.push(k);
+_136.push(v[i]);
+}
+}else{
+_135.push(k);
+_136.push(v);
+}
+}
+}
+}
+}
+var rval=[];
+var len=Math.min(_135.length,_136.length);
+var _13e=MochiKit.Base.urlEncode;
+for(var i=0;i<len;i++){
+v=_136[i];
+if(typeof (v)!="undefined"&&v!==null){
+rval.push(_13e(_135[i])+"="+_13e(v));
+}
+}
+return rval.join("&");
+},parseQueryString:function(_13f,_140){
+var qstr=(_13f.charAt(0)=="?")?_13f.substring(1):_13f;
+var _142=qstr.replace(/\+/g,"%20").split(/\&amp\;|\&\#38\;|\&#x26;|\&/);
+var o={};
+var _144;
+if(typeof (decodeURIComponent)!="undefined"){
+_144=decodeURIComponent;
+}else{
+_144=unescape;
+}
+if(_140){
+for(var i=0;i<_142.length;i++){
+var pair=_142[i].split("=");
+var name=_144(pair.shift());
+if(!name){
+continue;
+}
+var arr=o[name];
+if(!(arr instanceof Array)){
+arr=[];
+o[name]=arr;
+}
+arr.push(_144(pair.join("=")));
+}
+}else{
+for(i=0;i<_142.length;i++){
+pair=_142[i].split("=");
+var name=pair.shift();
+if(!name){
+continue;
+}
+o[_144(name)]=_144(pair.join("="));
+}
+}
+return o;
+}});
+MochiKit.Base.AdapterRegistry=function(){
+this.pairs=[];
+};
+MochiKit.Base.AdapterRegistry.prototype={register:function(name,_14a,wrap,_14c){
+if(_14c){
+this.pairs.unshift([name,_14a,wrap]);
+}else{
+this.pairs.push([name,_14a,wrap]);
+}
+},match:function(){
+for(var i=0;i<this.pairs.length;i++){
+var pair=this.pairs[i];
+if(pair[1].apply(this,arguments)){
+return pair[2].apply(this,arguments);
+}
+}
+throw MochiKit.Base.NotFound;
+},unregister:function(name){
+for(var i=0;i<this.pairs.length;i++){
+var pair=this.pairs[i];
+if(pair[0]==name){
+this.pairs.splice(i,1);
+return true;
+}
+}
+return false;
+}};
+MochiKit.Base.EXPORT=["flattenArray","noop","camelize","counter","clone","extend","update","updatetree","setdefault","keys","values","items","NamedError","operator","forwardCall","itemgetter","typeMatcher","isCallable","isUndefined","isUndefinedOrNull","isNull","isEmpty","isNotEmpty","isArrayLike","isDateLike","xmap","map","xfilter","filter","methodcaller","compose","bind","bindLate","bindMethods","NotFound","AdapterRegistry","registerComparator","compare","registerRepr","repr","objEqual","arrayEqual","concat","keyComparator","reverseKeyComparator","partial","merge","listMinMax","listMax","listMin","objMax","objMin","nodeWalk","zip","urlEncode","queryString","serializeJSON","registerJSON","evalJSON","parseQueryString","findValue","findIdentical","flattenArguments","method","average","mean","median"];
+MochiKit.Base.EXPORT_OK=["nameFunctions","comparatorRegistry","reprRegistry","jsonRegistry","compareDateLike","compareArrayLike","reprArrayLike","reprString","reprNumber"];
+MochiKit.Base._exportSymbols=function(_152,_153){
+if(!MochiKit.__export__){
+return;
+}
+var all=_153.EXPORT_TAGS[":all"];
+for(var i=0;i<all.length;i++){
+_152[all[i]]=_153[all[i]];
+}
+};
+MochiKit.Base.__new__=function(){
+var m=this;
+m.noop=m.operator.identity;
+m.forward=m.forwardCall;
+m.find=m.findValue;
+if(typeof (encodeURIComponent)!="undefined"){
+m.urlEncode=function(_157){
+return encodeURIComponent(_157).replace(/\'/g,"%27");
+};
+}else{
+m.urlEncode=function(_158){
+return escape(_158).replace(/\+/g,"%2B").replace(/\"/g,"%22").rval.replace(/\'/g,"%27");
+};
+}
+m.NamedError=function(name){
+this.message=name;
+this.name=name;
+};
+m.NamedError.prototype=new Error();
+m.update(m.NamedError.prototype,{repr:function(){
+if(this.message&&this.message!=this.name){
+return this.name+"("+m.repr(this.message)+")";
+}else{
+return this.name+"()";
+}
+},toString:m.forwardCall("repr")});
+m.NotFound=new m.NamedError("MochiKit.Base.NotFound");
+m.listMax=m.partial(m.listMinMax,1);
+m.listMin=m.partial(m.listMinMax,-1);
+m.isCallable=m.typeMatcher("function");
+m.isUndefined=m.typeMatcher("undefined");
+m.merge=m.partial(m.update,null);
+m.zip=m.partial(m.map,null);
+m.average=m.mean;
+m.comparatorRegistry=new m.AdapterRegistry();
+m.registerComparator("dateLike",m.isDateLike,m.compareDateLike);
+m.registerComparator("arrayLike",m.isArrayLike,m.compareArrayLike);
+m.reprRegistry=new m.AdapterRegistry();
+m.registerRepr("arrayLike",m.isArrayLike,m.reprArrayLike);
+m.registerRepr("string",m.typeMatcher("string"),m.reprString);
+m.registerRepr("numbers",m.typeMatcher("number","boolean"),m.reprNumber);
+m.jsonRegistry=new m.AdapterRegistry();
+var all=m.concat(m.EXPORT,m.EXPORT_OK);
+m.EXPORT_TAGS={":common":m.concat(m.EXPORT_OK),":all":all};
+m.nameFunctions(this);
+};
+MochiKit.Base.__new__();
+if(MochiKit.__export__){
+compare=MochiKit.Base.compare;
+compose=MochiKit.Base.compose;
+serializeJSON=MochiKit.Base.serializeJSON;
+mean=MochiKit.Base.mean;
+median=MochiKit.Base.median;
+}
+MochiKit.Base._exportSymbols(this,MochiKit.Base);
+MochiKit.Base._deps("Iter",["Base"]);
+MochiKit.Iter.NAME="MochiKit.Iter";
+MochiKit.Iter.VERSION="1.4.2";
+MochiKit.Base.update(MochiKit.Iter,{__repr__:function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+},toString:function(){
+return this.__repr__();
+},registerIteratorFactory:function(name,_15c,_15d,_15e){
+MochiKit.Iter.iteratorRegistry.register(name,_15c,_15d,_15e);
+},isIterable:function(o){
+return o!=null&&(typeof (o.next)=="function"||typeof (o.iter)=="function");
+},iter:function(_160,_161){
+var self=MochiKit.Iter;
+if(arguments.length==2){
+return self.takewhile(function(a){
+return a!=_161;
+},_160);
+}
+if(typeof (_160.next)=="function"){
+return _160;
+}else{
+if(typeof (_160.iter)=="function"){
+return _160.iter();
+}
+}
+try{
+return self.iteratorRegistry.match(_160);
+}
+catch(e){
+var m=MochiKit.Base;
+if(e==m.NotFound){
+e=new TypeError(typeof (_160)+": "+m.repr(_160)+" is not iterable");
+}
+throw e;
+}
+},count:function(n){
+if(!n){
+n=0;
+}
+var m=MochiKit.Base;
+return {repr:function(){
+return "count("+n+")";
+},toString:m.forwardCall("repr"),next:m.counter(n)};
+},cycle:function(p){
+var self=MochiKit.Iter;
+var m=MochiKit.Base;
+var lst=[];
+var _16b=self.iter(p);
+return {repr:function(){
+return "cycle(...)";
+},toString:m.forwardCall("repr"),next:function(){
+try{
+var rval=_16b.next();
+lst.push(rval);
+return rval;
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+if(lst.length===0){
+this.next=function(){
+throw self.StopIteration;
+};
+}else{
+var i=-1;
+this.next=function(){
+i=(i+1)%lst.length;
+return lst[i];
+};
+}
+return this.next();
+}
+}};
+},repeat:function(elem,n){
+var m=MochiKit.Base;
+if(typeof (n)=="undefined"){
+return {repr:function(){
+return "repeat("+m.repr(elem)+")";
+},toString:m.forwardCall("repr"),next:function(){
+return elem;
+}};
+}
+return {repr:function(){
+return "repeat("+m.repr(elem)+", "+n+")";
+},toString:m.forwardCall("repr"),next:function(){
+if(n<=0){
+throw MochiKit.Iter.StopIteration;
+}
+n-=1;
+return elem;
+}};
+},next:function(_171){
+return _171.next();
+},izip:function(p,q){
+var m=MochiKit.Base;
+var self=MochiKit.Iter;
+var next=self.next;
+var _177=m.map(self.iter,arguments);
+return {repr:function(){
+return "izip(...)";
+},toString:m.forwardCall("repr"),next:function(){
+return m.map(next,_177);
+}};
+},ifilter:function(pred,seq){
+var m=MochiKit.Base;
+seq=MochiKit.Iter.iter(seq);
+if(pred===null){
+pred=m.operator.truth;
+}
+return {repr:function(){
+return "ifilter(...)";
+},toString:m.forwardCall("repr"),next:function(){
+while(true){
+var rval=seq.next();
+if(pred(rval)){
+return rval;
+}
+}
+return undefined;
+}};
+},ifilterfalse:function(pred,seq){
+var m=MochiKit.Base;
+seq=MochiKit.Iter.iter(seq);
+if(pred===null){
+pred=m.operator.truth;
+}
+return {repr:function(){
+return "ifilterfalse(...)";
+},toString:m.forwardCall("repr"),next:function(){
+while(true){
+var rval=seq.next();
+if(!pred(rval)){
+return rval;
+}
+}
+return undefined;
+}};
+},islice:function(seq){
+var self=MochiKit.Iter;
+var m=MochiKit.Base;
+seq=self.iter(seq);
+var _183=0;
+var stop=0;
+var step=1;
+var i=-1;
+if(arguments.length==2){
+stop=arguments[1];
+}else{
+if(arguments.length==3){
+_183=arguments[1];
+stop=arguments[2];
+}else{
+_183=arguments[1];
+stop=arguments[2];
+step=arguments[3];
+}
+}
+return {repr:function(){
+return "islice("+["...",_183,stop,step].join(", ")+")";
+},toString:m.forwardCall("repr"),next:function(){
+var rval;
+while(i<_183){
+rval=seq.next();
+i++;
+}
+if(_183>=stop){
+throw self.StopIteration;
+}
+_183+=step;
+return rval;
+}};
+},imap:function(fun,p,q){
+var m=MochiKit.Base;
+var self=MochiKit.Iter;
+var _18d=m.map(self.iter,m.extend(null,arguments,1));
+var map=m.map;
+var next=self.next;
+return {repr:function(){
+return "imap(...)";
+},toString:m.forwardCall("repr"),next:function(){
+return fun.apply(this,map(next,_18d));
+}};
+},applymap:function(fun,seq,self){
+seq=MochiKit.Iter.iter(seq);
+var m=MochiKit.Base;
+return {repr:function(){
+return "applymap(...)";
+},toString:m.forwardCall("repr"),next:function(){
+return fun.apply(self,seq.next());
+}};
+},chain:function(p,q){
+var self=MochiKit.Iter;
+var m=MochiKit.Base;
+if(arguments.length==1){
+return self.iter(arguments[0]);
+}
+var _198=m.map(self.iter,arguments);
+return {repr:function(){
+return "chain(...)";
+},toString:m.forwardCall("repr"),next:function(){
+while(_198.length>1){
+try{
+var _199=_198[0].next();
+return _199;
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+_198.shift();
+var _199=_198[0].next();
+return _199;
+}
+}
+if(_198.length==1){
+var arg=_198.shift();
+this.next=m.bind("next",arg);
+return this.next();
+}
+throw self.StopIteration;
+}};
+},takewhile:function(pred,seq){
+var self=MochiKit.Iter;
+seq=self.iter(seq);
+return {repr:function(){
+return "takewhile(...)";
+},toString:MochiKit.Base.forwardCall("repr"),next:function(){
+var rval=seq.next();
+if(!pred(rval)){
+this.next=function(){
+throw self.StopIteration;
+};
+this.next();
+}
+return rval;
+}};
+},dropwhile:function(pred,seq){
+seq=MochiKit.Iter.iter(seq);
+var m=MochiKit.Base;
+var bind=m.bind;
+return {"repr":function(){
+return "dropwhile(...)";
+},"toString":m.forwardCall("repr"),"next":function(){
+while(true){
+var rval=seq.next();
+if(!pred(rval)){
+break;
+}
+}
+this.next=bind("next",seq);
+return rval;
+}};
+},_tee:function(_1a4,sync,_1a6){
+sync.pos[_1a4]=-1;
+var m=MochiKit.Base;
+var _1a8=m.listMin;
+return {repr:function(){
+return "tee("+_1a4+", ...)";
+},toString:m.forwardCall("repr"),next:function(){
+var rval;
+var i=sync.pos[_1a4];
+if(i==sync.max){
+rval=_1a6.next();
+sync.deque.push(rval);
+sync.max+=1;
+sync.pos[_1a4]+=1;
+}else{
+rval=sync.deque[i-sync.min];
+sync.pos[_1a4]+=1;
+if(i==sync.min&&_1a8(sync.pos)!=sync.min){
+sync.min+=1;
+sync.deque.shift();
+}
+}
+return rval;
+}};
+},tee:function(_1ab,n){
+var rval=[];
+var sync={"pos":[],"deque":[],"max":-1,"min":-1};
+if(arguments.length==1||typeof (n)=="undefined"||n===null){
+n=2;
+}
+var self=MochiKit.Iter;
+_1ab=self.iter(_1ab);
+var _tee=self._tee;
+for(var i=0;i<n;i++){
+rval.push(_tee(i,sync,_1ab));
+}
+return rval;
+},list:function(_1b2){
+var rval;
+if(_1b2 instanceof Array){
+return _1b2.slice();
+}
+if(typeof (_1b2)=="function"&&!(_1b2 instanceof Function)&&typeof (_1b2.length)=="number"){
+rval=[];
+for(var i=0;i<_1b2.length;i++){
+rval.push(_1b2[i]);
+}
+return rval;
+}
+var self=MochiKit.Iter;
+_1b2=self.iter(_1b2);
+var rval=[];
+var _1b6;
+try{
+while(true){
+_1b6=_1b2.next();
+rval.push(_1b6);
+}
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+return rval;
+}
+return undefined;
+},reduce:function(fn,_1b8,_1b9){
+var i=0;
+var x=_1b9;
+var self=MochiKit.Iter;
+_1b8=self.iter(_1b8);
+if(arguments.length<3){
+try{
+x=_1b8.next();
+}
+catch(e){
+if(e==self.StopIteration){
+e=new TypeError("reduce() of empty sequence with no initial value");
+}
+throw e;
+}
+i++;
+}
+try{
+while(true){
+x=fn(x,_1b8.next());
+}
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+}
+return x;
+},range:function(){
+var _1bd=0;
+var stop=0;
+var step=1;
+if(arguments.length==1){
+stop=arguments[0];
+}else{
+if(arguments.length==2){
+_1bd=arguments[0];
+stop=arguments[1];
+}else{
+if(arguments.length==3){
+_1bd=arguments[0];
+stop=arguments[1];
+step=arguments[2];
+}else{
+throw new TypeError("range() takes 1, 2, or 3 arguments!");
+}
+}
+}
+if(step===0){
+throw new TypeError("range() step must not be 0");
+}
+return {next:function(){
+if((step>0&&_1bd>=stop)||(step<0&&_1bd<=stop)){
+throw MochiKit.Iter.StopIteration;
+}
+var rval=_1bd;
+_1bd+=step;
+return rval;
+},repr:function(){
+return "range("+[_1bd,stop,step].join(", ")+")";
+},toString:MochiKit.Base.forwardCall("repr")};
+},sum:function(_1c1,_1c2){
+if(typeof (_1c2)=="undefined"||_1c2===null){
+_1c2=0;
+}
+var x=_1c2;
+var self=MochiKit.Iter;
+_1c1=self.iter(_1c1);
+try{
+while(true){
+x+=_1c1.next();
+}
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+}
+return x;
+},exhaust:function(_1c5){
+var self=MochiKit.Iter;
+_1c5=self.iter(_1c5);
+try{
+while(true){
+_1c5.next();
+}
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+}
+},forEach:function(_1c7,func,obj){
+var m=MochiKit.Base;
+var self=MochiKit.Iter;
+if(arguments.length>2){
+func=m.bind(func,obj);
+}
+if(m.isArrayLike(_1c7)&&!self.isIterable(_1c7)){
+try{
+for(var i=0;i<_1c7.length;i++){
+func(_1c7[i]);
+}
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+}
+}else{
+self.exhaust(self.imap(func,_1c7));
+}
+},every:function(_1cd,func){
+var self=MochiKit.Iter;
+try{
+self.ifilterfalse(func,_1cd).next();
+return false;
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+return true;
+}
+},sorted:function(_1d0,cmp){
+var rval=MochiKit.Iter.list(_1d0);
+if(arguments.length==1){
+cmp=MochiKit.Base.compare;
+}
+rval.sort(cmp);
+return rval;
+},reversed:function(_1d3){
+var rval=MochiKit.Iter.list(_1d3);
+rval.reverse();
+return rval;
+},some:function(_1d5,func){
+var self=MochiKit.Iter;
+try{
+self.ifilter(func,_1d5).next();
+return true;
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+return false;
+}
+},iextend:function(lst,_1d9){
+var m=MochiKit.Base;
+var self=MochiKit.Iter;
+if(m.isArrayLike(_1d9)&&!self.isIterable(_1d9)){
+for(var i=0;i<_1d9.length;i++){
+lst.push(_1d9[i]);
+}
+}else{
+_1d9=self.iter(_1d9);
+try{
+while(true){
+lst.push(_1d9.next());
+}
+}
+catch(e){
+if(e!=self.StopIteration){
+throw e;
+}
+}
+}
+return lst;
+},groupby:function(_1dd,_1de){
+var m=MochiKit.Base;
+var self=MochiKit.Iter;
+if(arguments.length<2){
+_1de=m.operator.identity;
+}
+_1dd=self.iter(_1dd);
+var pk=undefined;
+var k=undefined;
+var v;
+function fetch(){
+v=_1dd.next();
+k=_1de(v);
+}
+function eat(){
+var ret=v;
+v=undefined;
+return ret;
+}
+var _1e5=true;
+var _1e6=m.compare;
+return {repr:function(){
+return "groupby(...)";
+},next:function(){
+while(_1e6(k,pk)===0){
+fetch();
+if(_1e5){
+_1e5=false;
+break;
+}
+}
+pk=k;
+return [k,{next:function(){
+if(v==undefined){
+fetch();
+}
+if(_1e6(k,pk)!==0){
+throw self.StopIteration;
+}
+return eat();
+}}];
+}};
+},groupby_as_array:function(_1e7,_1e8){
+var m=MochiKit.Base;
+var self=MochiKit.Iter;
+if(arguments.length<2){
+_1e8=m.operator.identity;
+}
+_1e7=self.iter(_1e7);
+var _1eb=[];
+var _1ec=true;
+var _1ed;
+var _1ee=m.compare;
+while(true){
+try{
+var _1ef=_1e7.next();
+var key=_1e8(_1ef);
+}
+catch(e){
+if(e==self.StopIteration){
+break;
+}
+throw e;
+}
+if(_1ec||_1ee(key,_1ed)!==0){
+var _1f1=[];
+_1eb.push([key,_1f1]);
+}
+_1f1.push(_1ef);
+_1ec=false;
+_1ed=key;
+}
+return _1eb;
+},arrayLikeIter:function(_1f2){
+var i=0;
+return {repr:function(){
+return "arrayLikeIter(...)";
+},toString:MochiKit.Base.forwardCall("repr"),next:function(){
+if(i>=_1f2.length){
+throw MochiKit.Iter.StopIteration;
+}
+return _1f2[i++];
+}};
+},hasIterateNext:function(_1f4){
+return (_1f4&&typeof (_1f4.iterateNext)=="function");
+},iterateNextIter:function(_1f5){
+return {repr:function(){
+return "iterateNextIter(...)";
+},toString:MochiKit.Base.forwardCall("repr"),next:function(){
+var rval=_1f5.iterateNext();
+if(rval===null||rval===undefined){
+throw MochiKit.Iter.StopIteration;
+}
+return rval;
+}};
+}});
+MochiKit.Iter.EXPORT_OK=["iteratorRegistry","arrayLikeIter","hasIterateNext","iterateNextIter"];
+MochiKit.Iter.EXPORT=["StopIteration","registerIteratorFactory","iter","count","cycle","repeat","next","izip","ifilter","ifilterfalse","islice","imap","applymap","chain","takewhile","dropwhile","tee","list","reduce","range","sum","exhaust","forEach","every","sorted","reversed","some","iextend","groupby","groupby_as_array"];
+MochiKit.Iter.__new__=function(){
+var m=MochiKit.Base;
+if(typeof (StopIteration)!="undefined"){
+this.StopIteration=StopIteration;
+}else{
+this.StopIteration=new m.NamedError("StopIteration");
+}
+this.iteratorRegistry=new m.AdapterRegistry();
+this.registerIteratorFactory("arrayLike",m.isArrayLike,this.arrayLikeIter);
+this.registerIteratorFactory("iterateNext",this.hasIterateNext,this.iterateNextIter);
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+};
+MochiKit.Iter.__new__();
+if(MochiKit.__export__){
+reduce=MochiKit.Iter.reduce;
+}
+MochiKit.Base._exportSymbols(this,MochiKit.Iter);
+MochiKit.Base._deps("Logging",["Base"]);
+MochiKit.Logging.NAME="MochiKit.Logging";
+MochiKit.Logging.VERSION="1.4.2";
+MochiKit.Logging.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Logging.toString=function(){
+return this.__repr__();
+};
+MochiKit.Logging.EXPORT=["LogLevel","LogMessage","Logger","alertListener","logger","log","logError","logDebug","logFatal","logWarning"];
+MochiKit.Logging.EXPORT_OK=["logLevelAtLeast","isLogMessage","compareLogMessage"];
+MochiKit.Logging.LogMessage=function(num,_1f9,info){
+this.num=num;
+this.level=_1f9;
+this.info=info;
+this.timestamp=new Date();
+};
+MochiKit.Logging.LogMessage.prototype={repr:function(){
+var m=MochiKit.Base;
+return "LogMessage("+m.map(m.repr,[this.num,this.level,this.info]).join(", ")+")";
+},toString:MochiKit.Base.forwardCall("repr")};
+MochiKit.Base.update(MochiKit.Logging,{logLevelAtLeast:function(_1fc){
+var self=MochiKit.Logging;
+if(typeof (_1fc)=="string"){
+_1fc=self.LogLevel[_1fc];
+}
+return function(msg){
+var _1ff=msg.level;
+if(typeof (_1ff)=="string"){
+_1ff=self.LogLevel[_1ff];
+}
+return _1ff>=_1fc;
+};
+},isLogMessage:function(){
+var _200=MochiKit.Logging.LogMessage;
+for(var i=0;i<arguments.length;i++){
+if(!(arguments[i] instanceof _200)){
+return false;
+}
+}
+return true;
+},compareLogMessage:function(a,b){
+return MochiKit.Base.compare([a.level,a.info],[b.level,b.info]);
+},alertListener:function(msg){
+alert("num: "+msg.num+"\nlevel: "+msg.level+"\ninfo: "+msg.info.join(" "));
+}});
+MochiKit.Logging.Logger=function(_205){
+this.counter=0;
+if(typeof (_205)=="undefined"||_205===null){
+_205=-1;
+}
+this.maxSize=_205;
+this._messages=[];
+this.listeners={};
+this.useNativeConsole=false;
+};
+MochiKit.Logging.Logger.prototype={clear:function(){
+this._messages.splice(0,this._messages.length);
+},logToConsole:function(msg){
+if(typeof (window)!="undefined"&&window.console&&window.console.log){
+window.console.log(msg.replace(/%/g,"\uff05"));
+}else{
+if(typeof (opera)!="undefined"&&opera.postError){
+opera.postError(msg);
+}else{
+if(typeof (printfire)=="function"){
+printfire(msg);
+}else{
+if(typeof (Debug)!="undefined"&&Debug.writeln){
+Debug.writeln(msg);
+}else{
+if(typeof (debug)!="undefined"&&debug.trace){
+debug.trace(msg);
+}
+}
+}
+}
+}
+},dispatchListeners:function(msg){
+for(var k in this.listeners){
+var pair=this.listeners[k];
+if(pair.ident!=k||(pair[0]&&!pair[0](msg))){
+continue;
+}
+pair[1](msg);
+}
+},addListener:function(_20a,_20b,_20c){
+if(typeof (_20b)=="string"){
+_20b=MochiKit.Logging.logLevelAtLeast(_20b);
+}
+var _20d=[_20b,_20c];
+_20d.ident=_20a;
+this.listeners[_20a]=_20d;
+},removeListener:function(_20e){
+delete this.listeners[_20e];
+},baseLog:function(_20f,_210){
+if(typeof (_20f)=="number"){
+if(_20f>=MochiKit.Logging.LogLevel.FATAL){
+_20f="FATAL";
+}else{
+if(_20f>=MochiKit.Logging.LogLevel.ERROR){
+_20f="ERROR";
+}else{
+if(_20f>=MochiKit.Logging.LogLevel.WARNING){
+_20f="WARNING";
+}else{
+if(_20f>=MochiKit.Logging.LogLevel.INFO){
+_20f="INFO";
+}else{
+_20f="DEBUG";
+}
+}
+}
+}
+}
+var msg=new MochiKit.Logging.LogMessage(this.counter,_20f,MochiKit.Base.extend(null,arguments,1));
+this._messages.push(msg);
+this.dispatchListeners(msg);
+if(this.useNativeConsole){
+this.logToConsole(msg.level+": "+msg.info.join(" "));
+}
+this.counter+=1;
+while(this.maxSize>=0&&this._messages.length>this.maxSize){
+this._messages.shift();
+}
+},getMessages:function(_212){
+var _213=0;
+if(!(typeof (_212)=="undefined"||_212===null)){
+_213=Math.max(0,this._messages.length-_212);
+}
+return this._messages.slice(_213);
+},getMessageText:function(_214){
+if(typeof (_214)=="undefined"||_214===null){
+_214=30;
+}
+var _215=this.getMessages(_214);
+if(_215.length){
+var lst=map(function(m){
+return "\n ["+m.num+"] "+m.level+": "+m.info.join(" ");
+},_215);
+lst.unshift("LAST "+_215.length+" MESSAGES:");
+return lst.join("");
+}
+return "";
+},debuggingBookmarklet:function(_218){
+if(typeof (MochiKit.LoggingPane)=="undefined"){
+alert(this.getMessageText());
+}else{
+MochiKit.LoggingPane.createLoggingPane(_218||false);
+}
+}};
+MochiKit.Logging.__new__=function(){
+this.LogLevel={ERROR:40,FATAL:50,WARNING:30,INFO:20,DEBUG:10};
+var m=MochiKit.Base;
+m.registerComparator("LogMessage",this.isLogMessage,this.compareLogMessage);
+var _21a=m.partial;
+var _21b=this.Logger;
+var _21c=_21b.prototype.baseLog;
+m.update(this.Logger.prototype,{debug:_21a(_21c,"DEBUG"),log:_21a(_21c,"INFO"),error:_21a(_21c,"ERROR"),fatal:_21a(_21c,"FATAL"),warning:_21a(_21c,"WARNING")});
+var self=this;
+var _21e=function(name){
+return function(){
+self.logger[name].apply(self.logger,arguments);
+};
+};
+this.log=_21e("log");
+this.logError=_21e("error");
+this.logDebug=_21e("debug");
+this.logFatal=_21e("fatal");
+this.logWarning=_21e("warning");
+this.logger=new _21b();
+this.logger.useNativeConsole=true;
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+};
+if(typeof (printfire)=="undefined"&&typeof (document)!="undefined"&&document.createEvent&&typeof (dispatchEvent)!="undefined"){
+printfire=function(){
+printfire.args=arguments;
+var ev=document.createEvent("Events");
+ev.initEvent("printfire",false,true);
+dispatchEvent(ev);
+};
+}
+MochiKit.Logging.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.Logging);
+MochiKit.Base._deps("DateTime",["Base"]);
+MochiKit.DateTime.NAME="MochiKit.DateTime";
+MochiKit.DateTime.VERSION="1.4.2";
+MochiKit.DateTime.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.DateTime.toString=function(){
+return this.__repr__();
+};
+MochiKit.DateTime.isoDate=function(str){
+str=str+"";
+if(typeof (str)!="string"||str.length===0){
+return null;
+}
+var iso=str.split("-");
+if(iso.length===0){
+return null;
+}
+var date=new Date(iso[0],iso[1]-1,iso[2]);
+date.setFullYear(iso[0]);
+date.setMonth(iso[1]-1);
+date.setDate(iso[2]);
+return date;
+};
+MochiKit.DateTime._isoRegexp=/(\d{4,})(?:-(\d{1,2})(?:-(\d{1,2})(?:[T ](\d{1,2}):(\d{1,2})(?::(\d{1,2})(?:\.(\d+))?)?(?:(Z)|([+-])(\d{1,2})(?::(\d{1,2}))?)?)?)?)?/;
+MochiKit.DateTime.isoTimestamp=function(str){
+str=str+"";
+if(typeof (str)!="string"||str.length===0){
+return null;
+}
+var res=str.match(MochiKit.DateTime._isoRegexp);
+if(typeof (res)=="undefined"||res===null){
+return null;
+}
+var year,_227,day,hour,min,sec,msec;
+year=parseInt(res[1],10);
+if(typeof (res[2])=="undefined"||res[2]===""){
+return new Date(year);
+}
+_227=parseInt(res[2],10)-1;
+day=parseInt(res[3],10);
+if(typeof (res[4])=="undefined"||res[4]===""){
+return new Date(year,_227,day);
+}
+hour=parseInt(res[4],10);
+min=parseInt(res[5],10);
+sec=(typeof (res[6])!="undefined"&&res[6]!=="")?parseInt(res[6],10):0;
+if(typeof (res[7])!="undefined"&&res[7]!==""){
+msec=Math.round(1000*parseFloat("0."+res[7]));
+}else{
+msec=0;
+}
+if((typeof (res[8])=="undefined"||res[8]==="")&&(typeof (res[9])=="undefined"||res[9]==="")){
+return new Date(year,_227,day,hour,min,sec,msec);
+}
+var ofs;
+if(typeof (res[9])!="undefined"&&res[9]!==""){
+ofs=parseInt(res[10],10)*3600000;
+if(typeof (res[11])!="undefined"&&res[11]!==""){
+ofs+=parseInt(res[11],10)*60000;
+}
+if(res[9]=="-"){
+ofs=-ofs;
+}
+}else{
+ofs=0;
+}
+return new Date(Date.UTC(year,_227,day,hour,min,sec,msec)-ofs);
+};
+MochiKit.DateTime.toISOTime=function(date,_22f){
+if(typeof (date)=="undefined"||date===null){
+return null;
+}
+var hh=date.getHours();
+var mm=date.getMinutes();
+var ss=date.getSeconds();
+var lst=[((_22f&&(hh<10))?"0"+hh:hh),((mm<10)?"0"+mm:mm),((ss<10)?"0"+ss:ss)];
+return lst.join(":");
+};
+MochiKit.DateTime.toISOTimestamp=function(date,_235){
+if(typeof (date)=="undefined"||date===null){
+return null;
+}
+var sep=_235?"T":" ";
+var foot=_235?"Z":"";
+if(_235){
+date=new Date(date.getTime()+(date.getTimezoneOffset()*60000));
+}
+return MochiKit.DateTime.toISODate(date)+sep+MochiKit.DateTime.toISOTime(date,_235)+foot;
+};
+MochiKit.DateTime.toISODate=function(date){
+if(typeof (date)=="undefined"||date===null){
+return null;
+}
+var _239=MochiKit.DateTime._padTwo;
+var _23a=MochiKit.DateTime._padFour;
+return [_23a(date.getFullYear()),_239(date.getMonth()+1),_239(date.getDate())].join("-");
+};
+MochiKit.DateTime.americanDate=function(d){
+d=d+"";
+if(typeof (d)!="string"||d.length===0){
+return null;
+}
+var a=d.split("/");
+return new Date(a[2],a[0]-1,a[1]);
+};
+MochiKit.DateTime._padTwo=function(n){
+return (n>9)?n:"0"+n;
+};
+MochiKit.DateTime._padFour=function(n){
+switch(n.toString().length){
+case 1:
+return "000"+n;
+break;
+case 2:
+return "00"+n;
+break;
+case 3:
+return "0"+n;
+break;
+case 4:
+default:
+return n;
+}
+};
+MochiKit.DateTime.toPaddedAmericanDate=function(d){
+if(typeof (d)=="undefined"||d===null){
+return null;
+}
+var _240=MochiKit.DateTime._padTwo;
+return [_240(d.getMonth()+1),_240(d.getDate()),d.getFullYear()].join("/");
+};
+MochiKit.DateTime.toAmericanDate=function(d){
+if(typeof (d)=="undefined"||d===null){
+return null;
+}
+return [d.getMonth()+1,d.getDate(),d.getFullYear()].join("/");
+};
+MochiKit.DateTime.EXPORT=["isoDate","isoTimestamp","toISOTime","toISOTimestamp","toISODate","americanDate","toPaddedAmericanDate","toAmericanDate"];
+MochiKit.DateTime.EXPORT_OK=[];
+MochiKit.DateTime.EXPORT_TAGS={":common":MochiKit.DateTime.EXPORT,":all":MochiKit.DateTime.EXPORT};
+MochiKit.DateTime.__new__=function(){
+var base=this.NAME+".";
+for(var k in this){
+var o=this[k];
+if(typeof (o)=="function"&&typeof (o.NAME)=="undefined"){
+try{
+o.NAME=base+k;
+}
+catch(e){
+}
+}
+}
+};
+MochiKit.DateTime.__new__();
+if(typeof (MochiKit.Base)!="undefined"){
+MochiKit.Base._exportSymbols(this,MochiKit.DateTime);
+}else{
+(function(_245,_246){
+if((typeof (JSAN)=="undefined"&&typeof (dojo)=="undefined")||(MochiKit.__export__===false)){
+var all=_246.EXPORT_TAGS[":all"];
+for(var i=0;i<all.length;i++){
+_245[all[i]]=_246[all[i]];
+}
+}
+})(this,MochiKit.DateTime);
+}
+MochiKit.Base._deps("Format",["Base"]);
+MochiKit.Format.NAME="MochiKit.Format";
+MochiKit.Format.VERSION="1.4.2";
+MochiKit.Format.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Format.toString=function(){
+return this.__repr__();
+};
+MochiKit.Format._numberFormatter=function(_249,_24a,_24b,_24c,_24d,_24e,_24f,_250,_251){
+return function(num){
+num=parseFloat(num);
+if(typeof (num)=="undefined"||num===null||isNaN(num)){
+return _249;
+}
+var _253=_24a;
+var _254=_24b;
+if(num<0){
+num=-num;
+}else{
+_253=_253.replace(/-/,"");
+}
+var me=arguments.callee;
+var fmt=MochiKit.Format.formatLocale(_24c);
+if(_24d){
+num=num*100;
+_254=fmt.percent+_254;
+}
+num=MochiKit.Format.roundToFixed(num,_24e);
+var _257=num.split(/\./);
+var _258=_257[0];
+var frac=(_257.length==1)?"":_257[1];
+var res="";
+while(_258.length<_24f){
+_258="0"+_258;
+}
+if(_250){
+while(_258.length>_250){
+var i=_258.length-_250;
+res=fmt.separator+_258.substring(i,_258.length)+res;
+_258=_258.substring(0,i);
+}
+}
+res=_258+res;
+if(_24e>0){
+while(frac.length<_251){
+frac=frac+"0";
+}
+res=res+fmt.decimal+frac;
+}
+return _253+res+_254;
+};
+};
+MochiKit.Format.numberFormatter=function(_25c,_25d,_25e){
+if(typeof (_25d)=="undefined"){
+_25d="";
+}
+var _25f=_25c.match(/((?:[0#]+,)?[0#]+)(?:\.([0#]+))?(%)?/);
+if(!_25f){
+throw TypeError("Invalid pattern");
+}
+var _260=_25c.substr(0,_25f.index);
+var _261=_25c.substr(_25f.index+_25f[0].length);
+if(_260.search(/-/)==-1){
+_260=_260+"-";
+}
+var _262=_25f[1];
+var frac=(typeof (_25f[2])=="string"&&_25f[2]!="")?_25f[2]:"";
+var _264=(typeof (_25f[3])=="string"&&_25f[3]!="");
+var tmp=_262.split(/,/);
+var _266;
+if(typeof (_25e)=="undefined"){
+_25e="default";
+}
+if(tmp.length==1){
+_266=null;
+}else{
+_266=tmp[1].length;
+}
+var _267=_262.length-_262.replace(/0/g,"").length;
+var _268=frac.length-frac.replace(/0/g,"").length;
+var _269=frac.length;
+var rval=MochiKit.Format._numberFormatter(_25d,_260,_261,_25e,_264,_269,_267,_266,_268);
+var m=MochiKit.Base;
+if(m){
+var fn=arguments.callee;
+var args=m.concat(arguments);
+rval.repr=function(){
+return [self.NAME,"(",map(m.repr,args).join(", "),")"].join("");
+};
+}
+return rval;
+};
+MochiKit.Format.formatLocale=function(_26e){
+if(typeof (_26e)=="undefined"||_26e===null){
+_26e="default";
+}
+if(typeof (_26e)=="string"){
+var rval=MochiKit.Format.LOCALE[_26e];
+if(typeof (rval)=="string"){
+rval=arguments.callee(rval);
+MochiKit.Format.LOCALE[_26e]=rval;
+}
+return rval;
+}else{
+return _26e;
+}
+};
+MochiKit.Format.twoDigitAverage=function(_270,_271){
+if(_271){
+var res=_270/_271;
+if(!isNaN(res)){
+return MochiKit.Format.twoDigitFloat(res);
+}
+}
+return "0";
+};
+MochiKit.Format.twoDigitFloat=function(_273){
+var res=roundToFixed(_273,2);
+if(res.indexOf(".00")>0){
+return res.substring(0,res.length-3);
+}else{
+if(res.charAt(res.length-1)=="0"){
+return res.substring(0,res.length-1);
+}else{
+return res;
+}
+}
+};
+MochiKit.Format.lstrip=function(str,_276){
+str=str+"";
+if(typeof (str)!="string"){
+return null;
+}
+if(!_276){
+return str.replace(/^\s+/,"");
+}else{
+return str.replace(new RegExp("^["+_276+"]+"),"");
+}
+};
+MochiKit.Format.rstrip=function(str,_278){
+str=str+"";
+if(typeof (str)!="string"){
+return null;
+}
+if(!_278){
+return str.replace(/\s+$/,"");
+}else{
+return str.replace(new RegExp("["+_278+"]+$"),"");
+}
+};
+MochiKit.Format.strip=function(str,_27a){
+var self=MochiKit.Format;
+return self.rstrip(self.lstrip(str,_27a),_27a);
+};
+MochiKit.Format.truncToFixed=function(_27c,_27d){
+var res=Math.floor(_27c).toFixed(0);
+if(_27c<0){
+res=Math.ceil(_27c).toFixed(0);
+if(res.charAt(0)!="-"&&_27d>0){
+res="-"+res;
+}
+}
+if(res.indexOf("e")<0&&_27d>0){
+var tail=_27c.toString();
+if(tail.indexOf("e")>0){
+tail=".";
+}else{
+if(tail.indexOf(".")<0){
+tail=".";
+}else{
+tail=tail.substring(tail.indexOf("."));
+}
+}
+if(tail.length-1>_27d){
+tail=tail.substring(0,_27d+1);
+}
+while(tail.length-1<_27d){
+tail+="0";
+}
+res+=tail;
+}
+return res;
+};
+MochiKit.Format.roundToFixed=function(_280,_281){
+var _282=Math.abs(_280)+0.5*Math.pow(10,-_281);
+var res=MochiKit.Format.truncToFixed(_282,_281);
+if(_280<0){
+res="-"+res;
+}
+return res;
+};
+MochiKit.Format.percentFormat=function(_284){
+return MochiKit.Format.twoDigitFloat(100*_284)+"%";
+};
+MochiKit.Format.EXPORT=["truncToFixed","roundToFixed","numberFormatter","formatLocale","twoDigitAverage","twoDigitFloat","percentFormat","lstrip","rstrip","strip"];
+MochiKit.Format.LOCALE={en_US:{separator:",",decimal:".",percent:"%"},de_DE:{separator:".",decimal:",",percent:"%"},pt_BR:{separator:".",decimal:",",percent:"%"},fr_FR:{separator:" ",decimal:",",percent:"%"},"default":"en_US"};
+MochiKit.Format.EXPORT_OK=[];
+MochiKit.Format.EXPORT_TAGS={":all":MochiKit.Format.EXPORT,":common":MochiKit.Format.EXPORT};
+MochiKit.Format.__new__=function(){
+var base=this.NAME+".";
+var k,v,o;
+for(k in this.LOCALE){
+o=this.LOCALE[k];
+if(typeof (o)=="object"){
+o.repr=function(){
+return this.NAME;
+};
+o.NAME=base+"LOCALE."+k;
+}
+}
+for(k in this){
+o=this[k];
+if(typeof (o)=="function"&&typeof (o.NAME)=="undefined"){
+try{
+o.NAME=base+k;
+}
+catch(e){
+}
+}
+}
+};
+MochiKit.Format.__new__();
+if(typeof (MochiKit.Base)!="undefined"){
+MochiKit.Base._exportSymbols(this,MochiKit.Format);
+}else{
+(function(_289,_28a){
+if((typeof (JSAN)=="undefined"&&typeof (dojo)=="undefined")||(MochiKit.__export__===false)){
+var all=_28a.EXPORT_TAGS[":all"];
+for(var i=0;i<all.length;i++){
+_289[all[i]]=_28a[all[i]];
+}
+}
+})(this,MochiKit.Format);
+}
+MochiKit.Base._deps("Async",["Base"]);
+MochiKit.Async.NAME="MochiKit.Async";
+MochiKit.Async.VERSION="1.4.2";
+MochiKit.Async.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Async.toString=function(){
+return this.__repr__();
+};
+MochiKit.Async.Deferred=function(_28d){
+this.chain=[];
+this.id=this._nextId();
+this.fired=-1;
+this.paused=0;
+this.results=[null,null];
+this.canceller=_28d;
+this.silentlyCancelled=false;
+this.chained=false;
+};
+MochiKit.Async.Deferred.prototype={repr:function(){
+var _28e;
+if(this.fired==-1){
+_28e="unfired";
+}else{
+if(this.fired===0){
+_28e="success";
+}else{
+_28e="error";
+}
+}
+return "Deferred("+this.id+", "+_28e+")";
+},toString:MochiKit.Base.forwardCall("repr"),_nextId:MochiKit.Base.counter(),cancel:function(){
+var self=MochiKit.Async;
+if(this.fired==-1){
+if(this.canceller){
+this.canceller(this);
+}else{
+this.silentlyCancelled=true;
+}
+if(this.fired==-1){
+this.errback(new self.CancelledError(this));
+}
+}else{
+if((this.fired===0)&&(this.results[0] instanceof self.Deferred)){
+this.results[0].cancel();
+}
+}
+},_resback:function(res){
+this.fired=((res instanceof Error)?1:0);
+this.results[this.fired]=res;
+this._fire();
+},_check:function(){
+if(this.fired!=-1){
+if(!this.silentlyCancelled){
+throw new MochiKit.Async.AlreadyCalledError(this);
+}
+this.silentlyCancelled=false;
+return;
+}
+},callback:function(res){
+this._check();
+if(res instanceof MochiKit.Async.Deferred){
+throw new Error("Deferred instances can only be chained if they are the result of a callback");
+}
+this._resback(res);
+},errback:function(res){
+this._check();
+var self=MochiKit.Async;
+if(res instanceof self.Deferred){
+throw new Error("Deferred instances can only be chained if they are the result of a callback");
+}
+if(!(res instanceof Error)){
+res=new self.GenericError(res);
+}
+this._resback(res);
+},addBoth:function(fn){
+if(arguments.length>1){
+fn=MochiKit.Base.partial.apply(null,arguments);
+}
+return this.addCallbacks(fn,fn);
+},addCallback:function(fn){
+if(arguments.length>1){
+fn=MochiKit.Base.partial.apply(null,arguments);
+}
+return this.addCallbacks(fn,null);
+},addErrback:function(fn){
+if(arguments.length>1){
+fn=MochiKit.Base.partial.apply(null,arguments);
+}
+return this.addCallbacks(null,fn);
+},addCallbacks:function(cb,eb){
+if(this.chained){
+throw new Error("Chained Deferreds can not be re-used");
+}
+this.chain.push([cb,eb]);
+if(this.fired>=0){
+this._fire();
+}
+return this;
+},_fire:function(){
+var _299=this.chain;
+var _29a=this.fired;
+var res=this.results[_29a];
+var self=this;
+var cb=null;
+while(_299.length>0&&this.paused===0){
+var pair=_299.shift();
+var f=pair[_29a];
+if(f===null){
+continue;
+}
+try{
+res=f(res);
+_29a=((res instanceof Error)?1:0);
+if(res instanceof MochiKit.Async.Deferred){
+cb=function(res){
+self._resback(res);
+self.paused--;
+if((self.paused===0)&&(self.fired>=0)){
+self._fire();
+}
+};
+this.paused++;
+}
+}
+catch(err){
+_29a=1;
+if(!(err instanceof Error)){
+err=new MochiKit.Async.GenericError(err);
+}
+res=err;
+}
+}
+this.fired=_29a;
+this.results[_29a]=res;
+if(cb&&this.paused){
+res.addBoth(cb);
+res.chained=true;
+}
+}};
+MochiKit.Base.update(MochiKit.Async,{evalJSONRequest:function(req){
+return MochiKit.Base.evalJSON(req.responseText);
+},succeed:function(_2a2){
+var d=new MochiKit.Async.Deferred();
+d.callback.apply(d,arguments);
+return d;
+},fail:function(_2a4){
+var d=new MochiKit.Async.Deferred();
+d.errback.apply(d,arguments);
+return d;
+},getXMLHttpRequest:function(){
+var self=arguments.callee;
+if(!self.XMLHttpRequest){
+var _2a7=[function(){
+return new XMLHttpRequest();
+},function(){
+return new ActiveXObject("Msxml2.XMLHTTP");
+},function(){
+return new ActiveXObject("Microsoft.XMLHTTP");
+},function(){
+return new ActiveXObject("Msxml2.XMLHTTP.4.0");
+},function(){
+throw new MochiKit.Async.BrowserComplianceError("Browser does not support XMLHttpRequest");
+}];
+for(var i=0;i<_2a7.length;i++){
+var func=_2a7[i];
+try{
+self.XMLHttpRequest=func;
+return func();
+}
+catch(e){
+}
+}
+}
+return self.XMLHttpRequest();
+},_xhr_onreadystatechange:function(d){
+var m=MochiKit.Base;
+if(this.readyState==4){
+try{
+this.onreadystatechange=null;
+}
+catch(e){
+try{
+this.onreadystatechange=m.noop;
+}
+catch(e){
+}
+}
+var _2ac=null;
+try{
+_2ac=this.status;
+if(!_2ac&&m.isNotEmpty(this.responseText)){
+_2ac=304;
+}
+}
+catch(e){
+}
+if(_2ac==200||_2ac==201||_2ac==204||_2ac==304||_2ac==1223){
+d.callback(this);
+}else{
+var err=new MochiKit.Async.XMLHttpRequestError(this,"Request failed");
+if(err.number){
+d.errback(err);
+}else{
+d.errback(err);
+}
+}
+}
+},_xhr_canceller:function(req){
+try{
+req.onreadystatechange=null;
+}
+catch(e){
+try{
+req.onreadystatechange=MochiKit.Base.noop;
+}
+catch(e){
+}
+}
+req.abort();
+},sendXMLHttpRequest:function(req,_2b0){
+if(typeof (_2b0)=="undefined"||_2b0===null){
+_2b0="";
+}
+var m=MochiKit.Base;
+var self=MochiKit.Async;
+var d=new self.Deferred(m.partial(self._xhr_canceller,req));
+try{
+req.onreadystatechange=m.bind(self._xhr_onreadystatechange,req,d);
+req.send(_2b0);
+}
+catch(e){
+try{
+req.onreadystatechange=null;
+}
+catch(ignore){
+}
+d.errback(e);
+}
+return d;
+},doXHR:function(url,opts){
+var self=MochiKit.Async;
+return self.callLater(0,self._doXHR,url,opts);
+},_doXHR:function(url,opts){
+var m=MochiKit.Base;
+opts=m.update({method:"GET",sendContent:""},opts);
+var self=MochiKit.Async;
+var req=self.getXMLHttpRequest();
+if(opts.queryString){
+var qs=m.queryString(opts.queryString);
+if(qs){
+url+="?"+qs;
+}
+}
+if("username" in opts){
+req.open(opts.method,url,true,opts.username,opts.password);
+}else{
+req.open(opts.method,url,true);
+}
+if(req.overrideMimeType&&opts.mimeType){
+req.overrideMimeType(opts.mimeType);
+}
+req.setRequestHeader("X-Requested-With","XMLHttpRequest");
+if(opts.headers){
+var _2bd=opts.headers;
+if(!m.isArrayLike(_2bd)){
+_2bd=m.items(_2bd);
+}
+for(var i=0;i<_2bd.length;i++){
+var _2bf=_2bd[i];
+var name=_2bf[0];
+var _2c1=_2bf[1];
+req.setRequestHeader(name,_2c1);
+}
+}
+return self.sendXMLHttpRequest(req,opts.sendContent);
+},_buildURL:function(url){
+if(arguments.length>1){
+var m=MochiKit.Base;
+var qs=m.queryString.apply(null,m.extend(null,arguments,1));
+if(qs){
+return url+"?"+qs;
+}
+}
+return url;
+},doSimpleXMLHttpRequest:function(url){
+var self=MochiKit.Async;
+url=self._buildURL.apply(self,arguments);
+return self.doXHR(url);
+},loadJSONDoc:function(url){
+var self=MochiKit.Async;
+url=self._buildURL.apply(self,arguments);
+var d=self.doXHR(url,{"mimeType":"text/plain","headers":[["Accept","application/json"]]});
+d=d.addCallback(self.evalJSONRequest);
+return d;
+},wait:function(_2ca,_2cb){
+var d=new MochiKit.Async.Deferred();
+var m=MochiKit.Base;
+if(typeof (_2cb)!="undefined"){
+d.addCallback(function(){
+return _2cb;
+});
+}
+var _2ce=setTimeout(m.bind("callback",d),Math.floor(_2ca*1000));
+d.canceller=function(){
+try{
+clearTimeout(_2ce);
+}
+catch(e){
+}
+};
+return d;
+},callLater:function(_2cf,func){
+var m=MochiKit.Base;
+var _2d2=m.partial.apply(m,m.extend(null,arguments,1));
+return MochiKit.Async.wait(_2cf).addCallback(function(res){
+return _2d2();
+});
+}});
+MochiKit.Async.DeferredLock=function(){
+this.waiting=[];
+this.locked=false;
+this.id=this._nextId();
+};
+MochiKit.Async.DeferredLock.prototype={__class__:MochiKit.Async.DeferredLock,acquire:function(){
+var d=new MochiKit.Async.Deferred();
+if(this.locked){
+this.waiting.push(d);
+}else{
+this.locked=true;
+d.callback(this);
+}
+return d;
+},release:function(){
+if(!this.locked){
+throw TypeError("Tried to release an unlocked DeferredLock");
+}
+this.locked=false;
+if(this.waiting.length>0){
+this.locked=true;
+this.waiting.shift().callback(this);
+}
+},_nextId:MochiKit.Base.counter(),repr:function(){
+var _2d5;
+if(this.locked){
+_2d5="locked, "+this.waiting.length+" waiting";
+}else{
+_2d5="unlocked";
+}
+return "DeferredLock("+this.id+", "+_2d5+")";
+},toString:MochiKit.Base.forwardCall("repr")};
+MochiKit.Async.DeferredList=function(list,_2d7,_2d8,_2d9,_2da){
+MochiKit.Async.Deferred.apply(this,[_2da]);
+this.list=list;
+var _2db=[];
+this.resultList=_2db;
+this.finishedCount=0;
+this.fireOnOneCallback=_2d7;
+this.fireOnOneErrback=_2d8;
+this.consumeErrors=_2d9;
+var cb=MochiKit.Base.bind(this._cbDeferred,this);
+for(var i=0;i<list.length;i++){
+var d=list[i];
+_2db.push(undefined);
+d.addCallback(cb,i,true);
+d.addErrback(cb,i,false);
+}
+if(list.length===0&&!_2d7){
+this.callback(this.resultList);
+}
+};
+MochiKit.Async.DeferredList.prototype=new MochiKit.Async.Deferred();
+MochiKit.Async.DeferredList.prototype._cbDeferred=function(_2df,_2e0,_2e1){
+this.resultList[_2df]=[_2e0,_2e1];
+this.finishedCount+=1;
+if(this.fired==-1){
+if(_2e0&&this.fireOnOneCallback){
+this.callback([_2df,_2e1]);
+}else{
+if(!_2e0&&this.fireOnOneErrback){
+this.errback(_2e1);
+}else{
+if(this.finishedCount==this.list.length){
+this.callback(this.resultList);
+}
+}
+}
+}
+if(!_2e0&&this.consumeErrors){
+_2e1=null;
+}
+return _2e1;
+};
+MochiKit.Async.gatherResults=function(_2e2){
+var d=new MochiKit.Async.DeferredList(_2e2,false,true,false);
+d.addCallback(function(_2e4){
+var ret=[];
+for(var i=0;i<_2e4.length;i++){
+ret.push(_2e4[i][1]);
+}
+return ret;
+});
+return d;
+};
+MochiKit.Async.maybeDeferred=function(func){
+var self=MochiKit.Async;
+var _2e9;
+try{
+var r=func.apply(null,MochiKit.Base.extend([],arguments,1));
+if(r instanceof self.Deferred){
+_2e9=r;
+}else{
+if(r instanceof Error){
+_2e9=self.fail(r);
+}else{
+_2e9=self.succeed(r);
+}
+}
+}
+catch(e){
+_2e9=self.fail(e);
+}
+return _2e9;
+};
+MochiKit.Async.EXPORT=["AlreadyCalledError","CancelledError","BrowserComplianceError","GenericError","XMLHttpRequestError","Deferred","succeed","fail","getXMLHttpRequest","doSimpleXMLHttpRequest","loadJSONDoc","wait","callLater","sendXMLHttpRequest","DeferredLock","DeferredList","gatherResults","maybeDeferred","doXHR"];
+MochiKit.Async.EXPORT_OK=["evalJSONRequest"];
+MochiKit.Async.__new__=function(){
+var m=MochiKit.Base;
+var ne=m.partial(m._newNamedError,this);
+ne("AlreadyCalledError",function(_2ed){
+this.deferred=_2ed;
+});
+ne("CancelledError",function(_2ee){
+this.deferred=_2ee;
+});
+ne("BrowserComplianceError",function(msg){
+this.message=msg;
+});
+ne("GenericError",function(msg){
+this.message=msg;
+});
+ne("XMLHttpRequestError",function(req,msg){
+this.req=req;
+this.message=msg;
+try{
+this.number=req.status;
+}
+catch(e){
+}
+});
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+};
+MochiKit.Async.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.Async);
+MochiKit.Base._deps("DOM",["Base"]);
+MochiKit.DOM.NAME="MochiKit.DOM";
+MochiKit.DOM.VERSION="1.4.2";
+MochiKit.DOM.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.DOM.toString=function(){
+return this.__repr__();
+};
+MochiKit.DOM.EXPORT=["removeEmptyTextNodes","formContents","currentWindow","currentDocument","withWindow","withDocument","registerDOMConverter","coerceToDOM","createDOM","createDOMFunc","isChildNode","getNodeAttribute","removeNodeAttribute","setNodeAttribute","updateNodeAttributes","appendChildNodes","insertSiblingNodesAfter","insertSiblingNodesBefore","replaceChildNodes","removeElement","swapDOM","BUTTON","TT","PRE","H1","H2","H3","H4","H5","H6","BR","CANVAS","HR","LABEL","TEXTAREA","FORM","STRONG","SELECT","OPTION","OPTGROUP","LEGEND","FIELDSET","P","UL","OL","LI","DL","DT","DD","TD","TR","THEAD","TBODY","TFOOT","TABLE","TH","INPUT","SPAN","A","DIV","IMG","getElement","$","getElementsByTagAndClassName","addToCallStack","addLoadEvent","focusOnLoad","setElementClass","toggleElementClass","addElementClass","removeElementClass","swapElementClass","hasElementClass","computedStyle","escapeHTML","toHTML","emitHTML","scrapeText","getFirstParentByTagAndClassName","getFirstElementByTagAndClassName"];
+MochiKit.DOM.EXPORT_OK=["domConverters"];
+MochiKit.DOM.DEPRECATED=[["computedStyle","MochiKit.Style.getStyle","1.4"],["elementDimensions","MochiKit.Style.getElementDimensions","1.4"],["elementPosition","MochiKit.Style.getElementPosition","1.4"],["getViewportDimensions","MochiKit.Style.getViewportDimensions","1.4"],["hideElement","MochiKit.Style.hideElement","1.4"],["makeClipping","MochiKit.Style.makeClipping","1.4.1"],["makePositioned","MochiKit.Style.makePositioned","1.4.1"],["setElementDimensions","MochiKit.Style.setElementDimensions","1.4"],["setElementPosition","MochiKit.Style.setElementPosition","1.4"],["setDisplayForElement","MochiKit.Style.setDisplayForElement","1.4"],["setOpacity","MochiKit.Style.setOpacity","1.4"],["showElement","MochiKit.Style.showElement","1.4"],["undoClipping","MochiKit.Style.undoClipping","1.4.1"],["undoPositioned","MochiKit.Style.undoPositioned","1.4.1"],["Coordinates","MochiKit.Style.Coordinates","1.4"],["Dimensions","MochiKit.Style.Dimensions","1.4"]];
+MochiKit.Base.update(MochiKit.DOM,{currentWindow:function(){
+return MochiKit.DOM._window;
+},currentDocument:function(){
+return MochiKit.DOM._document;
+},withWindow:function(win,func){
+var self=MochiKit.DOM;
+var _2f6=self._document;
+var _2f7=self._window;
+var rval;
+try{
+self._window=win;
+self._document=win.document;
+rval=func();
+}
+catch(e){
+self._window=_2f7;
+self._document=_2f6;
+throw e;
+}
+self._window=_2f7;
+self._document=_2f6;
+return rval;
+},formContents:function(elem){
+var _2fa=[];
+var _2fb=[];
+var m=MochiKit.Base;
+var self=MochiKit.DOM;
+if(typeof (elem)=="undefined"||elem===null){
+elem=self._document.body;
+}else{
+elem=self.getElement(elem);
+}
+m.nodeWalk(elem,function(elem){
+var name=elem.name;
+if(m.isNotEmpty(name)){
+var _300=elem.tagName.toUpperCase();
+if(_300==="INPUT"&&(elem.type=="radio"||elem.type=="checkbox")&&!elem.checked){
+return null;
+}
+if(_300==="SELECT"){
+if(elem.type=="select-one"){
+if(elem.selectedIndex>=0){
+var opt=elem.options[elem.selectedIndex];
+var v=opt.value;
+if(!v){
+var h=opt.outerHTML;
+if(h&&!h.match(/^[^>]+\svalue\s*=/i)){
+v=opt.text;
+}
+}
+_2fa.push(name);
+_2fb.push(v);
+return null;
+}
+_2fa.push(name);
+_2fb.push("");
+return null;
+}else{
+var opts=elem.options;
+if(!opts.length){
+_2fa.push(name);
+_2fb.push("");
+return null;
+}
+for(var i=0;i<opts.length;i++){
+var opt=opts[i];
+if(!opt.selected){
+continue;
+}
+var v=opt.value;
+if(!v){
+var h=opt.outerHTML;
+if(h&&!h.match(/^[^>]+\svalue\s*=/i)){
+v=opt.text;
+}
+}
+_2fa.push(name);
+_2fb.push(v);
+}
+return null;
+}
+}
+if(_300==="FORM"||_300==="P"||_300==="SPAN"||_300==="DIV"){
+return elem.childNodes;
+}
+_2fa.push(name);
+_2fb.push(elem.value||"");
+return null;
+}
+return elem.childNodes;
+});
+return [_2fa,_2fb];
+},withDocument:function(doc,func){
+var self=MochiKit.DOM;
+var _309=self._document;
+var rval;
+try{
+self._document=doc;
+rval=func();
+}
+catch(e){
+self._document=_309;
+throw e;
+}
+self._document=_309;
+return rval;
+},registerDOMConverter:function(name,_30c,wrap,_30e){
+MochiKit.DOM.domConverters.register(name,_30c,wrap,_30e);
+},coerceToDOM:function(node,ctx){
+var m=MochiKit.Base;
+var im=MochiKit.Iter;
+var self=MochiKit.DOM;
+if(im){
+var iter=im.iter;
+var _315=im.repeat;
+}
+var map=m.map;
+var _317=self.domConverters;
+var _318=arguments.callee;
+var _319=m.NotFound;
+while(true){
+if(typeof (node)=="undefined"||node===null){
+return null;
+}
+if(typeof (node)=="function"&&typeof (node.length)=="number"&&!(node instanceof Function)){
+node=im?im.list(node):m.extend(null,node);
+}
+if(typeof (node.nodeType)!="undefined"&&node.nodeType>0){
+return node;
+}
+if(typeof (node)=="number"||typeof (node)=="boolean"){
+node=node.toString();
+}
+if(typeof (node)=="string"){
+return self._document.createTextNode(node);
+}
+if(typeof (node.__dom__)=="function"){
+node=node.__dom__(ctx);
+continue;
+}
+if(typeof (node.dom)=="function"){
+node=node.dom(ctx);
+continue;
+}
+if(typeof (node)=="function"){
+node=node.apply(ctx,[ctx]);
+continue;
+}
+if(im){
+var _31a=null;
+try{
+_31a=iter(node);
+}
+catch(e){
+}
+if(_31a){
+return map(_318,_31a,_315(ctx));
+}
+}else{
+if(m.isArrayLike(node)){
+var func=function(n){
+return _318(n,ctx);
+};
+return map(func,node);
+}
+}
+try{
+node=_317.match(node,ctx);
+continue;
+}
+catch(e){
+if(e!=_319){
+throw e;
+}
+}
+return self._document.createTextNode(node.toString());
+}
+return undefined;
+},isChildNode:function(node,_31e){
+var self=MochiKit.DOM;
+if(typeof (node)=="string"){
+node=self.getElement(node);
+}
+if(typeof (_31e)=="string"){
+_31e=self.getElement(_31e);
+}
+if(typeof (node)=="undefined"||node===null){
+return false;
+}
+while(node!=null&&node!==self._document){
+if(node===_31e){
+return true;
+}
+node=node.parentNode;
+}
+return false;
+},setNodeAttribute:function(node,attr,_322){
+var o={};
+o[attr]=_322;
+try{
+return MochiKit.DOM.updateNodeAttributes(node,o);
+}
+catch(e){
+}
+return null;
+},getNodeAttribute:function(node,attr){
+var self=MochiKit.DOM;
+var _327=self.attributeArray.renames[attr];
+var _328=self.attributeArray.ignoreAttr[attr];
+node=self.getElement(node);
+try{
+if(_327){
+return node[_327];
+}
+var _329=node.getAttribute(attr);
+if(_329!=_328){
+return _329;
+}
+}
+catch(e){
+}
+return null;
+},removeNodeAttribute:function(node,attr){
+var self=MochiKit.DOM;
+var _32d=self.attributeArray.renames[attr];
+node=self.getElement(node);
+try{
+if(_32d){
+return node[_32d];
+}
+return node.removeAttribute(attr);
+}
+catch(e){
+}
+return null;
+},updateNodeAttributes:function(node,_32f){
+var elem=node;
+var self=MochiKit.DOM;
+if(typeof (node)=="string"){
+elem=self.getElement(node);
+}
+if(_32f){
+var _332=MochiKit.Base.updatetree;
+if(self.attributeArray.compliant){
+for(var k in _32f){
+var v=_32f[k];
+if(typeof (v)=="object"&&typeof (elem[k])=="object"){
+if(k=="style"&&MochiKit.Style){
+MochiKit.Style.setStyle(elem,v);
+}else{
+_332(elem[k],v);
+}
+}else{
+if(k.substring(0,2)=="on"){
+if(typeof (v)=="string"){
+v=new Function(v);
+}
+elem[k]=v;
+}else{
+elem.setAttribute(k,v);
+}
+}
+if(typeof (elem[k])=="string"&&elem[k]!=v){
+elem[k]=v;
+}
+}
+}else{
+var _335=self.attributeArray.renames;
+for(var k in _32f){
+v=_32f[k];
+var _336=_335[k];
+if(k=="style"&&typeof (v)=="string"){
+elem.style.cssText=v;
+}else{
+if(typeof (_336)=="string"){
+elem[_336]=v;
+}else{
+if(typeof (elem[k])=="object"&&typeof (v)=="object"){
+if(k=="style"&&MochiKit.Style){
+MochiKit.Style.setStyle(elem,v);
+}else{
+_332(elem[k],v);
+}
+}else{
+if(k.substring(0,2)=="on"){
+if(typeof (v)=="string"){
+v=new Function(v);
+}
+elem[k]=v;
+}else{
+elem.setAttribute(k,v);
+}
+}
+}
+}
+if(typeof (elem[k])=="string"&&elem[k]!=v){
+elem[k]=v;
+}
+}
+}
+}
+return elem;
+},appendChildNodes:function(node){
+var elem=node;
+var self=MochiKit.DOM;
+if(typeof (node)=="string"){
+elem=self.getElement(node);
+}
+var _33a=[self.coerceToDOM(MochiKit.Base.extend(null,arguments,1),elem)];
+var _33b=MochiKit.Base.concat;
+while(_33a.length){
+var n=_33a.shift();
+if(typeof (n)=="undefined"||n===null){
+}else{
+if(typeof (n.nodeType)=="number"){
+elem.appendChild(n);
+}else{
+_33a=_33b(n,_33a);
+}
+}
+}
+return elem;
+},insertSiblingNodesBefore:function(node){
+var elem=node;
+var self=MochiKit.DOM;
+if(typeof (node)=="string"){
+elem=self.getElement(node);
+}
+var _340=[self.coerceToDOM(MochiKit.Base.extend(null,arguments,1),elem)];
+var _341=elem.parentNode;
+var _342=MochiKit.Base.concat;
+while(_340.length){
+var n=_340.shift();
+if(typeof (n)=="undefined"||n===null){
+}else{
+if(typeof (n.nodeType)=="number"){
+_341.insertBefore(n,elem);
+}else{
+_340=_342(n,_340);
+}
+}
+}
+return _341;
+},insertSiblingNodesAfter:function(node){
+var elem=node;
+var self=MochiKit.DOM;
+if(typeof (node)=="string"){
+elem=self.getElement(node);
+}
+var _347=[self.coerceToDOM(MochiKit.Base.extend(null,arguments,1),elem)];
+if(elem.nextSibling){
+return self.insertSiblingNodesBefore(elem.nextSibling,_347);
+}else{
+return self.appendChildNodes(elem.parentNode,_347);
+}
+},replaceChildNodes:function(node){
+var elem=node;
+var self=MochiKit.DOM;
+if(typeof (node)=="string"){
+elem=self.getElement(node);
+arguments[0]=elem;
+}
+var _34b;
+while((_34b=elem.firstChild)){
+elem.removeChild(_34b);
+}
+if(arguments.length<2){
+return elem;
+}else{
+return self.appendChildNodes.apply(this,arguments);
+}
+},createDOM:function(name,_34d){
+var elem;
+var self=MochiKit.DOM;
+var m=MochiKit.Base;
+if(typeof (_34d)=="string"||typeof (_34d)=="number"){
+var args=m.extend([name,null],arguments,1);
+return arguments.callee.apply(this,args);
+}
+if(typeof (name)=="string"){
+var _352=self._xhtml;
+if(_34d&&!self.attributeArray.compliant){
+var _353="";
+if("name" in _34d){
+_353+=" name=\""+self.escapeHTML(_34d.name)+"\"";
+}
+if(name=="input"&&"type" in _34d){
+_353+=" type=\""+self.escapeHTML(_34d.type)+"\"";
+}
+if(_353){
+name="<"+name+_353+">";
+_352=false;
+}
+}
+var d=self._document;
+if(_352&&d===document){
+elem=d.createElementNS("http://www.w3.org/1999/xhtml",name);
+}else{
+elem=d.createElement(name);
+}
+}else{
+elem=name;
+}
+if(_34d){
+self.updateNodeAttributes(elem,_34d);
+}
+if(arguments.length<=2){
+return elem;
+}else{
+var args=m.extend([elem],arguments,2);
+return self.appendChildNodes.apply(this,args);
+}
+},createDOMFunc:function(){
+var m=MochiKit.Base;
+return m.partial.apply(this,m.extend([MochiKit.DOM.createDOM],arguments));
+},removeElement:function(elem){
+var self=MochiKit.DOM;
+var e=self.coerceToDOM(self.getElement(elem));
+e.parentNode.removeChild(e);
+return e;
+},swapDOM:function(dest,src){
+var self=MochiKit.DOM;
+dest=self.getElement(dest);
+var _35c=dest.parentNode;
+if(src){
+src=self.coerceToDOM(self.getElement(src),_35c);
+_35c.replaceChild(src,dest);
+}else{
+_35c.removeChild(dest);
+}
+return src;
+},getElement:function(id){
+var self=MochiKit.DOM;
+if(arguments.length==1){
+return ((typeof (id)=="string")?self._document.getElementById(id):id);
+}else{
+return MochiKit.Base.map(self.getElement,arguments);
+}
+},getElementsByTagAndClassName:function(_35f,_360,_361){
+var self=MochiKit.DOM;
+if(typeof (_35f)=="undefined"||_35f===null){
+_35f="*";
+}
+if(typeof (_361)=="undefined"||_361===null){
+_361=self._document;
+}
+_361=self.getElement(_361);
+if(_361==null){
+return [];
+}
+var _363=(_361.getElementsByTagName(_35f)||self._document.all);
+if(typeof (_360)=="undefined"||_360===null){
+return MochiKit.Base.extend(null,_363);
+}
+var _364=[];
+for(var i=0;i<_363.length;i++){
+var _366=_363[i];
+var cls=_366.className;
+if(typeof (cls)!="string"){
+cls=_366.getAttribute("class");
+}
+if(typeof (cls)=="string"){
+var _368=cls.split(" ");
+for(var j=0;j<_368.length;j++){
+if(_368[j]==_360){
+_364.push(_366);
+break;
+}
+}
+}
+}
+return _364;
+},_newCallStack:function(path,once){
+var rval=function(){
+var _36d=arguments.callee.callStack;
+for(var i=0;i<_36d.length;i++){
+if(_36d[i].apply(this,arguments)===false){
+break;
+}
+}
+if(once){
+try{
+this[path]=null;
+}
+catch(e){
+}
+}
+};
+rval.callStack=[];
+return rval;
+},addToCallStack:function(_36f,path,func,once){
+var self=MochiKit.DOM;
+var _374=_36f[path];
+var _375=_374;
+if(!(typeof (_374)=="function"&&typeof (_374.callStack)=="object"&&_374.callStack!==null)){
+_375=self._newCallStack(path,once);
+if(typeof (_374)=="function"){
+_375.callStack.push(_374);
+}
+_36f[path]=_375;
+}
+_375.callStack.push(func);
+},addLoadEvent:function(func){
+var self=MochiKit.DOM;
+self.addToCallStack(self._window,"onload",func,true);
+},focusOnLoad:function(_378){
+var self=MochiKit.DOM;
+self.addLoadEvent(function(){
+_378=self.getElement(_378);
+if(_378){
+_378.focus();
+}
+});
+},setElementClass:function(_37a,_37b){
+var self=MochiKit.DOM;
+var obj=self.getElement(_37a);
+if(self.attributeArray.compliant){
+obj.setAttribute("class",_37b);
+}else{
+obj.setAttribute("className",_37b);
+}
+},toggleElementClass:function(_37e){
+var self=MochiKit.DOM;
+for(var i=1;i<arguments.length;i++){
+var obj=self.getElement(arguments[i]);
+if(!self.addElementClass(obj,_37e)){
+self.removeElementClass(obj,_37e);
+}
+}
+},addElementClass:function(_382,_383){
+var self=MochiKit.DOM;
+var obj=self.getElement(_382);
+var cls=obj.className;
+if(typeof (cls)!="string"){
+cls=obj.getAttribute("class");
+}
+if(typeof (cls)!="string"||cls.length===0){
+self.setElementClass(obj,_383);
+return true;
+}
+if(cls==_383){
+return false;
+}
+var _387=cls.split(" ");
+for(var i=0;i<_387.length;i++){
+if(_387[i]==_383){
+return false;
+}
+}
+self.setElementClass(obj,cls+" "+_383);
+return true;
+},removeElementClass:function(_389,_38a){
+var self=MochiKit.DOM;
+var obj=self.getElement(_389);
+var cls=obj.className;
+if(typeof (cls)!="string"){
+cls=obj.getAttribute("class");
+}
+if(typeof (cls)!="string"||cls.length===0){
+return false;
+}
+if(cls==_38a){
+self.setElementClass(obj,"");
+return true;
+}
+var _38e=cls.split(" ");
+for(var i=0;i<_38e.length;i++){
+if(_38e[i]==_38a){
+_38e.splice(i,1);
+self.setElementClass(obj,_38e.join(" "));
+return true;
+}
+}
+return false;
+},swapElementClass:function(_390,_391,_392){
+var obj=MochiKit.DOM.getElement(_390);
+var res=MochiKit.DOM.removeElementClass(obj,_391);
+if(res){
+MochiKit.DOM.addElementClass(obj,_392);
+}
+return res;
+},hasElementClass:function(_395,_396){
+var obj=MochiKit.DOM.getElement(_395);
+if(obj==null){
+return false;
+}
+var cls=obj.className;
+if(typeof (cls)!="string"){
+cls=obj.getAttribute("class");
+}
+if(typeof (cls)!="string"){
+return false;
+}
+var _399=cls.split(" ");
+for(var i=1;i<arguments.length;i++){
+var good=false;
+for(var j=0;j<_399.length;j++){
+if(_399[j]==arguments[i]){
+good=true;
+break;
+}
+}
+if(!good){
+return false;
+}
+}
+return true;
+},escapeHTML:function(s){
+return s.replace(/&/g,"&amp;").replace(/"/g,"&quot;").replace(/</g,"&lt;").replace(/>/g,"&gt;");
+},toHTML:function(dom){
+return MochiKit.DOM.emitHTML(dom).join("");
+},emitHTML:function(dom,lst){
+if(typeof (lst)=="undefined"||lst===null){
+lst=[];
+}
+var _3a1=[dom];
+var self=MochiKit.DOM;
+var _3a3=self.escapeHTML;
+var _3a4=self.attributeArray;
+while(_3a1.length){
+dom=_3a1.pop();
+if(typeof (dom)=="string"){
+lst.push(dom);
+}else{
+if(dom.nodeType==1){
+lst.push("<"+dom.tagName.toLowerCase());
+var _3a5=[];
+var _3a6=_3a4(dom);
+for(var i=0;i<_3a6.length;i++){
+var a=_3a6[i];
+_3a5.push([" ",a.name,"=\"",_3a3(a.value),"\""]);
+}
+_3a5.sort();
+for(i=0;i<_3a5.length;i++){
+var _3a9=_3a5[i];
+for(var j=0;j<_3a9.length;j++){
+lst.push(_3a9[j]);
+}
+}
+if(dom.hasChildNodes()){
+lst.push(">");
+_3a1.push("</"+dom.tagName.toLowerCase()+">");
+var _3ab=dom.childNodes;
+for(i=_3ab.length-1;i>=0;i--){
+_3a1.push(_3ab[i]);
+}
+}else{
+lst.push("/>");
+}
+}else{
+if(dom.nodeType==3){
+lst.push(_3a3(dom.nodeValue));
+}
+}
+}
+}
+return lst;
+},scrapeText:function(node,_3ad){
+var rval=[];
+(function(node){
+var cn=node.childNodes;
+if(cn){
+for(var i=0;i<cn.length;i++){
+arguments.callee.call(this,cn[i]);
+}
+}
+var _3b2=node.nodeValue;
+if(typeof (_3b2)=="string"){
+rval.push(_3b2);
+}
+})(MochiKit.DOM.getElement(node));
+if(_3ad){
+return rval;
+}else{
+return rval.join("");
+}
+},removeEmptyTextNodes:function(_3b3){
+_3b3=MochiKit.DOM.getElement(_3b3);
+for(var i=0;i<_3b3.childNodes.length;i++){
+var node=_3b3.childNodes[i];
+if(node.nodeType==3&&!/\S/.test(node.nodeValue)){
+node.parentNode.removeChild(node);
+}
+}
+},getFirstElementByTagAndClassName:function(_3b6,_3b7,_3b8){
+var self=MochiKit.DOM;
+if(typeof (_3b6)=="undefined"||_3b6===null){
+_3b6="*";
+}
+if(typeof (_3b8)=="undefined"||_3b8===null){
+_3b8=self._document;
+}
+_3b8=self.getElement(_3b8);
+if(_3b8==null){
+return null;
+}
+var _3ba=(_3b8.getElementsByTagName(_3b6)||self._document.all);
+if(_3ba.length<=0){
+return null;
+}else{
+if(typeof (_3b7)=="undefined"||_3b7===null){
+return _3ba[0];
+}
+}
+for(var i=0;i<_3ba.length;i++){
+var _3bc=_3ba[i];
+var cls=_3bc.className;
+if(typeof (cls)!="string"){
+cls=_3bc.getAttribute("class");
+}
+if(typeof (cls)=="string"){
+var _3be=cls.split(" ");
+for(var j=0;j<_3be.length;j++){
+if(_3be[j]==_3b7){
+return _3bc;
+}
+}
+}
+}
+return null;
+},getFirstParentByTagAndClassName:function(elem,_3c1,_3c2){
+var self=MochiKit.DOM;
+elem=self.getElement(elem);
+if(typeof (_3c1)=="undefined"||_3c1===null){
+_3c1="*";
+}else{
+_3c1=_3c1.toUpperCase();
+}
+if(typeof (_3c2)=="undefined"||_3c2===null){
+_3c2=null;
+}
+if(elem){
+elem=elem.parentNode;
+}
+while(elem&&elem.tagName){
+var _3c4=elem.tagName.toUpperCase();
+if((_3c1==="*"||_3c1==_3c4)&&(_3c2===null||self.hasElementClass(elem,_3c2))){
+return elem;
+}
+elem=elem.parentNode;
+}
+return null;
+},__new__:function(win){
+var m=MochiKit.Base;
+if(typeof (document)!="undefined"){
+this._document=document;
+var _3c7="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul";
+this._xhtml=(document.documentElement&&document.createElementNS&&document.documentElement.namespaceURI===_3c7);
+}else{
+if(MochiKit.MockDOM){
+this._document=MochiKit.MockDOM.document;
+}
+}
+this._window=win;
+this.domConverters=new m.AdapterRegistry();
+var _3c8=this._document.createElement("span");
+var _3c9;
+if(_3c8&&_3c8.attributes&&_3c8.attributes.length>0){
+var _3ca=m.filter;
+_3c9=function(node){
+return _3ca(_3c9.ignoreAttrFilter,node.attributes);
+};
+_3c9.ignoreAttr={};
+var _3cc=_3c8.attributes;
+var _3cd=_3c9.ignoreAttr;
+for(var i=0;i<_3cc.length;i++){
+var a=_3cc[i];
+_3cd[a.name]=a.value;
+}
+_3c9.ignoreAttrFilter=function(a){
+return (_3c9.ignoreAttr[a.name]!=a.value);
+};
+_3c9.compliant=false;
+_3c9.renames={"class":"className","checked":"defaultChecked","usemap":"useMap","for":"htmlFor","readonly":"readOnly","colspan":"colSpan","bgcolor":"bgColor","cellspacing":"cellSpacing","cellpadding":"cellPadding"};
+}else{
+_3c9=function(node){
+return node.attributes;
+};
+_3c9.compliant=true;
+_3c9.ignoreAttr={};
+_3c9.renames={};
+}
+this.attributeArray=_3c9;
+var _3d2=function(_3d3,arr){
+var _3d5=arr[0];
+var _3d6=arr[1];
+var _3d7=_3d6.split(".")[1];
+var str="";
+str+="if (!MochiKit."+_3d7+") { throw new Error(\"";
+str+="This function has been deprecated and depends on MochiKit.";
+str+=_3d7+".\");}";
+str+="return "+_3d6+".apply(this, arguments);";
+MochiKit[_3d3][_3d5]=new Function(str);
+};
+for(var i=0;i<MochiKit.DOM.DEPRECATED.length;i++){
+_3d2("DOM",MochiKit.DOM.DEPRECATED[i]);
+}
+var _3d9=this.createDOMFunc;
+this.UL=_3d9("ul");
+this.OL=_3d9("ol");
+this.LI=_3d9("li");
+this.DL=_3d9("dl");
+this.DT=_3d9("dt");
+this.DD=_3d9("dd");
+this.TD=_3d9("td");
+this.TR=_3d9("tr");
+this.TBODY=_3d9("tbody");
+this.THEAD=_3d9("thead");
+this.TFOOT=_3d9("tfoot");
+this.TABLE=_3d9("table");
+this.TH=_3d9("th");
+this.INPUT=_3d9("input");
+this.SPAN=_3d9("span");
+this.A=_3d9("a");
+this.DIV=_3d9("div");
+this.IMG=_3d9("img");
+this.BUTTON=_3d9("button");
+this.TT=_3d9("tt");
+this.PRE=_3d9("pre");
+this.H1=_3d9("h1");
+this.H2=_3d9("h2");
+this.H3=_3d9("h3");
+this.H4=_3d9("h4");
+this.H5=_3d9("h5");
+this.H6=_3d9("h6");
+this.BR=_3d9("br");
+this.HR=_3d9("hr");
+this.LABEL=_3d9("label");
+this.TEXTAREA=_3d9("textarea");
+this.FORM=_3d9("form");
+this.P=_3d9("p");
+this.SELECT=_3d9("select");
+this.OPTION=_3d9("option");
+this.OPTGROUP=_3d9("optgroup");
+this.LEGEND=_3d9("legend");
+this.FIELDSET=_3d9("fieldset");
+this.STRONG=_3d9("strong");
+this.CANVAS=_3d9("canvas");
+this.$=this.getElement;
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+}});
+MochiKit.DOM.__new__(((typeof (window)=="undefined")?this:window));
+if(MochiKit.__export__){
+withWindow=MochiKit.DOM.withWindow;
+withDocument=MochiKit.DOM.withDocument;
+}
+MochiKit.Base._exportSymbols(this,MochiKit.DOM);
+MochiKit.Base._deps("Selector",["Base","DOM","Iter"]);
+MochiKit.Selector.NAME="MochiKit.Selector";
+MochiKit.Selector.VERSION="1.4.2";
+MochiKit.Selector.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Selector.toString=function(){
+return this.__repr__();
+};
+MochiKit.Selector.EXPORT=["Selector","findChildElements","findDocElements","$$"];
+MochiKit.Selector.EXPORT_OK=[];
+MochiKit.Selector.Selector=function(_3da){
+this.params={classNames:[],pseudoClassNames:[]};
+this.expression=_3da.toString().replace(/(^\s+|\s+$)/g,"");
+this.parseExpression();
+this.compileMatcher();
+};
+MochiKit.Selector.Selector.prototype={__class__:MochiKit.Selector.Selector,parseExpression:function(){
+function abort(_3db){
+throw "Parse error in selector: "+_3db;
+}
+if(this.expression==""){
+abort("empty expression");
+}
+var repr=MochiKit.Base.repr;
+var _3dd=this.params;
+var expr=this.expression;
+var _3df,_3e0,_3e1,rest;
+while(_3df=expr.match(/^(.*)\[([a-z0-9_:-]+?)(?:([~\|!^$*]?=)(?:"([^"]*)"|([^\]\s]*)))?\]$/i)){
+_3dd.attributes=_3dd.attributes||[];
+_3dd.attributes.push({name:_3df[2],operator:_3df[3],value:_3df[4]||_3df[5]||""});
+expr=_3df[1];
+}
+if(expr=="*"){
+return this.params.wildcard=true;
+}
+while(_3df=expr.match(/^([^a-z0-9_-])?([a-z0-9_-]+(?:\([^)]*\))?)(.*)/i)){
+_3e0=_3df[1];
+_3e1=_3df[2];
+rest=_3df[3];
+switch(_3e0){
+case "#":
+_3dd.id=_3e1;
+break;
+case ".":
+_3dd.classNames.push(_3e1);
+break;
+case ":":
+_3dd.pseudoClassNames.push(_3e1);
+break;
+case "":
+case undefined:
+_3dd.tagName=_3e1.toUpperCase();
+break;
+default:
+abort(repr(expr));
+}
+expr=rest;
+}
+if(expr.length>0){
+abort(repr(expr));
+}
+},buildMatchExpression:function(){
+var repr=MochiKit.Base.repr;
+var _3e4=this.params;
+var _3e5=[];
+var _3e6,i;
+function childElements(_3e8){
+return "MochiKit.Base.filter(function (node) { return node.nodeType == 1; }, "+_3e8+".childNodes)";
+}
+if(_3e4.wildcard){
+_3e5.push("true");
+}
+if(_3e6=_3e4.id){
+_3e5.push("element.id == "+repr(_3e6));
+}
+if(_3e6=_3e4.tagName){
+_3e5.push("element.tagName.toUpperCase() == "+repr(_3e6));
+}
+if((_3e6=_3e4.classNames).length>0){
+for(i=0;i<_3e6.length;i++){
+_3e5.push("MochiKit.DOM.hasElementClass(element, "+repr(_3e6[i])+")");
+}
+}
+if((_3e6=_3e4.pseudoClassNames).length>0){
+for(i=0;i<_3e6.length;i++){
+var _3e9=_3e6[i].match(/^([^(]+)(?:\((.*)\))?$/);
+var _3ea=_3e9[1];
+var _3eb=_3e9[2];
+switch(_3ea){
+case "root":
+_3e5.push("element.nodeType == 9 || element === element.ownerDocument.documentElement");
+break;
+case "nth-child":
+case "nth-last-child":
+case "nth-of-type":
+case "nth-last-of-type":
+_3e9=_3eb.match(/^((?:(\d+)n\+)?(\d+)|odd|even)$/);
+if(!_3e9){
+throw "Invalid argument to pseudo element nth-child: "+_3eb;
+}
+var a,b;
+if(_3e9[0]=="odd"){
+a=2;
+b=1;
+}else{
+if(_3e9[0]=="even"){
+a=2;
+b=0;
+}else{
+a=_3e9[2]&&parseInt(_3e9)||null;
+b=parseInt(_3e9[3]);
+}
+}
+_3e5.push("this.nthChild(element,"+a+","+b+","+!!_3ea.match("^nth-last")+","+!!_3ea.match("of-type$")+")");
+break;
+case "first-child":
+_3e5.push("this.nthChild(element, null, 1)");
+break;
+case "last-child":
+_3e5.push("this.nthChild(element, null, 1, true)");
+break;
+case "first-of-type":
+_3e5.push("this.nthChild(element, null, 1, false, true)");
+break;
+case "last-of-type":
+_3e5.push("this.nthChild(element, null, 1, true, true)");
+break;
+case "only-child":
+_3e5.push(childElements("element.parentNode")+".length == 1");
+break;
+case "only-of-type":
+_3e5.push("MochiKit.Base.filter(function (node) { return node.tagName == element.tagName; }, "+childElements("element.parentNode")+").length == 1");
+break;
+case "empty":
+_3e5.push("element.childNodes.length == 0");
+break;
+case "enabled":
+_3e5.push("(this.isUIElement(element) && element.disabled === false)");
+break;
+case "disabled":
+_3e5.push("(this.isUIElement(element) && element.disabled === true)");
+break;
+case "checked":
+_3e5.push("(this.isUIElement(element) && element.checked === true)");
+break;
+case "not":
+var _3ee=new MochiKit.Selector.Selector(_3eb);
+_3e5.push("!( "+_3ee.buildMatchExpression()+")");
+break;
+}
+}
+}
+if(_3e6=_3e4.attributes){
+MochiKit.Base.map(function(_3ef){
+var _3f0="MochiKit.DOM.getNodeAttribute(element, "+repr(_3ef.name)+")";
+var _3f1=function(_3f2){
+return _3f0+".split("+repr(_3f2)+")";
+};
+_3e5.push(_3f0+" != null");
+switch(_3ef.operator){
+case "=":
+_3e5.push(_3f0+" == "+repr(_3ef.value));
+break;
+case "~=":
+_3e5.push("MochiKit.Base.findValue("+_3f1(" ")+", "+repr(_3ef.value)+") > -1");
+break;
+case "^=":
+_3e5.push(_3f0+".substring(0, "+_3ef.value.length+") == "+repr(_3ef.value));
+break;
+case "$=":
+_3e5.push(_3f0+".substring("+_3f0+".length - "+_3ef.value.length+") == "+repr(_3ef.value));
+break;
+case "*=":
+_3e5.push(_3f0+".match("+repr(_3ef.value)+")");
+break;
+case "|=":
+_3e5.push(_3f1("-")+"[0].toUpperCase() == "+repr(_3ef.value.toUpperCase()));
+break;
+case "!=":
+_3e5.push(_3f0+" != "+repr(_3ef.value));
+break;
+case "":
+case undefined:
+break;
+default:
+throw "Unknown operator "+_3ef.operator+" in selector";
+}
+},_3e6);
+}
+return _3e5.join(" && ");
+},compileMatcher:function(){
+var code="return (!element.tagName) ? false : "+this.buildMatchExpression()+";";
+this.match=new Function("element",code);
+},nthChild:function(_3f4,a,b,_3f7,_3f8){
+var _3f9=MochiKit.Base.filter(function(node){
+return node.nodeType==1;
+},_3f4.parentNode.childNodes);
+if(_3f8){
+_3f9=MochiKit.Base.filter(function(node){
+return node.tagName==_3f4.tagName;
+},_3f9);
+}
+if(_3f7){
+_3f9=MochiKit.Iter.reversed(_3f9);
+}
+if(a){
+var _3fc=MochiKit.Base.findIdentical(_3f9,_3f4);
+return ((_3fc+1-b)/a)%1==0;
+}else{
+return b==MochiKit.Base.findIdentical(_3f9,_3f4)+1;
+}
+},isUIElement:function(_3fd){
+return MochiKit.Base.findValue(["input","button","select","option","textarea","object"],_3fd.tagName.toLowerCase())>-1;
+},findElements:function(_3fe,axis){
+var _400;
+if(axis==undefined){
+axis="";
+}
+function inScope(_401,_402){
+if(axis==""){
+return MochiKit.DOM.isChildNode(_401,_402);
+}else{
+if(axis==">"){
+return _401.parentNode===_402;
+}else{
+if(axis=="+"){
+return _401===nextSiblingElement(_402);
+}else{
+if(axis=="~"){
+var _403=_402;
+while(_403=nextSiblingElement(_403)){
+if(_401===_403){
+return true;
+}
+}
+return false;
+}else{
+throw "Invalid axis: "+axis;
+}
+}
+}
+}
+}
+if(_400=MochiKit.DOM.getElement(this.params.id)){
+if(this.match(_400)){
+if(!_3fe||inScope(_400,_3fe)){
+return [_400];
+}
+}
+}
+function nextSiblingElement(node){
+node=node.nextSibling;
+while(node&&node.nodeType!=1){
+node=node.nextSibling;
+}
+return node;
+}
+if(axis==""){
+_3fe=(_3fe||MochiKit.DOM.currentDocument()).getElementsByTagName(this.params.tagName||"*");
+}else{
+if(axis==">"){
+if(!_3fe){
+throw "> combinator not allowed without preceeding expression";
+}
+_3fe=MochiKit.Base.filter(function(node){
+return node.nodeType==1;
+},_3fe.childNodes);
+}else{
+if(axis=="+"){
+if(!_3fe){
+throw "+ combinator not allowed without preceeding expression";
+}
+_3fe=nextSiblingElement(_3fe)&&[nextSiblingElement(_3fe)];
+}else{
+if(axis=="~"){
+if(!_3fe){
+throw "~ combinator not allowed without preceeding expression";
+}
+var _406=[];
+while(nextSiblingElement(_3fe)){
+_3fe=nextSiblingElement(_3fe);
+_406.push(_3fe);
+}
+_3fe=_406;
+}
+}
+}
+}
+if(!_3fe){
+return [];
+}
+var _407=MochiKit.Base.filter(MochiKit.Base.bind(function(_408){
+return this.match(_408);
+},this),_3fe);
+return _407;
+},repr:function(){
+return "Selector("+this.expression+")";
+},toString:MochiKit.Base.forwardCall("repr")};
+MochiKit.Base.update(MochiKit.Selector,{findChildElements:function(_409,_40a){
+var uniq=function(arr){
+var res=[];
+for(var i=0;i<arr.length;i++){
+if(MochiKit.Base.findIdentical(res,arr[i])<0){
+res.push(arr[i]);
+}
+}
+return res;
+};
+return MochiKit.Base.flattenArray(MochiKit.Base.map(function(_40f){
+var _410="";
+var _411=function(_412,expr){
+if(match=expr.match(/^[>+~]$/)){
+_410=match[0];
+return _412;
+}else{
+var _414=new MochiKit.Selector.Selector(expr);
+var _415=MochiKit.Iter.reduce(function(_416,_417){
+return MochiKit.Base.extend(_416,_414.findElements(_417||_409,_410));
+},_412,[]);
+_410="";
+return _415;
+}
+};
+var _418=_40f.replace(/(^\s+|\s+$)/g,"").split(/\s+/);
+return uniq(MochiKit.Iter.reduce(_411,_418,[null]));
+},_40a));
+},findDocElements:function(){
+return MochiKit.Selector.findChildElements(MochiKit.DOM.currentDocument(),arguments);
+},__new__:function(){
+var m=MochiKit.Base;
+this.$$=this.findDocElements;
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+}});
+MochiKit.Selector.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.Selector);
+MochiKit.Base._deps("Style",["Base","DOM"]);
+MochiKit.Style.NAME="MochiKit.Style";
+MochiKit.Style.VERSION="1.4.2";
+MochiKit.Style.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Style.toString=function(){
+return this.__repr__();
+};
+MochiKit.Style.EXPORT_OK=[];
+MochiKit.Style.EXPORT=["setStyle","setOpacity","getStyle","getElementDimensions","elementDimensions","setElementDimensions","getElementPosition","elementPosition","setElementPosition","makePositioned","undoPositioned","makeClipping","undoClipping","setDisplayForElement","hideElement","showElement","getViewportDimensions","getViewportPosition","Dimensions","Coordinates"];
+MochiKit.Style.Dimensions=function(w,h){
+this.w=w;
+this.h=h;
+};
+MochiKit.Style.Dimensions.prototype.__repr__=function(){
+var repr=MochiKit.Base.repr;
+return "{w: "+repr(this.w)+", h: "+repr(this.h)+"}";
+};
+MochiKit.Style.Dimensions.prototype.toString=function(){
+return this.__repr__();
+};
+MochiKit.Style.Coordinates=function(x,y){
+this.x=x;
+this.y=y;
+};
+MochiKit.Style.Coordinates.prototype.__repr__=function(){
+var repr=MochiKit.Base.repr;
+return "{x: "+repr(this.x)+", y: "+repr(this.y)+"}";
+};
+MochiKit.Style.Coordinates.prototype.toString=function(){
+return this.__repr__();
+};
+MochiKit.Base.update(MochiKit.Style,{getStyle:function(elem,_421){
+var dom=MochiKit.DOM;
+var d=dom._document;
+elem=dom.getElement(elem);
+_421=MochiKit.Base.camelize(_421);
+if(!elem||elem==d){
+return undefined;
+}
+if(_421=="opacity"&&typeof (elem.filters)!="undefined"){
+var _424=(MochiKit.Style.getStyle(elem,"filter")||"").match(/alpha\(opacity=(.*)\)/);
+if(_424&&_424[1]){
+return parseFloat(_424[1])/100;
+}
+return 1;
+}
+if(_421=="float"||_421=="cssFloat"||_421=="styleFloat"){
+if(elem.style["float"]){
+return elem.style["float"];
+}else{
+if(elem.style.cssFloat){
+return elem.style.cssFloat;
+}else{
+if(elem.style.styleFloat){
+return elem.style.styleFloat;
+}else{
+return "none";
+}
+}
+}
+}
+var _425=elem.style?elem.style[_421]:null;
+if(!_425){
+if(d.defaultView&&d.defaultView.getComputedStyle){
+var css=d.defaultView.getComputedStyle(elem,null);
+_421=_421.replace(/([A-Z])/g,"-$1").toLowerCase();
+_425=css?css.getPropertyValue(_421):null;
+}else{
+if(elem.currentStyle){
+_425=elem.currentStyle[_421];
+if(/^\d/.test(_425)&&!/px$/.test(_425)&&_421!="fontWeight"){
+var left=elem.style.left;
+var _428=elem.runtimeStyle.left;
+elem.runtimeStyle.left=elem.currentStyle.left;
+elem.style.left=_425||0;
+_425=elem.style.pixelLeft+"px";
+elem.style.left=left;
+elem.runtimeStyle.left=_428;
+}
+}
+}
+}
+if(_421=="opacity"){
+_425=parseFloat(_425);
+}
+if(/Opera/.test(navigator.userAgent)&&(MochiKit.Base.findValue(["left","top","right","bottom"],_421)!=-1)){
+if(MochiKit.Style.getStyle(elem,"position")=="static"){
+_425="auto";
+}
+}
+return _425=="auto"?null:_425;
+},setStyle:function(elem,_42a){
+elem=MochiKit.DOM.getElement(elem);
+for(var name in _42a){
+switch(name){
+case "opacity":
+MochiKit.Style.setOpacity(elem,_42a[name]);
+break;
+case "float":
+case "cssFloat":
+case "styleFloat":
+if(typeof (elem.style["float"])!="undefined"){
+elem.style["float"]=_42a[name];
+}else{
+if(typeof (elem.style.cssFloat)!="undefined"){
+elem.style.cssFloat=_42a[name];
+}else{
+elem.style.styleFloat=_42a[name];
+}
+}
+break;
+default:
+elem.style[MochiKit.Base.camelize(name)]=_42a[name];
+}
+}
+},setOpacity:function(elem,o){
+elem=MochiKit.DOM.getElement(elem);
+var self=MochiKit.Style;
+if(o==1){
+var _42f=/Gecko/.test(navigator.userAgent)&&!(/Konqueror|AppleWebKit|KHTML/.test(navigator.userAgent));
+elem.style["opacity"]=_42f?0.999999:1;
+if(/MSIE/.test(navigator.userAgent)){
+elem.style["filter"]=self.getStyle(elem,"filter").replace(/alpha\([^\)]*\)/gi,"");
+}
+}else{
+if(o<0.00001){
+o=0;
+}
+elem.style["opacity"]=o;
+if(/MSIE/.test(navigator.userAgent)){
+elem.style["filter"]=self.getStyle(elem,"filter").replace(/alpha\([^\)]*\)/gi,"")+"alpha(opacity="+o*100+")";
+}
+}
+},getElementPosition:function(elem,_431){
+var self=MochiKit.Style;
+var dom=MochiKit.DOM;
+elem=dom.getElement(elem);
+if(!elem||(!(elem.x&&elem.y)&&(!elem.parentNode===null||self.getStyle(elem,"display")=="none"))){
+return undefined;
+}
+var c=new self.Coordinates(0,0);
+var box=null;
+var _436=null;
+var d=MochiKit.DOM._document;
+var de=d.documentElement;
+var b=d.body;
+if(!elem.parentNode&&elem.x&&elem.y){
+c.x+=elem.x||0;
+c.y+=elem.y||0;
+}else{
+if(elem.getBoundingClientRect){
+box=elem.getBoundingClientRect();
+c.x+=box.left+(de.scrollLeft||b.scrollLeft)-(de.clientLeft||0);
+c.y+=box.top+(de.scrollTop||b.scrollTop)-(de.clientTop||0);
+}else{
+if(elem.offsetParent){
+c.x+=elem.offsetLeft;
+c.y+=elem.offsetTop;
+_436=elem.offsetParent;
+if(_436!=elem){
+while(_436){
+c.x+=parseInt(_436.style.borderLeftWidth)||0;
+c.y+=parseInt(_436.style.borderTopWidth)||0;
+c.x+=_436.offsetLeft;
+c.y+=_436.offsetTop;
+_436=_436.offsetParent;
+}
+}
+var ua=navigator.userAgent.toLowerCase();
+if((typeof (opera)!="undefined"&&parseFloat(opera.version())<9)||(ua.indexOf("AppleWebKit")!=-1&&self.getStyle(elem,"position")=="absolute")){
+c.x-=b.offsetLeft;
+c.y-=b.offsetTop;
+}
+if(elem.parentNode){
+_436=elem.parentNode;
+}else{
+_436=null;
+}
+while(_436){
+var _43b=_436.tagName.toUpperCase();
+if(_43b==="BODY"||_43b==="HTML"){
+break;
+}
+var disp=self.getStyle(_436,"display");
+if(disp.search(/^inline|table-row.*$/i)){
+c.x-=_436.scrollLeft;
+c.y-=_436.scrollTop;
+}
+if(_436.parentNode){
+_436=_436.parentNode;
+}else{
+_436=null;
+}
+}
+}
+}
+}
+if(typeof (_431)!="undefined"){
+_431=arguments.callee(_431);
+if(_431){
+c.x-=(_431.x||0);
+c.y-=(_431.y||0);
+}
+}
+return c;
+},setElementPosition:function(elem,_43e,_43f){
+elem=MochiKit.DOM.getElement(elem);
+if(typeof (_43f)=="undefined"){
+_43f="px";
+}
+var _440={};
+var _441=MochiKit.Base.isUndefinedOrNull;
+if(!_441(_43e.x)){
+_440["left"]=_43e.x+_43f;
+}
+if(!_441(_43e.y)){
+_440["top"]=_43e.y+_43f;
+}
+MochiKit.DOM.updateNodeAttributes(elem,{"style":_440});
+},makePositioned:function(_442){
+_442=MochiKit.DOM.getElement(_442);
+var pos=MochiKit.Style.getStyle(_442,"position");
+if(pos=="static"||!pos){
+_442.style.position="relative";
+if(/Opera/.test(navigator.userAgent)){
+_442.style.top=0;
+_442.style.left=0;
+}
+}
+},undoPositioned:function(_444){
+_444=MochiKit.DOM.getElement(_444);
+if(_444.style.position=="relative"){
+_444.style.position=_444.style.top=_444.style.left=_444.style.bottom=_444.style.right="";
+}
+},makeClipping:function(_445){
+_445=MochiKit.DOM.getElement(_445);
+var s=_445.style;
+var _447={"overflow":s.overflow,"overflow-x":s.overflowX,"overflow-y":s.overflowY};
+if((MochiKit.Style.getStyle(_445,"overflow")||"visible")!="hidden"){
+_445.style.overflow="hidden";
+_445.style.overflowX="hidden";
+_445.style.overflowY="hidden";
+}
+return _447;
+},undoClipping:function(_448,_449){
+_448=MochiKit.DOM.getElement(_448);
+if(typeof (_449)=="string"){
+_448.style.overflow=_449;
+}else{
+if(_449!=null){
+_448.style.overflow=_449["overflow"];
+_448.style.overflowX=_449["overflow-x"];
+_448.style.overflowY=_449["overflow-y"];
+}
+}
+},getElementDimensions:function(elem,_44b){
+var self=MochiKit.Style;
+var dom=MochiKit.DOM;
+if(typeof (elem.w)=="number"||typeof (elem.h)=="number"){
+return new self.Dimensions(elem.w||0,elem.h||0);
+}
+elem=dom.getElement(elem);
+if(!elem){
+return undefined;
+}
+var disp=self.getStyle(elem,"display");
+if(disp=="none"||disp==""||typeof (disp)=="undefined"){
+var s=elem.style;
+var _450=s.visibility;
+var _451=s.position;
+var _452=s.display;
+s.visibility="hidden";
+s.position="absolute";
+s.display=self._getDefaultDisplay(elem);
+var _453=elem.offsetWidth;
+var _454=elem.offsetHeight;
+s.display=_452;
+s.position=_451;
+s.visibility=_450;
+}else{
+_453=elem.offsetWidth||0;
+_454=elem.offsetHeight||0;
+}
+if(_44b){
+var _455="colSpan" in elem&&"rowSpan" in elem;
+var _456=(_455&&elem.parentNode&&self.getStyle(elem.parentNode,"borderCollapse")=="collapse");
+if(_456){
+if(/MSIE/.test(navigator.userAgent)){
+var _457=elem.previousSibling?0.5:1;
+var _458=elem.nextSibling?0.5:1;
+}else{
+var _457=0.5;
+var _458=0.5;
+}
+}else{
+var _457=1;
+var _458=1;
+}
+_453-=Math.round((parseFloat(self.getStyle(elem,"paddingLeft"))||0)+(parseFloat(self.getStyle(elem,"paddingRight"))||0)+_457*(parseFloat(self.getStyle(elem,"borderLeftWidth"))||0)+_458*(parseFloat(self.getStyle(elem,"borderRightWidth"))||0));
+if(_455){
+if(/Gecko|Opera/.test(navigator.userAgent)&&!/Konqueror|AppleWebKit|KHTML/.test(navigator.userAgent)){
+var _459=0;
+}else{
+if(/MSIE/.test(navigator.userAgent)){
+var _459=1;
+}else{
+var _459=_456?0.5:1;
+}
+}
+}else{
+var _459=1;
+}
+_454-=Math.round((parseFloat(self.getStyle(elem,"paddingTop"))||0)+(parseFloat(self.getStyle(elem,"paddingBottom"))||0)+_459*((parseFloat(self.getStyle(elem,"borderTopWidth"))||0)+(parseFloat(self.getStyle(elem,"borderBottomWidth"))||0)));
+}
+return new self.Dimensions(_453,_454);
+},setElementDimensions:function(elem,_45b,_45c){
+elem=MochiKit.DOM.getElement(elem);
+if(typeof (_45c)=="undefined"){
+_45c="px";
+}
+var _45d={};
+var _45e=MochiKit.Base.isUndefinedOrNull;
+if(!_45e(_45b.w)){
+_45d["width"]=_45b.w+_45c;
+}
+if(!_45e(_45b.h)){
+_45d["height"]=_45b.h+_45c;
+}
+MochiKit.DOM.updateNodeAttributes(elem,{"style":_45d});
+},_getDefaultDisplay:function(elem){
+var self=MochiKit.Style;
+var dom=MochiKit.DOM;
+elem=dom.getElement(elem);
+if(!elem){
+return undefined;
+}
+var _462=elem.tagName.toUpperCase();
+return self._defaultDisplay[_462]||"block";
+},setDisplayForElement:function(_463,_464){
+var _465=MochiKit.Base.extend(null,arguments,1);
+var _466=MochiKit.DOM.getElement;
+for(var i=0;i<_465.length;i++){
+_464=_466(_465[i]);
+if(_464){
+_464.style.display=_463;
+}
+}
+},getViewportDimensions:function(){
+var d=new MochiKit.Style.Dimensions();
+var w=MochiKit.DOM._window;
+var b=MochiKit.DOM._document.body;
+if(w.innerWidth){
+d.w=w.innerWidth;
+d.h=w.innerHeight;
+}else{
+if(b&&b.parentElement&&b.parentElement.clientWidth){
+d.w=b.parentElement.clientWidth;
+d.h=b.parentElement.clientHeight;
+}else{
+if(b&&b.clientWidth){
+d.w=b.clientWidth;
+d.h=b.clientHeight;
+}
+}
+}
+return d;
+},getViewportPosition:function(){
+var c=new MochiKit.Style.Coordinates(0,0);
+var d=MochiKit.DOM._document;
+var de=d.documentElement;
+var db=d.body;
+if(de&&(de.scrollTop||de.scrollLeft)){
+c.x=de.scrollLeft;
+c.y=de.scrollTop;
+}else{
+if(db){
+c.x=db.scrollLeft;
+c.y=db.scrollTop;
+}
+}
+return c;
+},__new__:function(){
+var m=MochiKit.Base;
+var _470=["A","ABBR","ACRONYM","B","BASEFONT","BDO","BIG","BR","CITE","CODE","DFN","EM","FONT","I","IMG","KBD","LABEL","Q","S","SAMP","SMALL","SPAN","STRIKE","STRONG","SUB","SUP","TEXTAREA","TT","U","VAR"];
+this._defaultDisplay={"TABLE":"table","THEAD":"table-header-group","TBODY":"table-row-group","TFOOT":"table-footer-group","COLGROUP":"table-column-group","COL":"table-column","TR":"table-row","TD":"table-cell","TH":"table-cell","CAPTION":"table-caption","LI":"list-item","INPUT":"inline-block","SELECT":"inline-block"};
+if(/MSIE/.test(navigator.userAgent)){
+for(var k in this._defaultDisplay){
+var v=this._defaultDisplay[k];
+if(v.indexOf("table")==0){
+this._defaultDisplay[k]="block";
+}
+}
+}
+for(var i=0;i<_470.length;i++){
+this._defaultDisplay[_470[i]]="inline";
+}
+this.elementPosition=this.getElementPosition;
+this.elementDimensions=this.getElementDimensions;
+this.hideElement=m.partial(this.setDisplayForElement,"none");
+this.showElement=m.partial(this.setDisplayForElement,"block");
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+}});
+MochiKit.Style.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.Style);
+MochiKit.Base._deps("LoggingPane",["Base","Logging"]);
+MochiKit.LoggingPane.NAME="MochiKit.LoggingPane";
+MochiKit.LoggingPane.VERSION="1.4.2";
+MochiKit.LoggingPane.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.LoggingPane.toString=function(){
+return this.__repr__();
+};
+MochiKit.LoggingPane.createLoggingPane=function(_474){
+var m=MochiKit.LoggingPane;
+_474=!(!_474);
+if(m._loggingPane&&m._loggingPane.inline!=_474){
+m._loggingPane.closePane();
+m._loggingPane=null;
+}
+if(!m._loggingPane||m._loggingPane.closed){
+m._loggingPane=new m.LoggingPane(_474,MochiKit.Logging.logger);
+}
+return m._loggingPane;
+};
+MochiKit.LoggingPane.LoggingPane=function(_476,_477){
+if(typeof (_477)=="undefined"||_477===null){
+_477=MochiKit.Logging.logger;
+}
+this.logger=_477;
+var _478=MochiKit.Base.update;
+var _479=MochiKit.Base.updatetree;
+var bind=MochiKit.Base.bind;
+var _47b=MochiKit.Base.clone;
+var win=window;
+var uid="_MochiKit_LoggingPane";
+if(typeof (MochiKit.DOM)!="undefined"){
+win=MochiKit.DOM.currentWindow();
+}
+if(!_476){
+var url=win.location.href.split("?")[0].replace(/[#:\/.><&%-]/g,"_");
+var name=uid+"_"+url;
+var nwin=win.open("",name,"dependent,resizable,height=200");
+if(!nwin){
+alert("Not able to open debugging window due to pop-up blocking.");
+return undefined;
+}
+nwin.document.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\" "+"\"http://www.w3.org/TR/html4/loose.dtd\">"+"<html><head><title>[MochiKit.LoggingPane]</title></head>"+"<body></body></html>");
+nwin.document.close();
+nwin.document.title+=" "+win.document.title;
+win=nwin;
+}
+var doc=win.document;
+this.doc=doc;
+var _482=doc.getElementById(uid);
+var _483=!!_482;
+if(_482&&typeof (_482.loggingPane)!="undefined"){
+_482.loggingPane.logger=this.logger;
+_482.loggingPane.buildAndApplyFilter();
+return _482.loggingPane;
+}
+if(_483){
+var _484;
+while((_484=_482.firstChild)){
+_482.removeChild(_484);
+}
+}else{
+_482=doc.createElement("div");
+_482.id=uid;
+}
+_482.loggingPane=this;
+var _485=doc.createElement("input");
+var _486=doc.createElement("input");
+var _487=doc.createElement("button");
+var _488=doc.createElement("button");
+var _489=doc.createElement("button");
+var _48a=doc.createElement("button");
+var _48b=doc.createElement("div");
+var _48c=doc.createElement("div");
+var _48d=uid+"_Listener";
+this.colorTable=_47b(this.colorTable);
+var _48e=[];
+var _48f=null;
+var _490=function(msg){
+var _492=msg.level;
+if(typeof (_492)=="number"){
+_492=MochiKit.Logging.LogLevel[_492];
+}
+return _492;
+};
+var _493=function(msg){
+return msg.info.join(" ");
+};
+var _495=bind(function(msg){
+var _497=_490(msg);
+var text=_493(msg);
+var c=this.colorTable[_497];
+var p=doc.createElement("span");
+p.className="MochiKit-LogMessage MochiKit-LogLevel-"+_497;
+p.style.cssText="margin: 0px; white-space: -moz-pre-wrap; white-space: -o-pre-wrap; white-space: pre-wrap; white-space: pre-line; word-wrap: break-word; wrap-option: emergency; color: "+c;
+p.appendChild(doc.createTextNode(_497+": "+text));
+_48c.appendChild(p);
+_48c.appendChild(doc.createElement("br"));
+if(_48b.offsetHeight>_48b.scrollHeight){
+_48b.scrollTop=0;
+}else{
+_48b.scrollTop=_48b.scrollHeight;
+}
+},this);
+var _49b=function(msg){
+_48e[_48e.length]=msg;
+_495(msg);
+};
+var _49d=function(){
+var _49e,_49f;
+try{
+_49e=new RegExp(_485.value);
+_49f=new RegExp(_486.value);
+}
+catch(e){
+logDebug("Error in filter regex: "+e.message);
+return null;
+}
+return function(msg){
+return (_49e.test(_490(msg))&&_49f.test(_493(msg)));
+};
+};
+var _4a1=function(){
+while(_48c.firstChild){
+_48c.removeChild(_48c.firstChild);
+}
+};
+var _4a2=function(){
+_48e=[];
+_4a1();
+};
+var _4a3=bind(function(){
+if(this.closed){
+return;
+}
+this.closed=true;
+if(MochiKit.LoggingPane._loggingPane==this){
+MochiKit.LoggingPane._loggingPane=null;
+}
+this.logger.removeListener(_48d);
+try{
+try{
+_482.loggingPane=null;
+}
+catch(e){
+logFatal("Bookmarklet was closed incorrectly.");
+}
+if(_476){
+_482.parentNode.removeChild(_482);
+}else{
+this.win.close();
+}
+}
+catch(e){
+}
+},this);
+var _4a4=function(){
+_4a1();
+for(var i=0;i<_48e.length;i++){
+var msg=_48e[i];
+if(_48f===null||_48f(msg)){
+_495(msg);
+}
+}
+};
+this.buildAndApplyFilter=function(){
+_48f=_49d();
+_4a4();
+this.logger.removeListener(_48d);
+this.logger.addListener(_48d,_48f,_49b);
+};
+var _4a7=bind(function(){
+_48e=this.logger.getMessages();
+_4a4();
+},this);
+var _4a8=bind(function(_4a9){
+_4a9=_4a9||window.event;
+key=_4a9.which||_4a9.keyCode;
+if(key==13){
+this.buildAndApplyFilter();
+}
+},this);
+var _4aa="display: block; z-index: 1000; left: 0px; bottom: 0px; position: fixed; width: 100%; background-color: white; font: "+this.logFont;
+if(_476){
+_4aa+="; height: 10em; border-top: 2px solid black";
+}else{
+_4aa+="; height: 100%;";
+}
+_482.style.cssText=_4aa;
+if(!_483){
+doc.body.appendChild(_482);
+}
+_4aa={"cssText":"width: 33%; display: inline; font: "+this.logFont};
+_479(_485,{"value":"FATAL|ERROR|WARNING|INFO|DEBUG","onkeypress":_4a8,"style":_4aa});
+_482.appendChild(_485);
+_479(_486,{"value":".*","onkeypress":_4a8,"style":_4aa});
+_482.appendChild(_486);
+_4aa="width: 8%; display:inline; font: "+this.logFont;
+_487.appendChild(doc.createTextNode("Filter"));
+_487.onclick=bind("buildAndApplyFilter",this);
+_487.style.cssText=_4aa;
+_482.appendChild(_487);
+_488.appendChild(doc.createTextNode("Load"));
+_488.onclick=_4a7;
+_488.style.cssText=_4aa;
+_482.appendChild(_488);
+_489.appendChild(doc.createTextNode("Clear"));
+_489.onclick=_4a2;
+_489.style.cssText=_4aa;
+_482.appendChild(_489);
+_48a.appendChild(doc.createTextNode("Close"));
+_48a.onclick=_4a3;
+_48a.style.cssText=_4aa;
+_482.appendChild(_48a);
+_48b.style.cssText="overflow: auto; width: 100%";
+_48c.style.cssText="width: 100%; height: "+(_476?"8em":"100%");
+_48b.appendChild(_48c);
+_482.appendChild(_48b);
+this.buildAndApplyFilter();
+_4a7();
+if(_476){
+this.win=undefined;
+}else{
+this.win=win;
+}
+this.inline=_476;
+this.closePane=_4a3;
+this.closed=false;
+return this;
+};
+MochiKit.LoggingPane.LoggingPane.prototype={"logFont":"8pt Verdana,sans-serif","colorTable":{"ERROR":"red","FATAL":"darkred","WARNING":"blue","INFO":"black","DEBUG":"green"}};
+MochiKit.LoggingPane.EXPORT_OK=["LoggingPane"];
+MochiKit.LoggingPane.EXPORT=["createLoggingPane"];
+MochiKit.LoggingPane.__new__=function(){
+this.EXPORT_TAGS={":common":this.EXPORT,":all":MochiKit.Base.concat(this.EXPORT,this.EXPORT_OK)};
+MochiKit.Base.nameFunctions(this);
+MochiKit.LoggingPane._loggingPane=null;
+};
+MochiKit.LoggingPane.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.LoggingPane);
+MochiKit.Base._deps("Color",["Base","DOM","Style"]);
+MochiKit.Color.NAME="MochiKit.Color";
+MochiKit.Color.VERSION="1.4.2";
+MochiKit.Color.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Color.toString=function(){
+return this.__repr__();
+};
+MochiKit.Color.Color=function(red,_4ac,blue,_4ae){
+if(typeof (_4ae)=="undefined"||_4ae===null){
+_4ae=1;
+}
+this.rgb={r:red,g:_4ac,b:blue,a:_4ae};
+};
+MochiKit.Color.Color.prototype={__class__:MochiKit.Color.Color,colorWithAlpha:function(_4af){
+var rgb=this.rgb;
+var m=MochiKit.Color;
+return m.Color.fromRGB(rgb.r,rgb.g,rgb.b,_4af);
+},colorWithHue:function(hue){
+var hsl=this.asHSL();
+hsl.h=hue;
+var m=MochiKit.Color;
+return m.Color.fromHSL(hsl);
+},colorWithSaturation:function(_4b5){
+var hsl=this.asHSL();
+hsl.s=_4b5;
+var m=MochiKit.Color;
+return m.Color.fromHSL(hsl);
+},colorWithLightness:function(_4b8){
+var hsl=this.asHSL();
+hsl.l=_4b8;
+var m=MochiKit.Color;
+return m.Color.fromHSL(hsl);
+},darkerColorWithLevel:function(_4bb){
+var hsl=this.asHSL();
+hsl.l=Math.max(hsl.l-_4bb,0);
+var m=MochiKit.Color;
+return m.Color.fromHSL(hsl);
+},lighterColorWithLevel:function(_4be){
+var hsl=this.asHSL();
+hsl.l=Math.min(hsl.l+_4be,1);
+var m=MochiKit.Color;
+return m.Color.fromHSL(hsl);
+},blendedColor:function(_4c1,_4c2){
+if(typeof (_4c2)=="undefined"||_4c2===null){
+_4c2=0.5;
+}
+var sf=1-_4c2;
+var s=this.rgb;
+var d=_4c1.rgb;
+var df=_4c2;
+return MochiKit.Color.Color.fromRGB((s.r*sf)+(d.r*df),(s.g*sf)+(d.g*df),(s.b*sf)+(d.b*df),(s.a*sf)+(d.a*df));
+},compareRGB:function(_4c7){
+var a=this.asRGB();
+var b=_4c7.asRGB();
+return MochiKit.Base.compare([a.r,a.g,a.b,a.a],[b.r,b.g,b.b,b.a]);
+},isLight:function(){
+return this.asHSL().b>0.5;
+},isDark:function(){
+return (!this.isLight());
+},toHSLString:function(){
+var c=this.asHSL();
+var ccc=MochiKit.Color.clampColorComponent;
+var rval=this._hslString;
+if(!rval){
+var mid=(ccc(c.h,360).toFixed(0)+","+ccc(c.s,100).toPrecision(4)+"%"+","+ccc(c.l,100).toPrecision(4)+"%");
+var a=c.a;
+if(a>=1){
+a=1;
+rval="hsl("+mid+")";
+}else{
+if(a<=0){
+a=0;
+}
+rval="hsla("+mid+","+a+")";
+}
+this._hslString=rval;
+}
+return rval;
+},toRGBString:function(){
+var c=this.rgb;
+var ccc=MochiKit.Color.clampColorComponent;
+var rval=this._rgbString;
+if(!rval){
+var mid=(ccc(c.r,255).toFixed(0)+","+ccc(c.g,255).toFixed(0)+","+ccc(c.b,255).toFixed(0));
+if(c.a!=1){
+rval="rgba("+mid+","+c.a+")";
+}else{
+rval="rgb("+mid+")";
+}
+this._rgbString=rval;
+}
+return rval;
+},asRGB:function(){
+return MochiKit.Base.clone(this.rgb);
+},toHexString:function(){
+var m=MochiKit.Color;
+var c=this.rgb;
+var ccc=MochiKit.Color.clampColorComponent;
+var rval=this._hexString;
+if(!rval){
+rval=("#"+m.toColorPart(ccc(c.r,255))+m.toColorPart(ccc(c.g,255))+m.toColorPart(ccc(c.b,255)));
+this._hexString=rval;
+}
+return rval;
+},asHSV:function(){
+var hsv=this.hsv;
+var c=this.rgb;
+if(typeof (hsv)=="undefined"||hsv===null){
+hsv=MochiKit.Color.rgbToHSV(this.rgb);
+this.hsv=hsv;
+}
+return MochiKit.Base.clone(hsv);
+},asHSL:function(){
+var hsl=this.hsl;
+var c=this.rgb;
+if(typeof (hsl)=="undefined"||hsl===null){
+hsl=MochiKit.Color.rgbToHSL(this.rgb);
+this.hsl=hsl;
+}
+return MochiKit.Base.clone(hsl);
+},toString:function(){
+return this.toRGBString();
+},repr:function(){
+var c=this.rgb;
+var col=[c.r,c.g,c.b,c.a];
+return this.__class__.NAME+"("+col.join(", ")+")";
+}};
+MochiKit.Base.update(MochiKit.Color.Color,{fromRGB:function(red,_4de,blue,_4e0){
+var _4e1=MochiKit.Color.Color;
+if(arguments.length==1){
+var rgb=red;
+red=rgb.r;
+_4de=rgb.g;
+blue=rgb.b;
+if(typeof (rgb.a)=="undefined"){
+_4e0=undefined;
+}else{
+_4e0=rgb.a;
+}
+}
+return new _4e1(red,_4de,blue,_4e0);
+},fromHSL:function(hue,_4e4,_4e5,_4e6){
+var m=MochiKit.Color;
+return m.Color.fromRGB(m.hslToRGB.apply(m,arguments));
+},fromHSV:function(hue,_4e9,_4ea,_4eb){
+var m=MochiKit.Color;
+return m.Color.fromRGB(m.hsvToRGB.apply(m,arguments));
+},fromName:function(name){
+var _4ee=MochiKit.Color.Color;
+if(name.charAt(0)=="\""){
+name=name.substr(1,name.length-2);
+}
+var _4ef=_4ee._namedColors[name.toLowerCase()];
+if(typeof (_4ef)=="string"){
+return _4ee.fromHexString(_4ef);
+}else{
+if(name=="transparent"){
+return _4ee.transparentColor();
+}
+}
+return null;
+},fromString:function(_4f0){
+var self=MochiKit.Color.Color;
+var _4f2=_4f0.substr(0,3);
+if(_4f2=="rgb"){
+return self.fromRGBString(_4f0);
+}else{
+if(_4f2=="hsl"){
+return self.fromHSLString(_4f0);
+}else{
+if(_4f0.charAt(0)=="#"){
+return self.fromHexString(_4f0);
+}
+}
+}
+return self.fromName(_4f0);
+},fromHexString:function(_4f3){
+if(_4f3.charAt(0)=="#"){
+_4f3=_4f3.substring(1);
+}
+var _4f4=[];
+var i,hex;
+if(_4f3.length==3){
+for(i=0;i<3;i++){
+hex=_4f3.substr(i,1);
+_4f4.push(parseInt(hex+hex,16)/255);
+}
+}else{
+for(i=0;i<6;i+=2){
+hex=_4f3.substr(i,2);
+_4f4.push(parseInt(hex,16)/255);
+}
+}
+var _4f7=MochiKit.Color.Color;
+return _4f7.fromRGB.apply(_4f7,_4f4);
+},_fromColorString:function(pre,_4f9,_4fa,_4fb){
+if(_4fb.indexOf(pre)===0){
+_4fb=_4fb.substring(_4fb.indexOf("(",3)+1,_4fb.length-1);
+}
+var _4fc=_4fb.split(/\s*,\s*/);
+var _4fd=[];
+for(var i=0;i<_4fc.length;i++){
+var c=_4fc[i];
+var val;
+var _501=c.substring(c.length-3);
+if(c.charAt(c.length-1)=="%"){
+val=0.01*parseFloat(c.substring(0,c.length-1));
+}else{
+if(_501=="deg"){
+val=parseFloat(c)/360;
+}else{
+if(_501=="rad"){
+val=parseFloat(c)/(Math.PI*2);
+}else{
+val=_4fa[i]*parseFloat(c);
+}
+}
+}
+_4fd.push(val);
+}
+return this[_4f9].apply(this,_4fd);
+},fromComputedStyle:function(elem,_503){
+var d=MochiKit.DOM;
+var cls=MochiKit.Color.Color;
+for(elem=d.getElement(elem);elem;elem=elem.parentNode){
+var _506=MochiKit.Style.getStyle.apply(d,arguments);
+if(!_506){
+continue;
+}
+var _507=cls.fromString(_506);
+if(!_507){
+break;
+}
+if(_507.asRGB().a>0){
+return _507;
+}
+}
+return null;
+},fromBackground:function(elem){
+var cls=MochiKit.Color.Color;
+return cls.fromComputedStyle(elem,"backgroundColor","background-color")||cls.whiteColor();
+},fromText:function(elem){
+var cls=MochiKit.Color.Color;
+return cls.fromComputedStyle(elem,"color","color")||cls.blackColor();
+},namedColors:function(){
+return MochiKit.Base.clone(MochiKit.Color.Color._namedColors);
+}});
+MochiKit.Base.update(MochiKit.Color,{clampColorComponent:function(v,_50d){
+v*=_50d;
+if(v<0){
+return 0;
+}else{
+if(v>_50d){
+return _50d;
+}else{
+return v;
+}
+}
+},_hslValue:function(n1,n2,hue){
+if(hue>6){
+hue-=6;
+}else{
+if(hue<0){
+hue+=6;
+}
+}
+var val;
+if(hue<1){
+val=n1+(n2-n1)*hue;
+}else{
+if(hue<3){
+val=n2;
+}else{
+if(hue<4){
+val=n1+(n2-n1)*(4-hue);
+}else{
+val=n1;
+}
+}
+}
+return val;
+},hsvToRGB:function(hue,_513,_514,_515){
+if(arguments.length==1){
+var hsv=hue;
+hue=hsv.h;
+_513=hsv.s;
+_514=hsv.v;
+_515=hsv.a;
+}
+var red;
+var _518;
+var blue;
+if(_513===0){
+red=_514;
+_518=_514;
+blue=_514;
+}else{
+var i=Math.floor(hue*6);
+var f=(hue*6)-i;
+var p=_514*(1-_513);
+var q=_514*(1-(_513*f));
+var t=_514*(1-(_513*(1-f)));
+switch(i){
+case 1:
+red=q;
+_518=_514;
+blue=p;
+break;
+case 2:
+red=p;
+_518=_514;
+blue=t;
+break;
+case 3:
+red=p;
+_518=q;
+blue=_514;
+break;
+case 4:
+red=t;
+_518=p;
+blue=_514;
+break;
+case 5:
+red=_514;
+_518=p;
+blue=q;
+break;
+case 6:
+case 0:
+red=_514;
+_518=t;
+blue=p;
+break;
+}
+}
+return {r:red,g:_518,b:blue,a:_515};
+},hslToRGB:function(hue,_520,_521,_522){
+if(arguments.length==1){
+var hsl=hue;
+hue=hsl.h;
+_520=hsl.s;
+_521=hsl.l;
+_522=hsl.a;
+}
+var red;
+var _525;
+var blue;
+if(_520===0){
+red=_521;
+_525=_521;
+blue=_521;
+}else{
+var m2;
+if(_521<=0.5){
+m2=_521*(1+_520);
+}else{
+m2=_521+_520-(_521*_520);
+}
+var m1=(2*_521)-m2;
+var f=MochiKit.Color._hslValue;
+var h6=hue*6;
+red=f(m1,m2,h6+2);
+_525=f(m1,m2,h6);
+blue=f(m1,m2,h6-2);
+}
+return {r:red,g:_525,b:blue,a:_522};
+},rgbToHSV:function(red,_52c,blue,_52e){
+if(arguments.length==1){
+var rgb=red;
+red=rgb.r;
+_52c=rgb.g;
+blue=rgb.b;
+_52e=rgb.a;
+}
+var max=Math.max(Math.max(red,_52c),blue);
+var min=Math.min(Math.min(red,_52c),blue);
+var hue;
+var _533;
+var _534=max;
+if(min==max){
+hue=0;
+_533=0;
+}else{
+var _535=(max-min);
+_533=_535/max;
+if(red==max){
+hue=(_52c-blue)/_535;
+}else{
+if(_52c==max){
+hue=2+((blue-red)/_535);
+}else{
+hue=4+((red-_52c)/_535);
+}
+}
+hue/=6;
+if(hue<0){
+hue+=1;
+}
+if(hue>1){
+hue-=1;
+}
+}
+return {h:hue,s:_533,v:_534,a:_52e};
+},rgbToHSL:function(red,_537,blue,_539){
+if(arguments.length==1){
+var rgb=red;
+red=rgb.r;
+_537=rgb.g;
+blue=rgb.b;
+_539=rgb.a;
+}
+var max=Math.max(red,Math.max(_537,blue));
+var min=Math.min(red,Math.min(_537,blue));
+var hue;
+var _53e;
+var _53f=(max+min)/2;
+var _540=max-min;
+if(_540===0){
+hue=0;
+_53e=0;
+}else{
+if(_53f<=0.5){
+_53e=_540/(max+min);
+}else{
+_53e=_540/(2-max-min);
+}
+if(red==max){
+hue=(_537-blue)/_540;
+}else{
+if(_537==max){
+hue=2+((blue-red)/_540);
+}else{
+hue=4+((red-_537)/_540);
+}
+}
+hue/=6;
+if(hue<0){
+hue+=1;
+}
+if(hue>1){
+hue-=1;
+}
+}
+return {h:hue,s:_53e,l:_53f,a:_539};
+},toColorPart:function(num){
+num=Math.round(num);
+var _542=num.toString(16);
+if(num<16){
+return "0"+_542;
+}
+return _542;
+},__new__:function(){
+var m=MochiKit.Base;
+this.Color.fromRGBString=m.bind(this.Color._fromColorString,this.Color,"rgb","fromRGB",[1/255,1/255,1/255,1]);
+this.Color.fromHSLString=m.bind(this.Color._fromColorString,this.Color,"hsl","fromHSL",[1/360,0.01,0.01,1]);
+var _544=1/3;
+var _545={black:[0,0,0],blue:[0,0,1],brown:[0.6,0.4,0.2],cyan:[0,1,1],darkGray:[_544,_544,_544],gray:[0.5,0.5,0.5],green:[0,1,0],lightGray:[2*_544,2*_544,2*_544],magenta:[1,0,1],orange:[1,0.5,0],purple:[0.5,0,0.5],red:[1,0,0],transparent:[0,0,0,0],white:[1,1,1],yellow:[1,1,0]};
+var _546=function(name,r,g,b,a){
+var rval=this.fromRGB(r,g,b,a);
+this[name]=function(){
+return rval;
+};
+return rval;
+};
+for(var k in _545){
+var name=k+"Color";
+var _54f=m.concat([_546,this.Color,name],_545[k]);
+this.Color[name]=m.bind.apply(null,_54f);
+}
+var _550=function(){
+for(var i=0;i<arguments.length;i++){
+if(!(arguments[i] instanceof MochiKit.Color.Color)){
+return false;
+}
+}
+return true;
+};
+var _552=function(a,b){
+return a.compareRGB(b);
+};
+m.nameFunctions(this);
+m.registerComparator(this.Color.NAME,_550,_552);
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+}});
+MochiKit.Color.EXPORT=["Color"];
+MochiKit.Color.EXPORT_OK=["clampColorComponent","rgbToHSL","hslToRGB","rgbToHSV","hsvToRGB","toColorPart"];
+MochiKit.Color.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.Color);
+MochiKit.Color.Color._namedColors={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgreen:"#006400",darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gainsboro:"#dcdcdc",ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lightsalmon:"#ffa07a",lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71
585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1",moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57",seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32"};
+MochiKit.Base._deps("Signal",["Base","DOM","Style"]);
+MochiKit.Signal.NAME="MochiKit.Signal";
+MochiKit.Signal.VERSION="1.4.2";
+MochiKit.Signal._observers=[];
+MochiKit.Signal.Event=function(src,e){
+this._event=e||window.event;
+this._src=src;
+};
+MochiKit.Base.update(MochiKit.Signal.Event.prototype,{__repr__:function(){
+var repr=MochiKit.Base.repr;
+var str="{event(): "+repr(this.event())+", src(): "+repr(this.src())+", type(): "+repr(this.type())+", target(): "+repr(this.target());
+if(this.type()&&this.type().indexOf("key")===0||this.type().indexOf("mouse")===0||this.type().indexOf("click")!=-1||this.type()=="contextmenu"){
+str+=", modifier(): "+"{alt: "+repr(this.modifier().alt)+", ctrl: "+repr(this.modifier().ctrl)+", meta: "+repr(this.modifier().meta)+", shift: "+repr(this.modifier().shift)+", any: "+repr(this.modifier().any)+"}";
+}
+if(this.type()&&this.type().indexOf("key")===0){
+str+=", key(): {code: "+repr(this.key().code)+", string: "+repr(this.key().string)+"}";
+}
+if(this.type()&&(this.type().indexOf("mouse")===0||this.type().indexOf("click")!=-1||this.type()=="contextmenu")){
+str+=", mouse(): {page: "+repr(this.mouse().page)+", client: "+repr(this.mouse().client);
+if(this.type()!="mousemove"&&this.type()!="mousewheel"){
+str+=", button: {left: "+repr(this.mouse().button.left)+", middle: "+repr(this.mouse().button.middle)+", right: "+repr(this.mouse().button.right)+"}";
+}
+if(this.type()=="mousewheel"){
+str+=", wheel: "+repr(this.mouse().wheel);
+}
+str+="}";
+}
+if(this.type()=="mouseover"||this.type()=="mouseout"||this.type()=="mouseenter"||this.type()=="mouseleave"){
+str+=", relatedTarget(): "+repr(this.relatedTarget());
+}
+str+="}";
+return str;
+},toString:function(){
+return this.__repr__();
+},src:function(){
+return this._src;
+},event:function(){
+return this._event;
+},type:function(){
+if(this._event.type==="DOMMouseScroll"){
+return "mousewheel";
+}else{
+return this._event.type||undefined;
+}
+},target:function(){
+return this._event.target||this._event.srcElement;
+},_relatedTarget:null,relatedTarget:function(){
+if(this._relatedTarget!==null){
+return this._relatedTarget;
+}
+var elem=null;
+if(this.type()=="mouseover"||this.type()=="mouseenter"){
+elem=(this._event.relatedTarget||this._event.fromElement);
+}else{
+if(this.type()=="mouseout"||this.type()=="mouseleave"){
+elem=(this._event.relatedTarget||this._event.toElement);
+}
+}
+try{
+if(elem!==null&&elem.nodeType!==null){
+this._relatedTarget=elem;
+return elem;
+}
+}
+catch(ignore){
+}
+return undefined;
+},_modifier:null,modifier:function(){
+if(this._modifier!==null){
+return this._modifier;
+}
+var m={};
+m.alt=this._event.altKey;
+m.ctrl=this._event.ctrlKey;
+m.meta=this._event.metaKey||false;
+m.shift=this._event.shiftKey;
+m.any=m.alt||m.ctrl||m.shift||m.meta;
+this._modifier=m;
+return m;
+},_key:null,key:function(){
+if(this._key!==null){
+return this._key;
+}
+var k={};
+if(this.type()&&this.type().indexOf("key")===0){
+if(this.type()=="keydown"||this.type()=="keyup"){
+k.code=this._event.keyCode;
+k.string=(MochiKit.Signal._specialKeys[k.code]||"KEY_UNKNOWN");
+this._key=k;
+return k;
+}else{
+if(this.type()=="keypress"){
+k.code=0;
+k.string="";
+if(typeof (this._event.charCode)!="undefined"&&this._event.charCode!==0&&!MochiKit.Signal._specialMacKeys[this._event.charCode]){
+k.code=this._event.charCode;
+k.string=String.fromCharCode(k.code);
+}else{
+if(this._event.keyCode&&typeof (this._event.charCode)=="undefined"){
+k.code=this._event.keyCode;
+k.string=String.fromCharCode(k.code);
+}
+}
+this._key=k;
+return k;
+}
+}
+}
+return undefined;
+},_mouse:null,mouse:function(){
+if(this._mouse!==null){
+return this._mouse;
+}
+var m={};
+var e=this._event;
+if(this.type()&&(this.type().indexOf("mouse")===0||this.type().indexOf("click")!=-1||this.type()=="contextmenu")){
+m.client=new MochiKit.Style.Coordinates(0,0);
+if(e.clientX||e.clientY){
+m.client.x=(!e.clientX||e.clientX<0)?0:e.clientX;
+m.client.y=(!e.clientY||e.clientY<0)?0:e.clientY;
+}
+m.page=new MochiKit.Style.Coordinates(0,0);
+if(e.pageX||e.pageY){
+m.page.x=(!e.pageX||e.pageX<0)?0:e.pageX;
+m.page.y=(!e.pageY||e.pageY<0)?0:e.pageY;
+}else{
+var de=MochiKit.DOM._document.documentElement;
+var b=MochiKit.DOM._document.body;
+m.page.x=e.clientX+(de.scrollLeft||b.scrollLeft)-(de.clientLeft||0);
+m.page.y=e.clientY+(de.scrollTop||b.scrollTop)-(de.clientTop||0);
+}
+if(this.type()!="mousemove"&&this.type()!="mousewheel"){
+m.button={};
+m.button.left=false;
+m.button.right=false;
+m.button.middle=false;
+if(e.which){
+m.button.left=(e.which==1);
+m.button.middle=(e.which==2);
+m.button.right=(e.which==3);
+}else{
+m.button.left=!!(e.button&1);
+m.button.right=!!(e.button&2);
+m.button.middle=!!(e.button&4);
+}
+}
+if(this.type()=="mousewheel"){
+m.wheel=new MochiKit.Style.Coordinates(0,0);
+if(e.wheelDeltaX||e.wheelDeltaY){
+m.wheel.x=e.wheelDeltaX/-40||0;
+m.wheel.y=e.wheelDeltaY/-40||0;
+}else{
+if(e.wheelDelta){
+m.wheel.y=e.wheelDelta/-40;
+}else{
+m.wheel.y=e.detail||0;
+}
+}
+}
+this._mouse=m;
+return m;
+}
+return undefined;
+},stop:function(){
+this.stopPropagation();
+this.preventDefault();
+},stopPropagation:function(){
+if(this._event.stopPropagation){
+this._event.stopPropagation();
+}else{
+this._event.cancelBubble=true;
+}
+},preventDefault:function(){
+if(this._event.preventDefault){
+this._event.preventDefault();
+}else{
+if(this._confirmUnload===null){
+this._event.returnValue=false;
+}
+}
+},_confirmUnload:null,confirmUnload:function(msg){
+if(this.type()=="beforeunload"){
+this._confirmUnload=msg;
+this._event.returnValue=msg;
+}
+}});
+MochiKit.Signal._specialMacKeys={3:"KEY_ENTER",63289:"KEY_NUM_PAD_CLEAR",63276:"KEY_PAGE_UP",63277:"KEY_PAGE_DOWN",63275:"KEY_END",63273:"KEY_HOME",63234:"KEY_ARROW_LEFT",63232:"KEY_ARROW_UP",63235:"KEY_ARROW_RIGHT",63233:"KEY_ARROW_DOWN",63302:"KEY_INSERT",63272:"KEY_DELETE"};
+(function(){
+var _561=MochiKit.Signal._specialMacKeys;
+for(i=63236;i<=63242;i++){
+_561[i]="KEY_F"+(i-63236+1);
+}
+})();
+MochiKit.Signal._specialKeys={8:"KEY_BACKSPACE",9:"KEY_TAB",12:"KEY_NUM_PAD_CLEAR",13:"KEY_ENTER",16:"KEY_SHIFT",17:"KEY_CTRL",18:"KEY_ALT",19:"KEY_PAUSE",20:"KEY_CAPS_LOCK",27:"KEY_ESCAPE",32:"KEY_SPACEBAR",33:"KEY_PAGE_UP",34:"KEY_PAGE_DOWN",35:"KEY_END",36:"KEY_HOME",37:"KEY_ARROW_LEFT",38:"KEY_ARROW_UP",39:"KEY_ARROW_RIGHT",40:"KEY_ARROW_DOWN",44:"KEY_PRINT_SCREEN",45:"KEY_INSERT",46:"KEY_DELETE",59:"KEY_SEMICOLON",91:"KEY_WINDOWS_LEFT",92:"KEY_WINDOWS_RIGHT",93:"KEY_SELECT",106:"KEY_NUM_PAD_ASTERISK",107:"KEY_NUM_PAD_PLUS_SIGN",109:"KEY_NUM_PAD_HYPHEN-MINUS",110:"KEY_NUM_PAD_FULL_STOP",111:"KEY_NUM_PAD_SOLIDUS",144:"KEY_NUM_LOCK",145:"KEY_SCROLL_LOCK",186:"KEY_SEMICOLON",187:"KEY_EQUALS_SIGN",188:"KEY_COMMA",189:"KEY_HYPHEN-MINUS",190:"KEY_FULL_STOP",191:"KEY_SOLIDUS",192:"KEY_GRAVE_ACCENT",219:"KEY_LEFT_SQUARE_BRACKET",220:"KEY_REVERSE_SOLIDUS",221:"KEY_RIGHT_SQUARE_BRACKET",222:"KEY_APOSTROPHE"};
+(function(){
+var _562=MochiKit.Signal._specialKeys;
+for(var i=48;i<=57;i++){
+_562[i]="KEY_"+(i-48);
+}
+for(i=65;i<=90;i++){
+_562[i]="KEY_"+String.fromCharCode(i);
+}
+for(i=96;i<=105;i++){
+_562[i]="KEY_NUM_PAD_"+(i-96);
+}
+for(i=112;i<=123;i++){
+_562[i]="KEY_F"+(i-112+1);
+}
+})();
+MochiKit.Signal.Ident=function(_564){
+this.source=_564.source;
+this.signal=_564.signal;
+this.listener=_564.listener;
+this.isDOM=_564.isDOM;
+this.objOrFunc=_564.objOrFunc;
+this.funcOrStr=_564.funcOrStr;
+this.connected=_564.connected;
+};
+MochiKit.Signal.Ident.prototype={};
+MochiKit.Base.update(MochiKit.Signal,{__repr__:function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+},toString:function(){
+return this.__repr__();
+},_unloadCache:function(){
+var self=MochiKit.Signal;
+var _566=self._observers;
+for(var i=0;i<_566.length;i++){
+if(_566[i].signal!=="onload"&&_566[i].signal!=="onunload"){
+self._disconnect(_566[i]);
+}
+}
+},_listener:function(src,sig,func,obj,_56c){
+var self=MochiKit.Signal;
+var E=self.Event;
+if(!_56c){
+if(typeof (func.im_self)=="undefined"){
+return MochiKit.Base.bindLate(func,obj);
+}else{
+return func;
+}
+}
+obj=obj||src;
+if(typeof (func)=="string"){
+if(sig==="onload"||sig==="onunload"){
+return function(_56f){
+obj[func].apply(obj,[new E(src,_56f)]);
+var _570=new MochiKit.Signal.Ident({source:src,signal:sig,objOrFunc:obj,funcOrStr:func});
+MochiKit.Signal._disconnect(_570);
+};
+}else{
+return function(_571){
+obj[func].apply(obj,[new E(src,_571)]);
+};
+}
+}else{
+if(sig==="onload"||sig==="onunload"){
+return function(_572){
+func.apply(obj,[new E(src,_572)]);
+var _573=new MochiKit.Signal.Ident({source:src,signal:sig,objOrFunc:func});
+MochiKit.Signal._disconnect(_573);
+};
+}else{
+return function(_574){
+func.apply(obj,[new E(src,_574)]);
+};
+}
+}
+},_browserAlreadyHasMouseEnterAndLeave:function(){
+return /MSIE/.test(navigator.userAgent);
+},_browserLacksMouseWheelEvent:function(){
+return /Gecko\//.test(navigator.userAgent);
+},_mouseEnterListener:function(src,sig,func,obj){
+var E=MochiKit.Signal.Event;
+return function(_57a){
+var e=new E(src,_57a);
+try{
+e.relatedTarget().nodeName;
+}
+catch(err){
+return;
+}
+e.stop();
+if(MochiKit.DOM.isChildNode(e.relatedTarget(),src)){
+return;
+}
+e.type=function(){
+return sig;
+};
+if(typeof (func)=="string"){
+return obj[func].apply(obj,[e]);
+}else{
+return func.apply(obj,[e]);
+}
+};
+},_getDestPair:function(_57c,_57d){
+var obj=null;
+var func=null;
+if(typeof (_57d)!="undefined"){
+obj=_57c;
+func=_57d;
+if(typeof (_57d)=="string"){
+if(typeof (_57c[_57d])!="function"){
+throw new Error("'funcOrStr' must be a function on 'objOrFunc'");
+}
+}else{
+if(typeof (_57d)!="function"){
+throw new Error("'funcOrStr' must be a function or string");
+}
+}
+}else{
+if(typeof (_57c)!="function"){
+throw new Error("'objOrFunc' must be a function if 'funcOrStr' is not given");
+}else{
+func=_57c;
+}
+}
+return [obj,func];
+},connect:function(src,sig,_582,_583){
+src=MochiKit.DOM.getElement(src);
+var self=MochiKit.Signal;
+if(typeof (sig)!="string"){
+throw new Error("'sig' must be a string");
+}
+var _585=self._getDestPair(_582,_583);
+var obj=_585[0];
+var func=_585[1];
+if(typeof (obj)=="undefined"||obj===null){
+obj=src;
+}
+var _588=!!(src.addEventListener||src.attachEvent);
+if(_588&&(sig==="onmouseenter"||sig==="onmouseleave")&&!self._browserAlreadyHasMouseEnterAndLeave()){
+var _589=self._mouseEnterListener(src,sig.substr(2),func,obj);
+if(sig==="onmouseenter"){
+sig="onmouseover";
+}else{
+sig="onmouseout";
+}
+}else{
+if(_588&&sig=="onmousewheel"&&self._browserLacksMouseWheelEvent()){
+var _589=self._listener(src,sig,func,obj,_588);
+sig="onDOMMouseScroll";
+}else{
+var _589=self._listener(src,sig,func,obj,_588);
+}
+}
+if(src.addEventListener){
+src.addEventListener(sig.substr(2),_589,false);
+}else{
+if(src.attachEvent){
+src.attachEvent(sig,_589);
+}
+}
+var _58a=new MochiKit.Signal.Ident({source:src,signal:sig,listener:_589,isDOM:_588,objOrFunc:_582,funcOrStr:_583,connected:true});
+self._observers.push(_58a);
+if(!_588&&typeof (src.__connect__)=="function"){
+var args=MochiKit.Base.extend([_58a],arguments,1);
+src.__connect__.apply(src,args);
+}
+return _58a;
+},_disconnect:function(_58c){
+if(!_58c.connected){
+return;
+}
+_58c.connected=false;
+var src=_58c.source;
+var sig=_58c.signal;
+var _58f=_58c.listener;
+if(!_58c.isDOM){
+if(typeof (src.__disconnect__)=="function"){
+src.__disconnect__(_58c,sig,_58c.objOrFunc,_58c.funcOrStr);
+}
+return;
+}
+if(src.removeEventListener){
+src.removeEventListener(sig.substr(2),_58f,false);
+}else{
+if(src.detachEvent){
+src.detachEvent(sig,_58f);
+}else{
+throw new Error("'src' must be a DOM element");
+}
+}
+},disconnect:function(_590){
+var self=MochiKit.Signal;
+var _592=self._observers;
+var m=MochiKit.Base;
+if(arguments.length>1){
+var src=MochiKit.DOM.getElement(arguments[0]);
+var sig=arguments[1];
+var obj=arguments[2];
+var func=arguments[3];
+for(var i=_592.length-1;i>=0;i--){
+var o=_592[i];
+if(o.source===src&&o.signal===sig&&o.objOrFunc===obj&&o.funcOrStr===func){
+self._disconnect(o);
+if(!self._lock){
+_592.splice(i,1);
+}else{
+self._dirty=true;
+}
+return true;
+}
+}
+}else{
+var idx=m.findIdentical(_592,_590);
+if(idx>=0){
+self._disconnect(_590);
+if(!self._lock){
+_592.splice(idx,1);
+}else{
+self._dirty=true;
+}
+return true;
+}
+}
+return false;
+},disconnectAllTo:function(_59b,_59c){
+var self=MochiKit.Signal;
+var _59e=self._observers;
+var _59f=self._disconnect;
+var _5a0=self._lock;
+var _5a1=self._dirty;
+if(typeof (_59c)==="undefined"){
+_59c=null;
+}
+for(var i=_59e.length-1;i>=0;i--){
+var _5a3=_59e[i];
+if(_5a3.objOrFunc===_59b&&(_59c===null||_5a3.funcOrStr===_59c)){
+_59f(_5a3);
+if(_5a0){
+_5a1=true;
+}else{
+_59e.splice(i,1);
+}
+}
+}
+self._dirty=_5a1;
+},disconnectAll:function(src,sig){
+src=MochiKit.DOM.getElement(src);
+var m=MochiKit.Base;
+var _5a7=m.flattenArguments(m.extend(null,arguments,1));
+var self=MochiKit.Signal;
+var _5a9=self._disconnect;
+var _5aa=self._observers;
+var i,_5ac;
+var _5ad=self._lock;
+var _5ae=self._dirty;
+if(_5a7.length===0){
+for(i=_5aa.length-1;i>=0;i--){
+_5ac=_5aa[i];
+if(_5ac.source===src){
+_5a9(_5ac);
+if(!_5ad){
+_5aa.splice(i,1);
+}else{
+_5ae=true;
+}
+}
+}
+}else{
+var sigs={};
+for(i=0;i<_5a7.length;i++){
+sigs[_5a7[i]]=true;
+}
+for(i=_5aa.length-1;i>=0;i--){
+_5ac=_5aa[i];
+if(_5ac.source===src&&_5ac.signal in sigs){
+_5a9(_5ac);
+if(!_5ad){
+_5aa.splice(i,1);
+}else{
+_5ae=true;
+}
+}
+}
+}
+self._dirty=_5ae;
+},signal:function(src,sig){
+var self=MochiKit.Signal;
+var _5b3=self._observers;
+src=MochiKit.DOM.getElement(src);
+var args=MochiKit.Base.extend(null,arguments,2);
+var _5b5=[];
+self._lock=true;
+for(var i=0;i<_5b3.length;i++){
+var _5b7=_5b3[i];
+if(_5b7.source===src&&_5b7.signal===sig&&_5b7.connected){
+try{
+_5b7.listener.apply(src,args);
+}
+catch(e){
+_5b5.push(e);
+}
+}
+}
+self._lock=false;
+if(self._dirty){
+self._dirty=false;
+for(var i=_5b3.length-1;i>=0;i--){
+if(!_5b3[i].connected){
+_5b3.splice(i,1);
+}
+}
+}
+if(_5b5.length==1){
+throw _5b5[0];
+}else{
+if(_5b5.length>1){
+var e=new Error("Multiple errors thrown in handling 'sig', see errors property");
+e.errors=_5b5;
+throw e;
+}
+}
+}});
+MochiKit.Signal.EXPORT_OK=[];
+MochiKit.Signal.EXPORT=["connect","disconnect","signal","disconnectAll","disconnectAllTo"];
+MochiKit.Signal.__new__=function(win){
+var m=MochiKit.Base;
+this._document=document;
+this._window=win;
+this._lock=false;
+this._dirty=false;
+try{
+this.connect(window,"onunload",this._unloadCache);
+}
+catch(e){
+}
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+};
+MochiKit.Signal.__new__(this);
+if(MochiKit.__export__){
+connect=MochiKit.Signal.connect;
+disconnect=MochiKit.Signal.disconnect;
+disconnectAll=MochiKit.Signal.disconnectAll;
+signal=MochiKit.Signal.signal;
+}
+MochiKit.Base._exportSymbols(this,MochiKit.Signal);
+MochiKit.Base._deps("Position",["Base","DOM","Style"]);
+MochiKit.Position.NAME="MochiKit.Position";
+MochiKit.Position.VERSION="1.4.2";
+MochiKit.Position.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Position.toString=function(){
+return this.__repr__();
+};
+MochiKit.Position.EXPORT_OK=[];
+MochiKit.Position.EXPORT=[];
+MochiKit.Base.update(MochiKit.Position,{includeScrollOffsets:false,prepare:function(){
+var _5bb=window.pageXOffset||document.documentElement.scrollLeft||document.body.scrollLeft||0;
+var _5bc=window.pageYOffset||document.documentElement.scrollTop||document.body.scrollTop||0;
+this.windowOffset=new MochiKit.Style.Coordinates(_5bb,_5bc);
+},cumulativeOffset:function(_5bd){
+var _5be=0;
+var _5bf=0;
+do{
+_5be+=_5bd.offsetTop||0;
+_5bf+=_5bd.offsetLeft||0;
+_5bd=_5bd.offsetParent;
+}while(_5bd);
+return new MochiKit.Style.Coordinates(_5bf,_5be);
+},realOffset:function(_5c0){
+var _5c1=0;
+var _5c2=0;
+do{
+_5c1+=_5c0.scrollTop||0;
+_5c2+=_5c0.scrollLeft||0;
+_5c0=_5c0.parentNode;
+}while(_5c0);
+return new MochiKit.Style.Coordinates(_5c2,_5c1);
+},within:function(_5c3,x,y){
+if(this.includeScrollOffsets){
+return this.withinIncludingScrolloffsets(_5c3,x,y);
+}
+this.xcomp=x;
+this.ycomp=y;
+this.offset=this.cumulativeOffset(_5c3);
+if(_5c3.style.position=="fixed"){
+this.offset.x+=this.windowOffset.x;
+this.offset.y+=this.windowOffset.y;
+}
+return (y>=this.offset.y&&y<this.offset.y+_5c3.offsetHeight&&x>=this.offset.x&&x<this.offset.x+_5c3.offsetWidth);
+},withinIncludingScrolloffsets:function(_5c6,x,y){
+var _5c9=this.realOffset(_5c6);
+this.xcomp=x+_5c9.x-this.windowOffset.x;
+this.ycomp=y+_5c9.y-this.windowOffset.y;
+this.offset=this.cumulativeOffset(_5c6);
+return (this.ycomp>=this.offset.y&&this.ycomp<this.offset.y+_5c6.offsetHeight&&this.xcomp>=this.offset.x&&this.xcomp<this.offset.x+_5c6.offsetWidth);
+},overlap:function(mode,_5cb){
+if(!mode){
+return 0;
+}
+if(mode=="vertical"){
+return ((this.offset.y+_5cb.offsetHeight)-this.ycomp)/_5cb.offsetHeight;
+}
+if(mode=="horizontal"){
+return ((this.offset.x+_5cb.offsetWidth)-this.xcomp)/_5cb.offsetWidth;
+}
+},absolutize:function(_5cc){
+_5cc=MochiKit.DOM.getElement(_5cc);
+if(_5cc.style.position=="absolute"){
+return;
+}
+MochiKit.Position.prepare();
+var _5cd=MochiKit.Position.positionedOffset(_5cc);
+var _5ce=_5cc.clientWidth;
+var _5cf=_5cc.clientHeight;
+var _5d0={"position":_5cc.style.position,"left":_5cd.x-parseFloat(_5cc.style.left||0),"top":_5cd.y-parseFloat(_5cc.style.top||0),"width":_5cc.style.width,"height":_5cc.style.height};
+_5cc.style.position="absolute";
+_5cc.style.top=_5cd.y+"px";
+_5cc.style.left=_5cd.x+"px";
+_5cc.style.width=_5ce+"px";
+_5cc.style.height=_5cf+"px";
+return _5d0;
+},positionedOffset:function(_5d1){
+var _5d2=0,_5d3=0;
+do{
+_5d2+=_5d1.offsetTop||0;
+_5d3+=_5d1.offsetLeft||0;
+_5d1=_5d1.offsetParent;
+if(_5d1){
+p=MochiKit.Style.getStyle(_5d1,"position");
+if(p=="relative"||p=="absolute"){
+break;
+}
+}
+}while(_5d1);
+return new MochiKit.Style.Coordinates(_5d3,_5d2);
+},relativize:function(_5d4,_5d5){
+_5d4=MochiKit.DOM.getElement(_5d4);
+if(_5d4.style.position=="relative"){
+return;
+}
+MochiKit.Position.prepare();
+var top=parseFloat(_5d4.style.top||0)-(_5d5["top"]||0);
+var left=parseFloat(_5d4.style.left||0)-(_5d5["left"]||0);
+_5d4.style.position=_5d5["position"];
+_5d4.style.top=top+"px";
+_5d4.style.left=left+"px";
+_5d4.style.width=_5d5["width"];
+_5d4.style.height=_5d5["height"];
+},clone:function(_5d8,_5d9){
+_5d8=MochiKit.DOM.getElement(_5d8);
+_5d9=MochiKit.DOM.getElement(_5d9);
+_5d9.style.position="absolute";
+var _5da=this.cumulativeOffset(_5d8);
+_5d9.style.top=_5da.y+"px";
+_5d9.style.left=_5da.x+"px";
+_5d9.style.width=_5d8.offsetWidth+"px";
+_5d9.style.height=_5d8.offsetHeight+"px";
+},page:function(_5db){
+var _5dc=0;
+var _5dd=0;
+var _5de=_5db;
+do{
+_5dc+=_5de.offsetTop||0;
+_5dd+=_5de.offsetLeft||0;
+if(_5de.offsetParent==document.body&&MochiKit.Style.getStyle(_5de,"position")=="absolute"){
+break;
+}
+}while(_5de=_5de.offsetParent);
+_5de=_5db;
+do{
+_5dc-=_5de.scrollTop||0;
+_5dd-=_5de.scrollLeft||0;
+}while(_5de=_5de.parentNode);
+return new MochiKit.Style.Coordinates(_5dd,_5dc);
+}});
+MochiKit.Position.__new__=function(win){
+var m=MochiKit.Base;
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+m.nameFunctions(this);
+};
+MochiKit.Position.__new__(this);
+MochiKit.Base._exportSymbols(this,MochiKit.Position);
+MochiKit.Base._deps("Visual",["Base","DOM","Style","Color","Position"]);
+MochiKit.Visual.NAME="MochiKit.Visual";
+MochiKit.Visual.VERSION="1.4.2";
+MochiKit.Visual.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Visual.toString=function(){
+return this.__repr__();
+};
+MochiKit.Visual._RoundCorners=function(e,_5e2){
+e=MochiKit.DOM.getElement(e);
+this._setOptions(_5e2);
+if(this.options.__unstable__wrapElement){
+e=this._doWrap(e);
+}
+var _5e3=this.options.color;
+var C=MochiKit.Color.Color;
+if(this.options.color==="fromElement"){
+_5e3=C.fromBackground(e);
+}else{
+if(!(_5e3 instanceof C)){
+_5e3=C.fromString(_5e3);
+}
+}
+this.isTransparent=(_5e3.asRGB().a<=0);
+var _5e5=this.options.bgColor;
+if(this.options.bgColor==="fromParent"){
+_5e5=C.fromBackground(e.offsetParent);
+}else{
+if(!(_5e5 instanceof C)){
+_5e5=C.fromString(_5e5);
+}
+}
+this._roundCornersImpl(e,_5e3,_5e5);
+};
+MochiKit.Visual._RoundCorners.prototype={_doWrap:function(e){
+var _5e7=e.parentNode;
+var doc=MochiKit.DOM.currentDocument();
+if(typeof (doc.defaultView)==="undefined"||doc.defaultView===null){
+return e;
+}
+var _5e9=doc.defaultView.getComputedStyle(e,null);
+if(typeof (_5e9)==="undefined"||_5e9===null){
+return e;
+}
+var _5ea=MochiKit.DOM.DIV({"style":{display:"block",marginTop:_5e9.getPropertyValue("padding-top"),marginRight:_5e9.getPropertyValue("padding-right"),marginBottom:_5e9.getPropertyValue("padding-bottom"),marginLeft:_5e9.getPropertyValue("padding-left"),padding:"0px"}});
+_5ea.innerHTML=e.innerHTML;
+e.innerHTML="";
+e.appendChild(_5ea);
+return e;
+},_roundCornersImpl:function(e,_5ec,_5ed){
+if(this.options.border){
+this._renderBorder(e,_5ed);
+}
+if(this._isTopRounded()){
+this._roundTopCorners(e,_5ec,_5ed);
+}
+if(this._isBottomRounded()){
+this._roundBottomCorners(e,_5ec,_5ed);
+}
+},_renderBorder:function(el,_5ef){
+var _5f0="1px solid "+this._borderColor(_5ef);
+var _5f1="border-left: "+_5f0;
+var _5f2="border-right: "+_5f0;
+var _5f3="style='"+_5f1+";"+_5f2+"'";
+el.innerHTML="<div "+_5f3+">"+el.innerHTML+"</div>";
+},_roundTopCorners:function(el,_5f5,_5f6){
+var _5f7=this._createCorner(_5f6);
+for(var i=0;i<this.options.numSlices;i++){
+_5f7.appendChild(this._createCornerSlice(_5f5,_5f6,i,"top"));
+}
+el.style.paddingTop=0;
+el.insertBefore(_5f7,el.firstChild);
+},_roundBottomCorners:function(el,_5fa,_5fb){
+var _5fc=this._createCorner(_5fb);
+for(var i=(this.options.numSlices-1);i>=0;i--){
+_5fc.appendChild(this._createCornerSlice(_5fa,_5fb,i,"bottom"));
+}
+el.style.paddingBottom=0;
+el.appendChild(_5fc);
+},_createCorner:function(_5fe){
+var dom=MochiKit.DOM;
+return dom.DIV({style:{backgroundColor:_5fe.toString()}});
+},_createCornerSlice:function(_600,_601,n,_603){
+var _604=MochiKit.DOM.SPAN();
+var _605=_604.style;
+_605.backgroundColor=_600.toString();
+_605.display="block";
+_605.height="1px";
+_605.overflow="hidden";
+_605.fontSize="1px";
+var _606=this._borderColor(_600,_601);
+if(this.options.border&&n===0){
+_605.borderTopStyle="solid";
+_605.borderTopWidth="1px";
+_605.borderLeftWidth="0px";
+_605.borderRightWidth="0px";
+_605.borderBottomWidth="0px";
+_605.height="0px";
+_605.borderColor=_606.toString();
+}else{
+if(_606){
+_605.borderColor=_606.toString();
+_605.borderStyle="solid";
+_605.borderWidth="0px 1px";
+}
+}
+if(!this.options.compact&&(n==(this.options.numSlices-1))){
+_605.height="2px";
+}
+this._setMargin(_604,n,_603);
+this._setBorder(_604,n,_603);
+return _604;
+},_setOptions:function(_607){
+this.options={corners:"all",color:"fromElement",bgColor:"fromParent",blend:true,border:false,compact:false,__unstable__wrapElement:false};
+MochiKit.Base.update(this.options,_607);
+this.options.numSlices=(this.options.compact?2:4);
+},_whichSideTop:function(){
+var _608=this.options.corners;
+if(this._hasString(_608,"all","top")){
+return "";
+}
+var _609=(_608.indexOf("tl")!=-1);
+var _60a=(_608.indexOf("tr")!=-1);
+if(_609&&_60a){
+return "";
+}
+if(_609){
+return "left";
+}
+if(_60a){
+return "right";
+}
+return "";
+},_whichSideBottom:function(){
+var _60b=this.options.corners;
+if(this._hasString(_60b,"all","bottom")){
+return "";
+}
+var _60c=(_60b.indexOf("bl")!=-1);
+var _60d=(_60b.indexOf("br")!=-1);
+if(_60c&&_60d){
+return "";
+}
+if(_60c){
+return "left";
+}
+if(_60d){
+return "right";
+}
+return "";
+},_borderColor:function(_60e,_60f){
+if(_60e=="transparent"){
+return _60f;
+}else{
+if(this.options.border){
+return this.options.border;
+}else{
+if(this.options.blend){
+return _60f.blendedColor(_60e);
+}
+}
+}
+return "";
+},_setMargin:function(el,n,_612){
+var _613=this._marginSize(n)+"px";
+var _614=(_612=="top"?this._whichSideTop():this._whichSideBottom());
+var _615=el.style;
+if(_614=="left"){
+_615.marginLeft=_613;
+_615.marginRight="0px";
+}else{
+if(_614=="right"){
+_615.marginRight=_613;
+_615.marginLeft="0px";
+}else{
+_615.marginLeft=_613;
+_615.marginRight=_613;
+}
+}
+},_setBorder:function(el,n,_618){
+var _619=this._borderSize(n)+"px";
+var _61a=(_618=="top"?this._whichSideTop():this._whichSideBottom());
+var _61b=el.style;
+if(_61a=="left"){
+_61b.borderLeftWidth=_619;
+_61b.borderRightWidth="0px";
+}else{
+if(_61a=="right"){
+_61b.borderRightWidth=_619;
+_61b.borderLeftWidth="0px";
+}else{
+_61b.borderLeftWidth=_619;
+_61b.borderRightWidth=_619;
+}
+}
+},_marginSize:function(n){
+if(this.isTransparent){
+return 0;
+}
+var o=this.options;
+if(o.compact&&o.blend){
+var _61e=[1,0];
+return _61e[n];
+}else{
+if(o.compact){
+var _61f=[2,1];
+return _61f[n];
+}else{
+if(o.blend){
+var _620=[3,2,1,0];
+return _620[n];
+}else{
+var _621=[5,3,2,1];
+return _621[n];
+}
+}
+}
+},_borderSize:function(n){
+var o=this.options;
+var _624;
+if(o.compact&&(o.blend||this.isTransparent)){
+return 1;
+}else{
+if(o.compact){
+_624=[1,0];
+}else{
+if(o.blend){
+_624=[2,1,1,1];
+}else{
+if(o.border){
+_624=[0,2,0,0];
+}else{
+if(this.isTransparent){
+_624=[5,3,2,1];
+}else{
+return 0;
+}
+}
+}
+}
+}
+return _624[n];
+},_hasString:function(str){
+for(var i=1;i<arguments.length;i++){
+if(str.indexOf(arguments[i])!=-1){
+return true;
+}
+}
+return false;
+},_isTopRounded:function(){
+return this._hasString(this.options.corners,"all","top","tl","tr");
+},_isBottomRounded:function(){
+return this._hasString(this.options.corners,"all","bottom","bl","br");
+},_hasSingleTextChild:function(el){
+return (el.childNodes.length==1&&el.childNodes[0].nodeType==3);
+}};
+MochiKit.Visual.roundElement=function(e,_629){
+new MochiKit.Visual._RoundCorners(e,_629);
+};
+MochiKit.Visual.roundClass=function(_62a,_62b,_62c){
+var _62d=MochiKit.DOM.getElementsByTagAndClassName(_62a,_62b);
+for(var i=0;i<_62d.length;i++){
+MochiKit.Visual.roundElement(_62d[i],_62c);
+}
+};
+MochiKit.Visual.tagifyText=function(_62f,_630){
+_630=_630||"position:relative";
+if(/MSIE/.test(navigator.userAgent)){
+_630+=";zoom:1";
+}
+_62f=MochiKit.DOM.getElement(_62f);
+var ma=MochiKit.Base.map;
+ma(function(_632){
+if(_632.nodeType==3){
+ma(function(_633){
+_62f.insertBefore(MochiKit.DOM.SPAN({style:_630},_633==" "?String.fromCharCode(160):_633),_632);
+},_632.nodeValue.split(""));
+MochiKit.DOM.removeElement(_632);
+}
+},_62f.childNodes);
+};
+MochiKit.Visual.forceRerendering=function(_634){
+try{
+_634=MochiKit.DOM.getElement(_634);
+var n=document.createTextNode(" ");
+_634.appendChild(n);
+_634.removeChild(n);
+}
+catch(e){
+}
+};
+MochiKit.Visual.multiple=function(_636,_637,_638){
+_638=MochiKit.Base.update({speed:0.1,delay:0},_638);
+var _639=_638.delay;
+var _63a=0;
+MochiKit.Base.map(function(_63b){
+_638.delay=_63a*_638.speed+_639;
+new _637(_63b,_638);
+_63a+=1;
+},_636);
+};
+MochiKit.Visual.PAIRS={"slide":["slideDown","slideUp"],"blind":["blindDown","blindUp"],"appear":["appear","fade"],"size":["grow","shrink"]};
+MochiKit.Visual.toggle=function(_63c,_63d,_63e){
+_63c=MochiKit.DOM.getElement(_63c);
+_63d=(_63d||"appear").toLowerCase();
+_63e=MochiKit.Base.update({queue:{position:"end",scope:(_63c.id||"global"),limit:1}},_63e);
+var v=MochiKit.Visual;
+v[MochiKit.Style.getStyle(_63c,"display")!="none"?v.PAIRS[_63d][1]:v.PAIRS[_63d][0]](_63c,_63e);
+};
+MochiKit.Visual.Transitions={};
+MochiKit.Visual.Transitions.linear=function(pos){
+return pos;
+};
+MochiKit.Visual.Transitions.sinoidal=function(pos){
+return 0.5-Math.cos(pos*Math.PI)/2;
+};
+MochiKit.Visual.Transitions.reverse=function(pos){
+return 1-pos;
+};
+MochiKit.Visual.Transitions.flicker=function(pos){
+return 0.25-Math.cos(pos*Math.PI)/4+Math.random()/2;
+};
+MochiKit.Visual.Transitions.wobble=function(pos){
+return 0.5-Math.cos(9*pos*Math.PI)/2;
+};
+MochiKit.Visual.Transitions.pulse=function(pos,_646){
+if(_646){
+pos*=2*_646;
+}else{
+pos*=10;
+}
+var _647=pos-Math.floor(pos);
+return (Math.floor(pos)%2==0)?_647:1-_647;
+};
+MochiKit.Visual.Transitions.parabolic=function(pos){
+return pos*pos;
+};
+MochiKit.Visual.Transitions.none=function(pos){
+return 0;
+};
+MochiKit.Visual.Transitions.full=function(pos){
+return 1;
+};
+MochiKit.Visual.ScopedQueue=function(){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls();
+}
+this.__init__();
+};
+MochiKit.Base.update(MochiKit.Visual.ScopedQueue.prototype,{__init__:function(){
+this.effects=[];
+this.interval=null;
+},add:function(_64c){
+var _64d=new Date().getTime();
+var _64e=(typeof (_64c.options.queue)=="string")?_64c.options.queue:_64c.options.queue.position;
+var ma=MochiKit.Base.map;
+switch(_64e){
+case "front":
+ma(function(e){
+if(e.state=="idle"){
+e.startOn+=_64c.finishOn;
+e.finishOn+=_64c.finishOn;
+}
+},this.effects);
+break;
+case "end":
+var _651;
+ma(function(e){
+var i=e.finishOn;
+if(i>=(_651||i)){
+_651=i;
+}
+},this.effects);
+_64d=_651||_64d;
+break;
+case "break":
+ma(function(e){
+e.finalize();
+},this.effects);
+break;
+}
+_64c.startOn+=_64d;
+_64c.finishOn+=_64d;
+if(!_64c.options.queue.limit||this.effects.length<_64c.options.queue.limit){
+this.effects.push(_64c);
+}
+if(!this.interval){
+this.interval=this.startLoop(MochiKit.Base.bind(this.loop,this),40);
+}
+},startLoop:function(func,_656){
+return setInterval(func,_656);
+},remove:function(_657){
+this.effects=MochiKit.Base.filter(function(e){
+return e!=_657;
+},this.effects);
+if(!this.effects.length){
+this.stopLoop(this.interval);
+this.interval=null;
+}
+},stopLoop:function(_659){
+clearInterval(_659);
+},loop:function(){
+var _65a=new Date().getTime();
+MochiKit.Base.map(function(_65b){
+_65b.loop(_65a);
+},this.effects);
+}});
+MochiKit.Visual.Queues={instances:{},get:function(_65c){
+if(typeof (_65c)!="string"){
+return _65c;
+}
+if(!this.instances[_65c]){
+this.instances[_65c]=new MochiKit.Visual.ScopedQueue();
+}
+return this.instances[_65c];
+}};
+MochiKit.Visual.Queue=MochiKit.Visual.Queues.get("global");
+MochiKit.Visual.DefaultOptions={transition:MochiKit.Visual.Transitions.sinoidal,duration:1,fps:25,sync:false,from:0,to:1,delay:0,queue:"parallel"};
+MochiKit.Visual.Base=function(){
+};
+MochiKit.Visual.Base.prototype={__class__:MochiKit.Visual.Base,start:function(_65d){
+var v=MochiKit.Visual;
+this.options=MochiKit.Base.setdefault(_65d,v.DefaultOptions);
+this.currentFrame=0;
+this.state="idle";
+this.startOn=this.options.delay*1000;
+this.finishOn=this.startOn+(this.options.duration*1000);
+this.event("beforeStart");
+if(!this.options.sync){
+v.Queues.get(typeof (this.options.queue)=="string"?"global":this.options.queue.scope).add(this);
+}
+},loop:function(_65f){
+if(_65f>=this.startOn){
+if(_65f>=this.finishOn){
+return this.finalize();
+}
+var pos=(_65f-this.startOn)/(this.finishOn-this.startOn);
+var _661=Math.round(pos*this.options.fps*this.options.duration);
+if(_661>this.currentFrame){
+this.render(pos);
+this.currentFrame=_661;
+}
+}
+},render:function(pos){
+if(this.state=="idle"){
+this.state="running";
+this.event("beforeSetup");
+this.setup();
+this.event("afterSetup");
+}
+if(this.state=="running"){
+if(this.options.transition){
+pos=this.options.transition(pos);
+}
+pos*=(this.options.to-this.options.from);
+pos+=this.options.from;
+this.event("beforeUpdate");
+this.update(pos);
+this.event("afterUpdate");
+}
+},cancel:function(){
+if(!this.options.sync){
+MochiKit.Visual.Queues.get(typeof (this.options.queue)=="string"?"global":this.options.queue.scope).remove(this);
+}
+this.state="finished";
+},finalize:function(){
+this.render(1);
+this.cancel();
+this.event("beforeFinish");
+this.finish();
+this.event("afterFinish");
+},setup:function(){
+},finish:function(){
+},update:function(_663){
+},event:function(_664){
+if(this.options[_664+"Internal"]){
+this.options[_664+"Internal"](this);
+}
+if(this.options[_664]){
+this.options[_664](this);
+}
+},repr:function(){
+return "["+this.__class__.NAME+", options:"+MochiKit.Base.repr(this.options)+"]";
+}};
+MochiKit.Visual.Parallel=function(_665,_666){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_665,_666);
+}
+this.__init__(_665,_666);
+};
+MochiKit.Visual.Parallel.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.Parallel.prototype,{__class__:MochiKit.Visual.Parallel,__init__:function(_668,_669){
+this.effects=_668||[];
+this.start(_669);
+},update:function(_66a){
+MochiKit.Base.map(function(_66b){
+_66b.render(_66a);
+},this.effects);
+},finish:function(){
+MochiKit.Base.map(function(_66c){
+_66c.finalize();
+},this.effects);
+}});
+MochiKit.Visual.Sequence=function(_66d,_66e){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_66d,_66e);
+}
+this.__init__(_66d,_66e);
+};
+MochiKit.Visual.Sequence.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.Sequence.prototype,{__class__:MochiKit.Visual.Sequence,__init__:function(_670,_671){
+var defs={transition:MochiKit.Visual.Transitions.linear,duration:0};
+this.effects=_670||[];
+MochiKit.Base.map(function(_673){
+defs.duration+=_673.options.duration;
+},this.effects);
+MochiKit.Base.setdefault(_671,defs);
+this.start(_671);
+},update:function(_674){
+var time=_674*this.options.duration;
+for(var i=0;i<this.effects.length;i++){
+var _677=this.effects[i];
+if(time<=_677.options.duration){
+_677.render(time/_677.options.duration);
+break;
+}else{
+time-=_677.options.duration;
+}
+}
+},finish:function(){
+MochiKit.Base.map(function(_678){
+_678.finalize();
+},this.effects);
+}});
+MochiKit.Visual.Opacity=function(_679,_67a){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_679,_67a);
+}
+this.__init__(_679,_67a);
+};
+MochiKit.Visual.Opacity.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.Opacity.prototype,{__class__:MochiKit.Visual.Opacity,__init__:function(_67c,_67d){
+var b=MochiKit.Base;
+var s=MochiKit.Style;
+this.element=MochiKit.DOM.getElement(_67c);
+if(this.element.currentStyle&&(!this.element.currentStyle.hasLayout)){
+s.setStyle(this.element,{zoom:1});
+}
+_67d=b.update({from:s.getStyle(this.element,"opacity")||0,to:1},_67d);
+this.start(_67d);
+},update:function(_680){
+MochiKit.Style.setStyle(this.element,{"opacity":_680});
+}});
+MochiKit.Visual.Move=function(_681,_682){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_681,_682);
+}
+this.__init__(_681,_682);
+};
+MochiKit.Visual.Move.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.Move.prototype,{__class__:MochiKit.Visual.Move,__init__:function(_684,_685){
+this.element=MochiKit.DOM.getElement(_684);
+_685=MochiKit.Base.update({x:0,y:0,mode:"relative"},_685);
+this.start(_685);
+},setup:function(){
+MochiKit.Style.makePositioned(this.element);
+var s=this.element.style;
+var _687=s.visibility;
+var _688=s.display;
+if(_688=="none"){
+s.visibility="hidden";
+s.display="";
+}
+this.originalLeft=parseFloat(MochiKit.Style.getStyle(this.element,"left")||"0");
+this.originalTop=parseFloat(MochiKit.Style.getStyle(this.element,"top")||"0");
+if(this.options.mode=="absolute"){
+this.options.x-=this.originalLeft;
+this.options.y-=this.originalTop;
+}
+if(_688=="none"){
+s.visibility=_687;
+s.display=_688;
+}
+},update:function(_689){
+MochiKit.Style.setStyle(this.element,{left:Math.round(this.options.x*_689+this.originalLeft)+"px",top:Math.round(this.options.y*_689+this.originalTop)+"px"});
+}});
+MochiKit.Visual.Scale=function(_68a,_68b,_68c){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_68a,_68b,_68c);
+}
+this.__init__(_68a,_68b,_68c);
+};
+MochiKit.Visual.Scale.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.Scale.prototype,{__class__:MochiKit.Visual.Scale,__init__:function(_68e,_68f,_690){
+this.element=MochiKit.DOM.getElement(_68e);
+_690=MochiKit.Base.update({scaleX:true,scaleY:true,scaleContent:true,scaleFromCenter:false,scaleMode:"box",scaleFrom:100,scaleTo:_68f},_690);
+this.start(_690);
+},setup:function(){
+this.restoreAfterFinish=this.options.restoreAfterFinish||false;
+this.elementPositioning=MochiKit.Style.getStyle(this.element,"position");
+var ma=MochiKit.Base.map;
+var b=MochiKit.Base.bind;
+this.originalStyle={};
+ma(b(function(k){
+this.originalStyle[k]=this.element.style[k];
+},this),["top","left","width","height","fontSize"]);
+this.originalTop=this.element.offsetTop;
+this.originalLeft=this.element.offsetLeft;
+var _694=MochiKit.Style.getStyle(this.element,"font-size")||"100%";
+ma(b(function(_695){
+if(_694.indexOf(_695)>0){
+this.fontSize=parseFloat(_694);
+this.fontSizeType=_695;
+}
+},this),["em","px","%"]);
+this.factor=(this.options.scaleTo-this.options.scaleFrom)/100;
+if(/^content/.test(this.options.scaleMode)){
+this.dims=[this.element.scrollHeight,this.element.scrollWidth];
+}else{
+if(this.options.scaleMode=="box"){
+this.dims=[this.element.offsetHeight,this.element.offsetWidth];
+}else{
+this.dims=[this.options.scaleMode.originalHeight,this.options.scaleMode.originalWidth];
+}
+}
+},update:function(_696){
+var _697=(this.options.scaleFrom/100)+(this.factor*_696);
+if(this.options.scaleContent&&this.fontSize){
+MochiKit.Style.setStyle(this.element,{fontSize:this.fontSize*_697+this.fontSizeType});
+}
+this.setDimensions(this.dims[0]*_697,this.dims[1]*_697);
+},finish:function(){
+if(this.restoreAfterFinish){
+MochiKit.Style.setStyle(this.element,this.originalStyle);
+}
+},setDimensions:function(_698,_699){
+var d={};
+var r=Math.round;
+if(/MSIE/.test(navigator.userAgent)){
+r=Math.ceil;
+}
+if(this.options.scaleX){
+d.width=r(_699)+"px";
+}
+if(this.options.scaleY){
+d.height=r(_698)+"px";
+}
+if(this.options.scaleFromCenter){
+var topd=(_698-this.dims[0])/2;
+var _69d=(_699-this.dims[1])/2;
+if(this.elementPositioning=="absolute"){
+if(this.options.scaleY){
+d.top=this.originalTop-topd+"px";
+}
+if(this.options.scaleX){
+d.left=this.originalLeft-_69d+"px";
+}
+}else{
+if(this.options.scaleY){
+d.top=-topd+"px";
+}
+if(this.options.scaleX){
+d.left=-_69d+"px";
+}
+}
+}
+MochiKit.Style.setStyle(this.element,d);
+}});
+MochiKit.Visual.Highlight=function(_69e,_69f){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_69e,_69f);
+}
+this.__init__(_69e,_69f);
+};
+MochiKit.Visual.Highlight.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.Highlight.prototype,{__class__:MochiKit.Visual.Highlight,__init__:function(_6a1,_6a2){
+this.element=MochiKit.DOM.getElement(_6a1);
+_6a2=MochiKit.Base.update({startcolor:"#ffff99"},_6a2);
+this.start(_6a2);
+},setup:function(){
+var b=MochiKit.Base;
+var s=MochiKit.Style;
+if(s.getStyle(this.element,"display")=="none"){
+this.cancel();
+return;
+}
+this.oldStyle={backgroundImage:s.getStyle(this.element,"background-image")};
+s.setStyle(this.element,{backgroundImage:"none"});
+if(!this.options.endcolor){
+this.options.endcolor=MochiKit.Color.Color.fromBackground(this.element).toHexString();
+}
+if(b.isUndefinedOrNull(this.options.restorecolor)){
+this.options.restorecolor=s.getStyle(this.element,"background-color");
+}
+this._base=b.map(b.bind(function(i){
+return parseInt(this.options.startcolor.slice(i*2+1,i*2+3),16);
+},this),[0,1,2]);
+this._delta=b.map(b.bind(function(i){
+return parseInt(this.options.endcolor.slice(i*2+1,i*2+3),16)-this._base[i];
+},this),[0,1,2]);
+},update:function(_6a7){
+var m="#";
+MochiKit.Base.map(MochiKit.Base.bind(function(i){
+m+=MochiKit.Color.toColorPart(Math.round(this._base[i]+this._delta[i]*_6a7));
+},this),[0,1,2]);
+MochiKit.Style.setStyle(this.element,{backgroundColor:m});
+},finish:function(){
+MochiKit.Style.setStyle(this.element,MochiKit.Base.update(this.oldStyle,{backgroundColor:this.options.restorecolor}));
+}});
+MochiKit.Visual.ScrollTo=function(_6aa,_6ab){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_6aa,_6ab);
+}
+this.__init__(_6aa,_6ab);
+};
+MochiKit.Visual.ScrollTo.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.ScrollTo.prototype,{__class__:MochiKit.Visual.ScrollTo,__init__:function(_6ad,_6ae){
+this.element=MochiKit.DOM.getElement(_6ad);
+this.start(_6ae);
+},setup:function(){
+var p=MochiKit.Position;
+p.prepare();
+var _6b0=p.cumulativeOffset(this.element);
+if(this.options.offset){
+_6b0.y+=this.options.offset;
+}
+var max;
+if(window.innerHeight){
+max=window.innerHeight-window.height;
+}else{
+if(document.documentElement&&document.documentElement.clientHeight){
+max=document.documentElement.clientHeight-document.body.scrollHeight;
+}else{
+if(document.body){
+max=document.body.clientHeight-document.body.scrollHeight;
+}
+}
+}
+this.scrollStart=p.windowOffset.y;
+this.delta=(_6b0.y>max?max:_6b0.y)-this.scrollStart;
+},update:function(_6b2){
+var p=MochiKit.Position;
+p.prepare();
+window.scrollTo(p.windowOffset.x,this.scrollStart+(_6b2*this.delta));
+}});
+MochiKit.Visual.CSS_LENGTH=/^(([\+\-]?[0-9\.]+)(em|ex|px|in|cm|mm|pt|pc|\%))|0$/;
+MochiKit.Visual.Morph=function(_6b4,_6b5){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_6b4,_6b5);
+}
+this.__init__(_6b4,_6b5);
+};
+MochiKit.Visual.Morph.prototype=new MochiKit.Visual.Base();
+MochiKit.Base.update(MochiKit.Visual.Morph.prototype,{__class__:MochiKit.Visual.Morph,__init__:function(_6b7,_6b8){
+this.element=MochiKit.DOM.getElement(_6b7);
+this.start(_6b8);
+},setup:function(){
+var b=MochiKit.Base;
+var _6ba=this.options.style;
+this.styleStart={};
+this.styleEnd={};
+this.units={};
+var _6bb,unit;
+for(var s in _6ba){
+_6bb=_6ba[s];
+s=b.camelize(s);
+if(MochiKit.Visual.CSS_LENGTH.test(_6bb)){
+var _6be=_6bb.match(/^([\+\-]?[0-9\.]+)(.*)$/);
+_6bb=parseFloat(_6be[1]);
+unit=(_6be.length==3)?_6be[2]:null;
+this.styleEnd[s]=_6bb;
+this.units[s]=unit;
+_6bb=MochiKit.Style.getStyle(this.element,s);
+_6be=_6bb.match(/^([\+\-]?[0-9\.]+)(.*)$/);
+_6bb=parseFloat(_6be[1]);
+this.styleStart[s]=_6bb;
+}else{
+if(/[Cc]olor$/.test(s)){
+var c=MochiKit.Color.Color;
+_6bb=c.fromString(_6bb);
+if(_6bb){
+this.units[s]="color";
+this.styleEnd[s]=_6bb.toHexString();
+_6bb=MochiKit.Style.getStyle(this.element,s);
+this.styleStart[s]=c.fromString(_6bb).toHexString();
+this.styleStart[s]=b.map(b.bind(function(i){
+return parseInt(this.styleStart[s].slice(i*2+1,i*2+3),16);
+},this),[0,1,2]);
+this.styleEnd[s]=b.map(b.bind(function(i){
+return parseInt(this.styleEnd[s].slice(i*2+1,i*2+3),16);
+},this),[0,1,2]);
+}
+}else{
+this.element.style[s]=_6bb;
+}
+}
+}
+},update:function(_6c2){
+var _6c3;
+for(var s in this.styleStart){
+if(this.units[s]=="color"){
+var m="#";
+var _6c6=this.styleStart[s];
+var end=this.styleEnd[s];
+MochiKit.Base.map(MochiKit.Base.bind(function(i){
+m+=MochiKit.Color.toColorPart(Math.round(_6c6[i]+(end[i]-_6c6[i])*_6c2));
+},this),[0,1,2]);
+this.element.style[s]=m;
+}else{
+_6c3=this.styleStart[s]+Math.round((this.styleEnd[s]-this.styleStart[s])*_6c2*1000)/1000+this.units[s];
+this.element.style[s]=_6c3;
+}
+}
+}});
+MochiKit.Visual.fade=function(_6c9,_6ca){
+var s=MochiKit.Style;
+var _6cc=s.getStyle(_6c9,"opacity");
+_6ca=MochiKit.Base.update({from:s.getStyle(_6c9,"opacity")||1,to:0,afterFinishInternal:function(_6cd){
+if(_6cd.options.to!==0){
+return;
+}
+s.hideElement(_6cd.element);
+s.setStyle(_6cd.element,{"opacity":_6cc});
+}},_6ca);
+return new MochiKit.Visual.Opacity(_6c9,_6ca);
+};
+MochiKit.Visual.appear=function(_6ce,_6cf){
+var s=MochiKit.Style;
+var v=MochiKit.Visual;
+_6cf=MochiKit.Base.update({from:(s.getStyle(_6ce,"display")=="none"?0:s.getStyle(_6ce,"opacity")||0),to:1,afterFinishInternal:function(_6d2){
+v.forceRerendering(_6d2.element);
+},beforeSetupInternal:function(_6d3){
+s.setStyle(_6d3.element,{"opacity":_6d3.options.from});
+s.showElement(_6d3.element);
+}},_6cf);
+return new v.Opacity(_6ce,_6cf);
+};
+MochiKit.Visual.puff=function(_6d4,_6d5){
+var s=MochiKit.Style;
+var v=MochiKit.Visual;
+_6d4=MochiKit.DOM.getElement(_6d4);
+var _6d8=MochiKit.Style.getElementDimensions(_6d4,true);
+var _6d9={position:s.getStyle(_6d4,"position"),top:_6d4.style.top,left:_6d4.style.left,width:_6d4.style.width,height:_6d4.style.height,opacity:s.getStyle(_6d4,"opacity")};
+_6d5=MochiKit.Base.update({beforeSetupInternal:function(_6da){
+MochiKit.Position.absolutize(_6da.effects[0].element);
+},afterFinishInternal:function(_6db){
+s.hideElement(_6db.effects[0].element);
+s.setStyle(_6db.effects[0].element,_6d9);
+},scaleContent:true,scaleFromCenter:true},_6d5);
+return new v.Parallel([new v.Scale(_6d4,200,{sync:true,scaleFromCenter:_6d5.scaleFromCenter,scaleMode:{originalHeight:_6d8.h,originalWidth:_6d8.w},scaleContent:_6d5.scaleContent,restoreAfterFinish:true}),new v.Opacity(_6d4,{sync:true,to:0})],_6d5);
+};
+MochiKit.Visual.blindUp=function(_6dc,_6dd){
+var d=MochiKit.DOM;
+var s=MochiKit.Style;
+_6dc=d.getElement(_6dc);
+var _6e0=s.getElementDimensions(_6dc,true);
+var _6e1=s.makeClipping(_6dc);
+_6dd=MochiKit.Base.update({scaleContent:false,scaleX:false,scaleMode:{originalHeight:_6e0.h,originalWidth:_6e0.w},restoreAfterFinish:true,afterFinishInternal:function(_6e2){
+s.hideElement(_6e2.element);
+s.undoClipping(_6e2.element,_6e1);
+}},_6dd);
+return new MochiKit.Visual.Scale(_6dc,0,_6dd);
+};
+MochiKit.Visual.blindDown=function(_6e3,_6e4){
+var d=MochiKit.DOM;
+var s=MochiKit.Style;
+_6e3=d.getElement(_6e3);
+var _6e7=s.getElementDimensions(_6e3,true);
+var _6e8;
+_6e4=MochiKit.Base.update({scaleContent:false,scaleX:false,scaleFrom:0,scaleMode:{originalHeight:_6e7.h,originalWidth:_6e7.w},restoreAfterFinish:true,afterSetupInternal:function(_6e9){
+_6e8=s.makeClipping(_6e9.element);
+s.setStyle(_6e9.element,{height:"0px"});
+s.showElement(_6e9.element);
+},afterFinishInternal:function(_6ea){
+s.undoClipping(_6ea.element,_6e8);
+}},_6e4);
+return new MochiKit.Visual.Scale(_6e3,100,_6e4);
+};
+MochiKit.Visual.switchOff=function(_6eb,_6ec){
+var d=MochiKit.DOM;
+var s=MochiKit.Style;
+_6eb=d.getElement(_6eb);
+var _6ef=s.getElementDimensions(_6eb,true);
+var _6f0=s.getStyle(_6eb,"opacity");
+var _6f1;
+_6ec=MochiKit.Base.update({duration:0.7,restoreAfterFinish:true,beforeSetupInternal:function(_6f2){
+s.makePositioned(_6eb);
+_6f1=s.makeClipping(_6eb);
+},afterFinishInternal:function(_6f3){
+s.hideElement(_6eb);
+s.undoClipping(_6eb,_6f1);
+s.undoPositioned(_6eb);
+s.setStyle(_6eb,{"opacity":_6f0});
+}},_6ec);
+var v=MochiKit.Visual;
+return new v.Sequence([new v.appear(_6eb,{sync:true,duration:0.57*_6ec.duration,from:0,transition:v.Transitions.flicker}),new v.Scale(_6eb,1,{sync:true,duration:0.43*_6ec.duration,scaleFromCenter:true,scaleX:false,scaleMode:{originalHeight:_6ef.h,originalWidth:_6ef.w},scaleContent:false,restoreAfterFinish:true})],_6ec);
+};
+MochiKit.Visual.dropOut=function(_6f5,_6f6){
+var d=MochiKit.DOM;
+var s=MochiKit.Style;
+_6f5=d.getElement(_6f5);
+var _6f9={top:s.getStyle(_6f5,"top"),left:s.getStyle(_6f5,"left"),opacity:s.getStyle(_6f5,"opacity")};
+_6f6=MochiKit.Base.update({duration:0.5,distance:100,beforeSetupInternal:function(_6fa){
+s.makePositioned(_6fa.effects[0].element);
+},afterFinishInternal:function(_6fb){
+s.hideElement(_6fb.effects[0].element);
+s.undoPositioned(_6fb.effects[0].element);
+s.setStyle(_6fb.effects[0].element,_6f9);
+}},_6f6);
+var v=MochiKit.Visual;
+return new v.Parallel([new v.Move(_6f5,{x:0,y:_6f6.distance,sync:true}),new v.Opacity(_6f5,{sync:true,to:0})],_6f6);
+};
+MochiKit.Visual.shake=function(_6fd,_6fe){
+var d=MochiKit.DOM;
+var v=MochiKit.Visual;
+var s=MochiKit.Style;
+_6fd=d.getElement(_6fd);
+var _702={top:s.getStyle(_6fd,"top"),left:s.getStyle(_6fd,"left")};
+_6fe=MochiKit.Base.update({duration:0.5,afterFinishInternal:function(_703){
+s.undoPositioned(_6fd);
+s.setStyle(_6fd,_702);
+}},_6fe);
+return new v.Sequence([new v.Move(_6fd,{sync:true,duration:0.1*_6fe.duration,x:20,y:0}),new v.Move(_6fd,{sync:true,duration:0.2*_6fe.duration,x:-40,y:0}),new v.Move(_6fd,{sync:true,duration:0.2*_6fe.duration,x:40,y:0}),new v.Move(_6fd,{sync:true,duration:0.2*_6fe.duration,x:-40,y:0}),new v.Move(_6fd,{sync:true,duration:0.2*_6fe.duration,x:40,y:0}),new v.Move(_6fd,{sync:true,duration:0.1*_6fe.duration,x:-20,y:0})],_6fe);
+};
+MochiKit.Visual.slideDown=function(_704,_705){
+var d=MochiKit.DOM;
+var b=MochiKit.Base;
+var s=MochiKit.Style;
+_704=d.getElement(_704);
+if(!_704.firstChild){
+throw new Error("MochiKit.Visual.slideDown must be used on a element with a child");
+}
+d.removeEmptyTextNodes(_704);
+var _709=s.getStyle(_704.firstChild,"bottom")||0;
+var _70a=s.getElementDimensions(_704,true);
+var _70b;
+_705=b.update({scaleContent:false,scaleX:false,scaleFrom:0,scaleMode:{originalHeight:_70a.h,originalWidth:_70a.w},restoreAfterFinish:true,afterSetupInternal:function(_70c){
+s.makePositioned(_70c.element);
+s.makePositioned(_70c.element.firstChild);
+if(/Opera/.test(navigator.userAgent)){
+s.setStyle(_70c.element,{top:""});
+}
+_70b=s.makeClipping(_70c.element);
+s.setStyle(_70c.element,{height:"0px"});
+s.showElement(_70c.element);
+},afterUpdateInternal:function(_70d){
+var _70e=s.getElementDimensions(_70d.element,true);
+s.setStyle(_70d.element.firstChild,{bottom:(_70d.dims[0]-_70e.h)+"px"});
+},afterFinishInternal:function(_70f){
+s.undoClipping(_70f.element,_70b);
+if(/MSIE/.test(navigator.userAgent)){
+s.undoPositioned(_70f.element);
+s.undoPositioned(_70f.element.firstChild);
+}else{
+s.undoPositioned(_70f.element.firstChild);
+s.undoPositioned(_70f.element);
+}
+s.setStyle(_70f.element.firstChild,{bottom:_709});
+}},_705);
+return new MochiKit.Visual.Scale(_704,100,_705);
+};
+MochiKit.Visual.slideUp=function(_710,_711){
+var d=MochiKit.DOM;
+var b=MochiKit.Base;
+var s=MochiKit.Style;
+_710=d.getElement(_710);
+if(!_710.firstChild){
+throw new Error("MochiKit.Visual.slideUp must be used on a element with a child");
+}
+d.removeEmptyTextNodes(_710);
+var _715=s.getStyle(_710.firstChild,"bottom");
+var _716=s.getElementDimensions(_710,true);
+var _717;
+_711=b.update({scaleContent:false,scaleX:false,scaleMode:{originalHeight:_716.h,originalWidth:_716.w},scaleFrom:100,restoreAfterFinish:true,beforeStartInternal:function(_718){
+s.makePositioned(_718.element);
+s.makePositioned(_718.element.firstChild);
+if(/Opera/.test(navigator.userAgent)){
+s.setStyle(_718.element,{top:""});
+}
+_717=s.makeClipping(_718.element);
+s.showElement(_718.element);
+},afterUpdateInternal:function(_719){
+var _71a=s.getElementDimensions(_719.element,true);
+s.setStyle(_719.element.firstChild,{bottom:(_719.dims[0]-_71a.h)+"px"});
+},afterFinishInternal:function(_71b){
+s.hideElement(_71b.element);
+s.undoClipping(_71b.element,_717);
+s.undoPositioned(_71b.element.firstChild);
+s.undoPositioned(_71b.element);
+s.setStyle(_71b.element.firstChild,{bottom:_715});
+}},_711);
+return new MochiKit.Visual.Scale(_710,0,_711);
+};
+MochiKit.Visual.squish=function(_71c,_71d){
+var d=MochiKit.DOM;
+var b=MochiKit.Base;
+var s=MochiKit.Style;
+var _721=s.getElementDimensions(_71c,true);
+var _722;
+_71d=b.update({restoreAfterFinish:true,scaleMode:{originalHeight:_721.w,originalWidth:_721.h},beforeSetupInternal:function(_723){
+_722=s.makeClipping(_723.element);
+},afterFinishInternal:function(_724){
+s.hideElement(_724.element);
+s.undoClipping(_724.element,_722);
+}},_71d);
+return new MochiKit.Visual.Scale(_71c,/Opera/.test(navigator.userAgent)?1:0,_71d);
+};
+MochiKit.Visual.grow=function(_725,_726){
+var d=MochiKit.DOM;
+var v=MochiKit.Visual;
+var s=MochiKit.Style;
+_725=d.getElement(_725);
+_726=MochiKit.Base.update({direction:"center",moveTransition:v.Transitions.sinoidal,scaleTransition:v.Transitions.sinoidal,opacityTransition:v.Transitions.full,scaleContent:true,scaleFromCenter:false},_726);
+var _72a={top:_725.style.top,left:_725.style.left,height:_725.style.height,width:_725.style.width,opacity:s.getStyle(_725,"opacity")};
+var dims=s.getElementDimensions(_725,true);
+var _72c,_72d;
+var _72e,_72f;
+switch(_726.direction){
+case "top-left":
+_72c=_72d=_72e=_72f=0;
+break;
+case "top-right":
+_72c=dims.w;
+_72d=_72f=0;
+_72e=-dims.w;
+break;
+case "bottom-left":
+_72c=_72e=0;
+_72d=dims.h;
+_72f=-dims.h;
+break;
+case "bottom-right":
+_72c=dims.w;
+_72d=dims.h;
+_72e=-dims.w;
+_72f=-dims.h;
+break;
+case "center":
+_72c=dims.w/2;
+_72d=dims.h/2;
+_72e=-dims.w/2;
+_72f=-dims.h/2;
+break;
+}
+var _730=MochiKit.Base.update({beforeSetupInternal:function(_731){
+s.setStyle(_731.effects[0].element,{height:"0px"});
+s.showElement(_731.effects[0].element);
+},afterFinishInternal:function(_732){
+s.undoClipping(_732.effects[0].element);
+s.undoPositioned(_732.effects[0].element);
+s.setStyle(_732.effects[0].element,_72a);
+}},_726);
+return new v.Move(_725,{x:_72c,y:_72d,duration:0.01,beforeSetupInternal:function(_733){
+s.hideElement(_733.element);
+s.makeClipping(_733.element);
+s.makePositioned(_733.element);
+},afterFinishInternal:function(_734){
+new v.Parallel([new v.Opacity(_734.element,{sync:true,to:1,from:0,transition:_726.opacityTransition}),new v.Move(_734.element,{x:_72e,y:_72f,sync:true,transition:_726.moveTransition}),new v.Scale(_734.element,100,{scaleMode:{originalHeight:dims.h,originalWidth:dims.w},sync:true,scaleFrom:/Opera/.test(navigator.userAgent)?1:0,transition:_726.scaleTransition,scaleContent:_726.scaleContent,scaleFromCenter:_726.scaleFromCenter,restoreAfterFinish:true})],_730);
+}});
+};
+MochiKit.Visual.shrink=function(_735,_736){
+var d=MochiKit.DOM;
+var v=MochiKit.Visual;
+var s=MochiKit.Style;
+_735=d.getElement(_735);
+_736=MochiKit.Base.update({direction:"center",moveTransition:v.Transitions.sinoidal,scaleTransition:v.Transitions.sinoidal,opacityTransition:v.Transitions.none,scaleContent:true,scaleFromCenter:false},_736);
+var _73a={top:_735.style.top,left:_735.style.left,height:_735.style.height,width:_735.style.width,opacity:s.getStyle(_735,"opacity")};
+var dims=s.getElementDimensions(_735,true);
+var _73c,_73d;
+switch(_736.direction){
+case "top-left":
+_73c=_73d=0;
+break;
+case "top-right":
+_73c=dims.w;
+_73d=0;
+break;
+case "bottom-left":
+_73c=0;
+_73d=dims.h;
+break;
+case "bottom-right":
+_73c=dims.w;
+_73d=dims.h;
+break;
+case "center":
+_73c=dims.w/2;
+_73d=dims.h/2;
+break;
+}
+var _73e;
+var _73f=MochiKit.Base.update({beforeStartInternal:function(_740){
+s.makePositioned(_740.effects[0].element);
+_73e=s.makeClipping(_740.effects[0].element);
+},afterFinishInternal:function(_741){
+s.hideElement(_741.effects[0].element);
+s.undoClipping(_741.effects[0].element,_73e);
+s.undoPositioned(_741.effects[0].element);
+s.setStyle(_741.effects[0].element,_73a);
+}},_736);
+return new v.Parallel([new v.Opacity(_735,{sync:true,to:0,from:1,transition:_736.opacityTransition}),new v.Scale(_735,/Opera/.test(navigator.userAgent)?1:0,{scaleMode:{originalHeight:dims.h,originalWidth:dims.w},sync:true,transition:_736.scaleTransition,scaleContent:_736.scaleContent,scaleFromCenter:_736.scaleFromCenter,restoreAfterFinish:true}),new v.Move(_735,{x:_73c,y:_73d,sync:true,transition:_736.moveTransition})],_73f);
+};
+MochiKit.Visual.pulsate=function(_742,_743){
+var d=MochiKit.DOM;
+var v=MochiKit.Visual;
+var b=MochiKit.Base;
+var _747=MochiKit.Style.getStyle(_742,"opacity");
+_743=b.update({duration:3,from:0,afterFinishInternal:function(_748){
+MochiKit.Style.setStyle(_748.element,{"opacity":_747});
+}},_743);
+var _749=_743.transition||v.Transitions.sinoidal;
+_743.transition=function(pos){
+return _749(1-v.Transitions.pulse(pos,_743.pulses));
+};
+return new v.Opacity(_742,_743);
+};
+MochiKit.Visual.fold=function(_74b,_74c){
+var d=MochiKit.DOM;
+var v=MochiKit.Visual;
+var s=MochiKit.Style;
+_74b=d.getElement(_74b);
+var _750=s.getElementDimensions(_74b,true);
+var _751={top:_74b.style.top,left:_74b.style.left,width:_74b.style.width,height:_74b.style.height};
+var _752=s.makeClipping(_74b);
+_74c=MochiKit.Base.update({scaleContent:false,scaleX:false,scaleMode:{originalHeight:_750.h,originalWidth:_750.w},afterFinishInternal:function(_753){
+new v.Scale(_74b,1,{scaleContent:false,scaleY:false,scaleMode:{originalHeight:_750.h,originalWidth:_750.w},afterFinishInternal:function(_754){
+s.hideElement(_754.element);
+s.undoClipping(_754.element,_752);
+s.setStyle(_754.element,_751);
+}});
+}},_74c);
+return new v.Scale(_74b,5,_74c);
+};
+MochiKit.Visual.Color=MochiKit.Color.Color;
+MochiKit.Visual.getElementsComputedStyle=MochiKit.DOM.computedStyle;
+MochiKit.Visual.__new__=function(){
+var m=MochiKit.Base;
+m.nameFunctions(this);
+this.EXPORT_TAGS={":common":this.EXPORT,":all":m.concat(this.EXPORT,this.EXPORT_OK)};
+};
+MochiKit.Visual.EXPORT=["roundElement","roundClass","tagifyText","multiple","toggle","Parallel","Sequence","Opacity","Move","Scale","Highlight","ScrollTo","Morph","fade","appear","puff","blindUp","blindDown","switchOff","dropOut","shake","slideDown","slideUp","squish","grow","shrink","pulsate","fold"];
+MochiKit.Visual.EXPORT_OK=["Base","PAIRS"];
+MochiKit.Visual.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.Visual);
+MochiKit.Base._deps("DragAndDrop",["Base","Iter","DOM","Signal","Visual","Position"]);
+MochiKit.DragAndDrop.NAME="MochiKit.DragAndDrop";
+MochiKit.DragAndDrop.VERSION="1.4.2";
+MochiKit.DragAndDrop.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.DragAndDrop.toString=function(){
+return this.__repr__();
+};
+MochiKit.DragAndDrop.EXPORT=["Droppable","Draggable"];
+MochiKit.DragAndDrop.EXPORT_OK=["Droppables","Draggables"];
+MochiKit.DragAndDrop.Droppables={drops:[],remove:function(_756){
+this.drops=MochiKit.Base.filter(function(d){
+return d.element!=MochiKit.DOM.getElement(_756);
+},this.drops);
+},register:function(drop){
+this.drops.push(drop);
+},unregister:function(drop){
+this.drops=MochiKit.Base.filter(function(d){
+return d!=drop;
+},this.drops);
+},prepare:function(_75b){
+MochiKit.Base.map(function(drop){
+if(drop.isAccepted(_75b)){
+if(drop.options.activeclass){
+MochiKit.DOM.addElementClass(drop.element,drop.options.activeclass);
+}
+drop.options.onactive(drop.element,_75b);
+}
+},this.drops);
+},findDeepestChild:function(_75d){
+deepest=_75d[0];
+for(i=1;i<_75d.length;++i){
+if(MochiKit.DOM.isChildNode(_75d[i].element,deepest.element)){
+deepest=_75d[i];
+}
+}
+return deepest;
+},show:function(_75e,_75f){
+if(!this.drops.length){
+return;
+}
+var _760=[];
+if(this.last_active){
+this.last_active.deactivate();
+}
+MochiKit.Iter.forEach(this.drops,function(drop){
+if(drop.isAffected(_75e,_75f)){
+_760.push(drop);
+}
+});
+if(_760.length>0){
+drop=this.findDeepestChild(_760);
+MochiKit.Position.within(drop.element,_75e.page.x,_75e.page.y);
+drop.options.onhover(_75f,drop.element,MochiKit.Position.overlap(drop.options.overlap,drop.element));
+drop.activate();
+}
+},fire:function(_762,_763){
+if(!this.last_active){
+return;
+}
+MochiKit.Position.prepare();
+if(this.last_active.isAffected(_762.mouse(),_763)){
+this.last_active.options.ondrop(_763,this.last_active.element,_762);
+}
+},reset:function(_764){
+MochiKit.Base.map(function(drop){
+if(drop.options.activeclass){
+MochiKit.DOM.removeElementClass(drop.element,drop.options.activeclass);
+}
+drop.options.ondesactive(drop.element,_764);
+},this.drops);
+if(this.last_active){
+this.last_active.deactivate();
+}
+}};
+MochiKit.DragAndDrop.Droppable=function(_766,_767){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_766,_767);
+}
+this.__init__(_766,_767);
+};
+MochiKit.DragAndDrop.Droppable.prototype={__class__:MochiKit.DragAndDrop.Droppable,__init__:function(_769,_76a){
+var d=MochiKit.DOM;
+var b=MochiKit.Base;
+this.element=d.getElement(_769);
+this.options=b.update({greedy:true,hoverclass:null,activeclass:null,hoverfunc:b.noop,accept:null,onactive:b.noop,ondesactive:b.noop,onhover:b.noop,ondrop:b.noop,containment:[],tree:false},_76a);
+this.options._containers=[];
+b.map(MochiKit.Base.bind(function(c){
+this.options._containers.push(d.getElement(c));
+},this),this.options.containment);
+MochiKit.Style.makePositioned(this.element);
+MochiKit.DragAndDrop.Droppables.register(this);
+},isContained:function(_76e){
+if(this.options._containers.length){
+var _76f;
+if(this.options.tree){
+_76f=_76e.treeNode;
+}else{
+_76f=_76e.parentNode;
+}
+return MochiKit.Iter.some(this.options._containers,function(c){
+return _76f==c;
+});
+}else{
+return true;
+}
+},isAccepted:function(_771){
+return ((!this.options.accept)||MochiKit.Iter.some(this.options.accept,function(c){
+return MochiKit.DOM.hasElementClass(_771,c);
+}));
+},isAffected:function(_773,_774){
+return ((this.element!=_774)&&this.isContained(_774)&&this.isAccepted(_774)&&MochiKit.Position.within(this.element,_773.page.x,_773.page.y));
+},deactivate:function(){
+if(this.options.hoverclass){
+MochiKit.DOM.removeElementClass(this.element,this.options.hoverclass);
+}
+this.options.hoverfunc(this.element,false);
+MochiKit.DragAndDrop.Droppables.last_active=null;
+},activate:function(){
+if(this.options.hoverclass){
+MochiKit.DOM.addElementClass(this.element,this.options.hoverclass);
+}
+this.options.hoverfunc(this.element,true);
+MochiKit.DragAndDrop.Droppables.last_active=this;
+},destroy:function(){
+MochiKit.DragAndDrop.Droppables.unregister(this);
+},repr:function(){
+return "["+this.__class__.NAME+", options:"+MochiKit.Base.repr(this.options)+"]";
+}};
+MochiKit.DragAndDrop.Draggables={drags:[],register:function(_775){
+if(this.drags.length===0){
+var conn=MochiKit.Signal.connect;
+this.eventMouseUp=conn(document,"onmouseup",this,this.endDrag);
+this.eventMouseMove=conn(document,"onmousemove",this,this.updateDrag);
+this.eventKeypress=conn(document,"onkeypress",this,this.keyPress);
+}
+this.drags.push(_775);
+},unregister:function(_777){
+this.drags=MochiKit.Base.filter(function(d){
+return d!=_777;
+},this.drags);
+if(this.drags.length===0){
+var disc=MochiKit.Signal.disconnect;
+disc(this.eventMouseUp);
+disc(this.eventMouseMove);
+disc(this.eventKeypress);
+}
+},activate:function(_77a){
+window.focus();
+this.activeDraggable=_77a;
+},deactivate:function(){
+this.activeDraggable=null;
+},updateDrag:function(_77b){
+if(!this.activeDraggable){
+return;
+}
+var _77c=_77b.mouse();
+if(this._lastPointer&&(MochiKit.Base.repr(this._lastPointer.page)==MochiKit.Base.repr(_77c.page))){
+return;
+}
+this._lastPointer=_77c;
+this.activeDraggable.updateDrag(_77b,_77c);
+},endDrag:function(_77d){
+if(!this.activeDraggable){
+return;
+}
+this._lastPointer=null;
+this.activeDraggable.endDrag(_77d);
+this.activeDraggable=null;
+},keyPress:function(_77e){
+if(this.activeDraggable){
+this.activeDraggable.keyPress(_77e);
+}
+},notify:function(_77f,_780,_781){
+MochiKit.Signal.signal(this,_77f,_780,_781);
+}};
+MochiKit.DragAndDrop.Draggable=function(_782,_783){
+var cls=arguments.callee;
+if(!(this instanceof cls)){
+return new cls(_782,_783);
+}
+this.__init__(_782,_783);
+};
+MochiKit.DragAndDrop.Draggable.prototype={__class__:MochiKit.DragAndDrop.Draggable,__init__:function(_785,_786){
+var v=MochiKit.Visual;
+var b=MochiKit.Base;
+_786=b.update({handle:false,starteffect:function(_789){
+this._savedOpacity=MochiKit.Style.getStyle(_789,"opacity")||1;
+new v.Opacity(_789,{duration:0.2,from:this._savedOpacity,to:0.7});
+},reverteffect:function(_78a,_78b,_78c){
+var dur=Math.sqrt(Math.abs(_78b^2)+Math.abs(_78c^2))*0.02;
+return new v.Move(_78a,{x:-_78c,y:-_78b,duration:dur});
+},endeffect:function(_78e){
+new v.Opacity(_78e,{duration:0.2,from:0.7,to:this._savedOpacity});
+},onchange:b.noop,zindex:1000,revert:false,scroll:false,scrollSensitivity:20,scrollSpeed:15,snap:false},_786);
+var d=MochiKit.DOM;
+this.element=d.getElement(_785);
+if(_786.handle&&(typeof (_786.handle)=="string")){
+this.handle=d.getFirstElementByTagAndClassName(null,_786.handle,this.element);
+}
+if(!this.handle){
+this.handle=d.getElement(_786.handle);
+}
+if(!this.handle){
+this.handle=this.element;
+}
+if(_786.scroll&&!_786.scroll.scrollTo&&!_786.scroll.outerHTML){
+_786.scroll=d.getElement(_786.scroll);
+this._isScrollChild=MochiKit.DOM.isChildNode(this.element,_786.scroll);
+}
+MochiKit.Style.makePositioned(this.element);
+this.delta=this.currentDelta();
+this.options=_786;
+this.dragging=false;
+this.eventMouseDown=MochiKit.Signal.connect(this.handle,"onmousedown",this,this.initDrag);
+MochiKit.DragAndDrop.Draggables.register(this);
+},destroy:function(){
+MochiKit.Signal.disconnect(this.eventMouseDown);
+MochiKit.DragAndDrop.Draggables.unregister(this);
+},currentDelta:function(){
+var s=MochiKit.Style.getStyle;
+return [parseInt(s(this.element,"left")||"0"),parseInt(s(this.element,"top")||"0")];
+},initDrag:function(_791){
+if(!_791.mouse().button.left){
+return;
+}
+var src=_791.target();
+var _793=(src.tagName||"").toUpperCase();
+if(_793==="INPUT"||_793==="SELECT"||_793==="OPTION"||_793==="BUTTON"||_793==="TEXTAREA"){
+return;
+}
+if(this._revert){
+this._revert.cancel();
+this._revert=null;
+}
+var _794=_791.mouse();
+var pos=MochiKit.Position.cumulativeOffset(this.element);
+this.offset=[_794.page.x-pos.x,_794.page.y-pos.y];
+MochiKit.DragAndDrop.Draggables.activate(this);
+_791.stop();
+},startDrag:function(_796){
+this.dragging=true;
+if(this.options.selectclass){
+MochiKit.DOM.addElementClass(this.element,this.options.selectclass);
+}
+if(this.options.zindex){
+this.originalZ=parseInt(MochiKit.Style.getStyle(this.element,"z-index")||"0");
+this.element.style.zIndex=this.options.zindex;
+}
+if(this.options.ghosting){
+this._clone=this.element.cloneNode(true);
+this.ghostPosition=MochiKit.Position.absolutize(this.element);
+this.element.parentNode.insertBefore(this._clone,this.element);
+}
+if(this.options.scroll){
+if(this.options.scroll==window){
+var _797=this._getWindowScroll(this.options.scroll);
+this.originalScrollLeft=_797.left;
+this.originalScrollTop=_797.top;
+}else{
+this.originalScrollLeft=this.options.scroll.scrollLeft;
+this.originalScrollTop=this.options.scroll.scrollTop;
+}
+}
+MochiKit.DragAndDrop.Droppables.prepare(this.element);
+MochiKit.DragAndDrop.Draggables.notify("start",this,_796);
+if(this.options.starteffect){
+this.options.starteffect(this.element);
+}
+},updateDrag:function(_798,_799){
+if(!this.dragging){
+this.startDrag(_798);
+}
+MochiKit.Position.prepare();
+MochiKit.DragAndDrop.Droppables.show(_799,this.element);
+MochiKit.DragAndDrop.Draggables.notify("drag",this,_798);
+this.draw(_799);
+this.options.onchange(this);
+if(this.options.scroll){
+this.stopScrolling();
+var p,q;
+if(this.options.scroll==window){
+var s=this._getWindowScroll(this.options.scroll);
+p=new MochiKit.Style.Coordinates(s.left,s.top);
+q=new MochiKit.Style.Coordinates(s.left+s.width,s.top+s.height);
+}else{
+p=MochiKit.Position.page(this.options.scroll);
+p.x+=this.options.scroll.scrollLeft;
+p.y+=this.options.scroll.scrollTop;
+p.x+=(window.pageXOffset||document.documentElement.scrollLeft||document.body.scrollLeft||0);
+p.y+=(window.pageYOffset||document.documentElement.scrollTop||document.body.scrollTop||0);
+q=new MochiKit.Style.Coordinates(p.x+this.options.scroll.offsetWidth,p.y+this.options.scroll.offsetHeight);
+}
+var _79d=[0,0];
+if(_799.page.x>(q.x-this.options.scrollSensitivity)){
+_79d[0]=_799.page.x-(q.x-this.options.scrollSensitivity);
+}else{
+if(_799.page.x<(p.x+this.options.scrollSensitivity)){
+_79d[0]=_799.page.x-(p.x+this.options.scrollSensitivity);
+}
+}
+if(_799.page.y>(q.y-this.options.scrollSensitivity)){
+_79d[1]=_799.page.y-(q.y-this.options.scrollSensitivity);
+}else{
+if(_799.page.y<(p.y+this.options.scrollSensitivity)){
+_79d[1]=_799.page.y-(p.y+this.options.scrollSensitivity);
+}
+}
+this.startScrolling(_79d);
+}
+if(/AppleWebKit/.test(navigator.appVersion)){
+window.scrollBy(0,0);
+}
+_798.stop();
+},finishDrag:function(_79e,_79f){
+var dr=MochiKit.DragAndDrop;
+this.dragging=false;
+if(this.options.selectclass){
+MochiKit.DOM.removeElementClass(this.element,this.options.selectclass);
+}
+if(this.options.ghosting){
+MochiKit.Position.relativize(this.element,this.ghostPosition);
+MochiKit.DOM.removeElement(this._clone);
+this._clone=null;
+}
+if(_79f){
+dr.Droppables.fire(_79e,this.element);
+}
+dr.Draggables.notify("end",this,_79e);
+var _7a1=this.options.revert;
+if(_7a1&&typeof (_7a1)=="function"){
+_7a1=_7a1(this.element);
+}
+var d=this.currentDelta();
+if(_7a1&&this.options.reverteffect){
+this._revert=this.options.reverteffect(this.element,d[1]-this.delta[1],d[0]-this.delta[0]);
+}else{
+this.delta=d;
+}
+if(this.options.zindex){
+this.element.style.zIndex=this.originalZ;
+}
+if(this.options.endeffect){
+this.options.endeffect(this.element);
+}
+dr.Draggables.deactivate();
+dr.Droppables.reset(this.element);
+},keyPress:function(_7a3){
+if(_7a3.key().string!="KEY_ESCAPE"){
+return;
+}
+this.finishDrag(_7a3,false);
+_7a3.stop();
+},endDrag:function(_7a4){
+if(!this.dragging){
+return;
+}
+this.stopScrolling();
+this.finishDrag(_7a4,true);
+_7a4.stop();
+},draw:function(_7a5){
+var pos=MochiKit.Position.cumulativeOffset(this.element);
+var d=this.currentDelta();
+pos.x-=d[0];
+pos.y-=d[1];
+if(this.options.scroll&&(this.options.scroll!=window&&this._isScrollChild)){
+pos.x-=this.options.scroll.scrollLeft-this.originalScrollLeft;
+pos.y-=this.options.scroll.scrollTop-this.originalScrollTop;
+}
+var p=[_7a5.page.x-pos.x-this.offset[0],_7a5.page.y-pos.y-this.offset[1]];
+if(this.options.snap){
+if(typeof (this.options.snap)=="function"){
+p=this.options.snap(p[0],p[1]);
+}else{
+if(this.options.snap instanceof Array){
+var i=-1;
+p=MochiKit.Base.map(MochiKit.Base.bind(function(v){
+i+=1;
+return Math.round(v/this.options.snap[i])*this.options.snap[i];
+},this),p);
+}else{
+p=MochiKit.Base.map(MochiKit.Base.bind(function(v){
+return Math.round(v/this.options.snap)*this.options.snap;
+},this),p);
+}
+}
+}
+var _7ac=this.element.style;
+if((!this.options.constraint)||(this.options.constraint=="horizontal")){
+_7ac.left=p[0]+"px";
+}
+if((!this.options.constraint)||(this.options.constraint=="vertical")){
+_7ac.top=p[1]+"px";
+}
+if(_7ac.visibility=="hidden"){
+_7ac.visibility="";
+}
+},stopScrolling:function(){
+if(this.scrollInterval){
+clearInterval(this.scrollInterval);
+this.scrollInterval=null;
+MochiKit.DragAndDrop.Draggables._lastScrollPointer=null;
+}
+},startScrolling:function(_7ad){
+if(!_7ad[0]&&!_7ad[1]){
+return;
+}
+this.scrollSpeed=[_7ad[0]*this.options.scrollSpeed,_7ad[1]*this.options.scrollSpeed];
+this.lastScrolled=new Date();
+this.scrollInterval=setInterval(MochiKit.Base.bind(this.scroll,this),10);
+},scroll:function(){
+var _7ae=new Date();
+var _7af=_7ae-this.lastScrolled;
+this.lastScrolled=_7ae;
+if(this.options.scroll==window){
+var s=this._getWindowScroll(this.options.scroll);
+if(this.scrollSpeed[0]||this.scrollSpeed[1]){
+var dm=_7af/1000;
+this.options.scroll.scrollTo(s.left+dm*this.scrollSpeed[0],s.top+dm*this.scrollSpeed[1]);
+}
+}else{
+this.options.scroll.scrollLeft+=this.scrollSpeed[0]*_7af/1000;
+this.options.scroll.scrollTop+=this.scrollSpeed[1]*_7af/1000;
+}
+var d=MochiKit.DragAndDrop;
+MochiKit.Position.prepare();
+d.Droppables.show(d.Draggables._lastPointer,this.element);
+d.Draggables.notify("drag",this);
+if(this._isScrollChild){
+d.Draggables._lastScrollPointer=d.Draggables._lastScrollPointer||d.Draggables._lastPointer;
+d.Draggables._lastScrollPointer.x+=this.scrollSpeed[0]*_7af/1000;
+d.Draggables._lastScrollPointer.y+=this.scrollSpeed[1]*_7af/1000;
+if(d.Draggables._lastScrollPointer.x<0){
+d.Draggables._lastScrollPointer.x=0;
+}
+if(d.Draggables._lastScrollPointer.y<0){
+d.Draggables._lastScrollPointer.y=0;
+}
+this.draw(d.Draggables._lastScrollPointer);
+}
+this.options.onchange(this);
+},_getWindowScroll:function(win){
+var vp,w,h;
+MochiKit.DOM.withWindow(win,function(){
+vp=MochiKit.Style.getViewportPosition(win.document);
+});
+if(win.innerWidth){
+w=win.innerWidth;
+h=win.innerHeight;
+}else{
+if(win.document.documentElement&&win.document.documentElement.clientWidth){
+w=win.document.documentElement.clientWidth;
+h=win.document.documentElement.clientHeight;
+}else{
+w=win.document.body.offsetWidth;
+h=win.document.body.offsetHeight;
+}
+}
+return {top:vp.y,left:vp.x,width:w,height:h};
+},repr:function(){
+return "["+this.__class__.NAME+", options:"+MochiKit.Base.repr(this.options)+"]";
+}};
+MochiKit.DragAndDrop.__new__=function(){
+MochiKit.Base.nameFunctions(this);
+this.EXPORT_TAGS={":common":this.EXPORT,":all":MochiKit.Base.concat(this.EXPORT,this.EXPORT_OK)};
+};
+MochiKit.DragAndDrop.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.DragAndDrop);
+MochiKit.Base._deps("Sortable",["Base","Iter","DOM","Position","DragAndDrop"]);
+MochiKit.Sortable.NAME="MochiKit.Sortable";
+MochiKit.Sortable.VERSION="1.4.2";
+MochiKit.Sortable.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.Sortable.toString=function(){
+return this.__repr__();
+};
+MochiKit.Sortable.EXPORT=[];
+MochiKit.Sortable.EXPORT_OK=[];
+MochiKit.Base.update(MochiKit.Sortable,{sortables:{},_findRootElement:function(_7b7){
+while(_7b7.tagName.toUpperCase()!="BODY"){
+if(_7b7.id&&MochiKit.Sortable.sortables[_7b7.id]){
+return _7b7;
+}
+_7b7=_7b7.parentNode;
+}
+},_createElementId:function(_7b8){
+if(_7b8.id==null||_7b8.id==""){
+var d=MochiKit.DOM;
+var id;
+var _7bb=1;
+while(d.getElement(id="sortable"+_7bb)!=null){
+_7bb+=1;
+}
+d.setNodeAttribute(_7b8,"id",id);
+}
+},options:function(_7bc){
+_7bc=MochiKit.Sortable._findRootElement(MochiKit.DOM.getElement(_7bc));
+if(!_7bc){
+return;
+}
+return MochiKit.Sortable.sortables[_7bc.id];
+},destroy:function(_7bd){
+var s=MochiKit.Sortable.options(_7bd);
+var b=MochiKit.Base;
+var d=MochiKit.DragAndDrop;
+if(s){
+MochiKit.Signal.disconnect(s.startHandle);
+MochiKit.Signal.disconnect(s.endHandle);
+b.map(function(dr){
+d.Droppables.remove(dr);
+},s.droppables);
+b.map(function(dr){
+dr.destroy();
+},s.draggables);
+delete MochiKit.Sortable.sortables[s.element.id];
+}
+},create:function(_7c3,_7c4){
+_7c3=MochiKit.DOM.getElement(_7c3);
+var self=MochiKit.Sortable;
+self._createElementId(_7c3);
+_7c4=MochiKit.Base.update({element:_7c3,tag:"li",dropOnEmpty:false,tree:false,treeTag:"ul",overlap:"vertical",constraint:"vertical",containment:[_7c3],handle:false,only:false,hoverclass:null,ghosting:false,scroll:false,scrollSensitivity:20,scrollSpeed:15,format:/^[^_]*_(.*)$/,onChange:MochiKit.Base.noop,onUpdate:MochiKit.Base.noop,accept:null},_7c4);
+self.destroy(_7c3);
+var _7c6={revert:true,ghosting:_7c4.ghosting,scroll:_7c4.scroll,scrollSensitivity:_7c4.scrollSensitivity,scrollSpeed:_7c4.scrollSpeed,constraint:_7c4.constraint,handle:_7c4.handle};
+if(_7c4.starteffect){
+_7c6.starteffect=_7c4.starteffect;
+}
+if(_7c4.reverteffect){
+_7c6.reverteffect=_7c4.reverteffect;
+}else{
+if(_7c4.ghosting){
+_7c6.reverteffect=function(_7c7){
+_7c7.style.top=0;
+_7c7.style.left=0;
+};
+}
+}
+if(_7c4.endeffect){
+_7c6.endeffect=_7c4.endeffect;
+}
+if(_7c4.zindex){
+_7c6.zindex=_7c4.zindex;
+}
+var _7c8={overlap:_7c4.overlap,containment:_7c4.containment,hoverclass:_7c4.hoverclass,onhover:self.onHover,tree:_7c4.tree,accept:_7c4.accept};
+var _7c9={onhover:self.onEmptyHover,overlap:_7c4.overlap,containment:_7c4.containment,hoverclass:_7c4.hoverclass,accept:_7c4.accept};
+MochiKit.DOM.removeEmptyTextNodes(_7c3);
+_7c4.draggables=[];
+_7c4.droppables=[];
+if(_7c4.dropOnEmpty||_7c4.tree){
+new MochiKit.DragAndDrop.Droppable(_7c3,_7c9);
+_7c4.droppables.push(_7c3);
+}
+MochiKit.Base.map(function(e){
+var _7cb=_7c4.handle?MochiKit.DOM.getFirstElementByTagAndClassName(null,_7c4.handle,e):e;
+_7c4.draggables.push(new MochiKit.DragAndDrop.Draggable(e,MochiKit.Base.update(_7c6,{handle:_7cb})));
+new MochiKit.DragAndDrop.Droppable(e,_7c8);
+if(_7c4.tree){
+e.treeNode=_7c3;
+}
+_7c4.droppables.push(e);
+},(self.findElements(_7c3,_7c4)||[]));
+if(_7c4.tree){
+MochiKit.Base.map(function(e){
+new MochiKit.DragAndDrop.Droppable(e,_7c9);
+e.treeNode=_7c3;
+_7c4.droppables.push(e);
+},(self.findTreeElements(_7c3,_7c4)||[]));
+}
+self.sortables[_7c3.id]=_7c4;
+_7c4.lastValue=self.serialize(_7c3);
+_7c4.startHandle=MochiKit.Signal.connect(MochiKit.DragAndDrop.Draggables,"start",MochiKit.Base.partial(self.onStart,_7c3));
+_7c4.endHandle=MochiKit.Signal.connect(MochiKit.DragAndDrop.Draggables,"end",MochiKit.Base.partial(self.onEnd,_7c3));
+},onStart:function(_7cd,_7ce){
+var self=MochiKit.Sortable;
+var _7d0=self.options(_7cd);
+_7d0.lastValue=self.serialize(_7d0.element);
+},onEnd:function(_7d1,_7d2){
+var self=MochiKit.Sortable;
+self.unmark();
+var _7d4=self.options(_7d1);
+if(_7d4.lastValue!=self.serialize(_7d4.element)){
+_7d4.onUpdate(_7d4.element);
+}
+},findElements:function(_7d5,_7d6){
+return MochiKit.Sortable.findChildren(_7d5,_7d6.only,_7d6.tree,_7d6.tag);
+},findTreeElements:function(_7d7,_7d8){
+return MochiKit.Sortable.findChildren(_7d7,_7d8.only,_7d8.tree?true:false,_7d8.treeTag);
+},findChildren:function(_7d9,only,_7db,_7dc){
+if(!_7d9.hasChildNodes()){
+return null;
+}
+_7dc=_7dc.toUpperCase();
+if(only){
+only=MochiKit.Base.flattenArray([only]);
+}
+var _7dd=[];
+MochiKit.Base.map(function(e){
+if(e.tagName&&e.tagName.toUpperCase()==_7dc&&(!only||MochiKit.Iter.some(only,function(c){
+return MochiKit.DOM.hasElementClass(e,c);
+}))){
+_7dd.push(e);
+}
+if(_7db){
+var _7e0=MochiKit.Sortable.findChildren(e,only,_7db,_7dc);
+if(_7e0&&_7e0.length>0){
+_7dd=_7dd.concat(_7e0);
+}
+}
+},_7d9.childNodes);
+return _7dd;
+},onHover:function(_7e1,_7e2,_7e3){
+if(MochiKit.DOM.isChildNode(_7e2,_7e1)){
+return;
+}
+var self=MochiKit.Sortable;
+if(_7e3>0.33&&_7e3<0.66&&self.options(_7e2).tree){
+return;
+}else{
+if(_7e3>0.5){
+self.mark(_7e2,"before");
+if(_7e2.previousSibling!=_7e1){
+var _7e5=_7e1.parentNode;
+_7e1.style.visibility="hidden";
+_7e2.parentNode.insertBefore(_7e1,_7e2);
+if(_7e2.parentNode!=_7e5){
+self.options(_7e5).onChange(_7e1);
+}
+self.options(_7e2.parentNode).onChange(_7e1);
+}
+}else{
+self.mark(_7e2,"after");
+var _7e6=_7e2.nextSibling||null;
+if(_7e6!=_7e1){
+var _7e5=_7e1.parentNode;
+_7e1.style.visibility="hidden";
+_7e2.parentNode.insertBefore(_7e1,_7e6);
+if(_7e2.parentNode!=_7e5){
+self.options(_7e5).onChange(_7e1);
+}
+self.options(_7e2.parentNode).onChange(_7e1);
+}
+}
+}
+},_offsetSize:function(_7e7,type){
+if(type=="vertical"||type=="height"){
+return _7e7.offsetHeight;
+}else{
+return _7e7.offsetWidth;
+}
+},onEmptyHover:function(_7e9,_7ea,_7eb){
+var _7ec=_7e9.parentNode;
+var self=MochiKit.Sortable;
+var _7ee=self.options(_7ea);
+if(!MochiKit.DOM.isChildNode(_7ea,_7e9)){
+var _7ef;
+var _7f0=self.findElements(_7ea,{tag:_7ee.tag,only:_7ee.only});
+var _7f1=null;
+if(_7f0){
+var _7f2=self._offsetSize(_7ea,_7ee.overlap)*(1-_7eb);
+for(_7ef=0;_7ef<_7f0.length;_7ef+=1){
+if(_7f2-self._offsetSize(_7f0[_7ef],_7ee.overlap)>=0){
+_7f2-=self._offsetSize(_7f0[_7ef],_7ee.overlap);
+}else{
+if(_7f2-(self._offsetSize(_7f0[_7ef],_7ee.overlap)/2)>=0){
+_7f1=_7ef+1<_7f0.length?_7f0[_7ef+1]:null;
+break;
+}else{
+_7f1=_7f0[_7ef];
+break;
+}
+}
+}
+}
+_7ea.insertBefore(_7e9,_7f1);
+self.options(_7ec).onChange(_7e9);
+_7ee.onChange(_7e9);
+}
+},unmark:function(){
+var m=MochiKit.Sortable._marker;
+if(m){
+MochiKit.Style.hideElement(m);
+}
+},mark:function(_7f4,_7f5){
+var d=MochiKit.DOM;
+var self=MochiKit.Sortable;
+var _7f8=self.options(_7f4.parentNode);
+if(_7f8&&!_7f8.ghosting){
+return;
+}
+if(!self._marker){
+self._marker=d.getElement("dropmarker")||document.createElement("DIV");
+MochiKit.Style.hideElement(self._marker);
+d.addElementClass(self._marker,"dropmarker");
+self._marker.style.position="absolute";
+document.getElementsByTagName("body").item(0).appendChild(self._marker);
+}
+var _7f9=MochiKit.Position.cumulativeOffset(_7f4);
+self._marker.style.left=_7f9.x+"px";
+self._marker.style.top=_7f9.y+"px";
+if(_7f5=="after"){
+if(_7f8.overlap=="horizontal"){
+self._marker.style.left=(_7f9.x+_7f4.clientWidth)+"px";
+}else{
+self._marker.style.top=(_7f9.y+_7f4.clientHeight)+"px";
+}
+}
+MochiKit.Style.showElement(self._marker);
+},_tree:function(_7fa,_7fb,_7fc){
+var self=MochiKit.Sortable;
+var _7fe=self.findElements(_7fa,_7fb)||[];
+for(var i=0;i<_7fe.length;++i){
+var _800=_7fe[i].id.match(_7fb.format);
+if(!_800){
+continue;
+}
+var _801={id:encodeURIComponent(_800?_800[1]:null),element:_7fa,parent:_7fc,children:[],position:_7fc.children.length,container:self._findChildrenElement(_7fe[i],_7fb.treeTag.toUpperCase())};
+if(_801.container){
+self._tree(_801.container,_7fb,_801);
+}
+_7fc.children.push(_801);
+}
+return _7fc;
+},_findChildrenElement:function(_802,_803){
+if(_802&&_802.hasChildNodes){
+_803=_803.toUpperCase();
+for(var i=0;i<_802.childNodes.length;++i){
+if(_802.childNodes[i].tagName.toUpperCase()==_803){
+return _802.childNodes[i];
+}
+}
+}
+return null;
+},tree:function(_805,_806){
+_805=MochiKit.DOM.getElement(_805);
+var _807=MochiKit.Sortable.options(_805);
+_806=MochiKit.Base.update({tag:_807.tag,treeTag:_807.treeTag,only:_807.only,name:_805.id,format:_807.format},_806||{});
+var root={id:null,parent:null,children:new Array,container:_805,position:0};
+return MochiKit.Sortable._tree(_805,_806,root);
+},setSequence:function(_809,_80a,_80b){
+var self=MochiKit.Sortable;
+var b=MochiKit.Base;
+_809=MochiKit.DOM.getElement(_809);
+_80b=b.update(self.options(_809),_80b||{});
+var _80e={};
+b.map(function(n){
+var m=n.id.match(_80b.format);
+if(m){
+_80e[m[1]]=[n,n.parentNode];
+}
+n.parentNode.removeChild(n);
+},self.findElements(_809,_80b));
+b.map(function(_811){
+var n=_80e[_811];
+if(n){
+n[1].appendChild(n[0]);
+delete _80e[_811];
+}
+},_80a);
+},_constructIndex:function(node){
+var _814="";
+do{
+if(node.id){
+_814="["+node.position+"]"+_814;
+}
+}while((node=node.parent)!=null);
+return _814;
+},sequence:function(_815,_816){
+_815=MochiKit.DOM.getElement(_815);
+var self=MochiKit.Sortable;
+var _816=MochiKit.Base.update(self.options(_815),_816||{});
+return MochiKit.Base.map(function(item){
+return item.id.match(_816.format)?item.id.match(_816.format)[1]:"";
+},MochiKit.DOM.getElement(self.findElements(_815,_816)||[]));
+},serialize:function(_819,_81a){
+_819=MochiKit.DOM.getElement(_819);
+var self=MochiKit.Sortable;
+_81a=MochiKit.Base.update(self.options(_819),_81a||{});
+var name=encodeURIComponent(_81a.name||_819.id);
+if(_81a.tree){
+return MochiKit.Base.flattenArray(MochiKit.Base.map(function(item){
+return [name+self._constructIndex(item)+"[id]="+encodeURIComponent(item.id)].concat(item.children.map(arguments.callee));
+},self.tree(_819,_81a).children)).join("&");
+}else{
+return MochiKit.Base.map(function(item){
+return name+"[]="+encodeURIComponent(item);
+},self.sequence(_819,_81a)).join("&");
+}
+}});
+MochiKit.Sortable.Sortable=MochiKit.Sortable;
+MochiKit.Sortable.__new__=function(){
+MochiKit.Base.nameFunctions(this);
+this.EXPORT_TAGS={":common":this.EXPORT,":all":MochiKit.Base.concat(this.EXPORT,this.EXPORT_OK)};
+};
+MochiKit.Sortable.__new__();
+MochiKit.Base._exportSymbols(this,MochiKit.Sortable);
+if(typeof (MochiKit)=="undefined"){
+MochiKit={};
+}
+if(typeof (MochiKit.MochiKit)=="undefined"){
+MochiKit.MochiKit={};
+}
+MochiKit.MochiKit.NAME="MochiKit.MochiKit";
+MochiKit.MochiKit.VERSION="1.4.2";
+MochiKit.MochiKit.__repr__=function(){
+return "["+this.NAME+" "+this.VERSION+"]";
+};
+MochiKit.MochiKit.toString=function(){
+return this.__repr__();
+};
+MochiKit.MochiKit.SUBMODULES=["Base","Iter","Logging","DateTime","Format","Async","DOM","Selector","Style","LoggingPane","Color","Signal","Position","Visual","DragAndDrop","Sortable"];
+if(typeof (JSAN)!="undefined"||typeof (dojo)!="undefined"){
+if(typeof (dojo)!="undefined"){
+dojo.provide("MochiKit.MochiKit");
+(function(lst){
+for(var i=0;i<lst.length;i++){
+dojo.require("MochiKit."+lst[i]);
+}
+})(MochiKit.MochiKit.SUBMODULES);
+}
+if(typeof (JSAN)!="undefined"){
+(function(lst){
+for(var i=0;i<lst.length;i++){
+JSAN.use("MochiKit."+lst[i],[]);
+}
+})(MochiKit.MochiKit.SUBMODULES);
+}
+(function(){
+var _823=MochiKit.Base.extend;
+var self=MochiKit.MochiKit;
+var _825=self.SUBMODULES;
+var _826=[];
+var _827=[];
+var _828={};
+var i,k,m,all;
+for(i=0;i<_825.length;i++){
+m=MochiKit[_825[i]];
+_823(_826,m.EXPORT);
+_823(_827,m.EXPORT_OK);
+for(k in m.EXPORT_TAGS){
+_828[k]=_823(_828[k],m.EXPORT_TAGS[k]);
+}
+all=m.EXPORT_TAGS[":all"];
+if(!all){
+all=_823(null,m.EXPORT,m.EXPORT_OK);
+}
+var j;
+for(j=0;j<all.length;j++){
+k=all[j];
+self[k]=m[k];
+}
+}
+self.EXPORT=_826;
+self.EXPORT_OK=_827;
+self.EXPORT_TAGS=_828;
+}());
+}else{
+if(typeof (MochiKit.__compat__)=="undefined"){
+MochiKit.__compat__=true;
+}
+(function(){
+if(typeof (document)=="undefined"){
+return;
+}
+var _82e=document.getElementsByTagName("script");
+var _82f="http://www.w3.org/1999/xhtml";
+var _830="http://www.w3.org/2000/svg";
+var _831="http://www.w3.org/1999/xlink";
+var _832="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul";
+var base=null;
+var _834=null;
+var _835={};
+var i;
+var src;
+for(i=0;i<_82e.length;i++){
+src=null;
+switch(_82e[i].namespaceURI){
+case _830:
+src=_82e[i].getAttributeNS(_831,"href");
+break;
+default:
+src=_82e[i].getAttribute("src");
+break;
+}
+if(!src){
+continue;
+}
+_835[src]=true;
+if(src.match(/MochiKit.js(\?.*)?$/)){
+base=src.substring(0,src.lastIndexOf("MochiKit.js"));
+_834=_82e[i];
+}
+}
+if(base===null){
+return;
+}
+var _838=MochiKit.MochiKit.SUBMODULES;
+for(var i=0;i<_838.length;i++){
+if(MochiKit[_838[i]]){
+continue;
+}
+var uri=base+_838[i]+".js";
+if(uri in _835){
+continue;
+}
+if(_834.namespaceURI==_830||_834.namespaceURI==_832){
+var s=document.createElementNS(_834.namespaceURI,"script");
+s.setAttribute("id","MochiKit_"+base+_838[i]);
+if(_834.namespaceURI==_830){
+s.setAttributeNS(_831,"href",uri);
+}else{
+s.setAttribute("src",uri);
+}
+s.setAttribute("type","application/x-javascript");
+_834.parentNode.appendChild(s);
+}else{
+document.write("<"+_834.nodeName+" src=\""+uri+"\" type=\"text/javascript\"></script>");
+}
+}
+})();
+}
+
+
diff --git a/paste/evalexception/media/debug.js b/paste/evalexception/media/debug.js
new file mode 100644
index 0000000..57f9df3
--- /dev/null
+++ b/paste/evalexception/media/debug.js
@@ -0,0 +1,161 @@
+// Expand or collapse the local-variables/console view for one traceback
+// frame.  `anchor` is the +/- link rendered next to the source line; the
+// frame HTML is fetched lazily from /_debug/show_frame on first expansion
+// and cached on the anchor as `expandedElement`.  Always returns false so
+// the href="#" navigation is suppressed.
+function showFrame(anchor) {
+    var tbid = anchor.getAttribute('tbid');
+    var expanded = anchor.expanded;
+    if (expanded) {
+        MochiKit.DOM.hideElement(anchor.expandedElement);
+        anchor.expanded = false;
+        _swapImage(anchor);
+        return false;
+    }
+    anchor.expanded = true;
+    if (anchor.expandedElement) {
+        // Already fetched once: just re-show the cached element.
+        MochiKit.DOM.showElement(anchor.expandedElement);
+        _swapImage(anchor);
+        $('debug_input_'+tbid).focus();
+        return false;
+    }
+    var url = debug_base
+        + '/show_frame?tbid=' + tbid
+        + '&debugcount=' + debug_count;
+    var d = MochiKit.Async.doSimpleXMLHttpRequest(url);
+    d.addCallbacks(function (data) {
+        // Insert the fetched frame markup directly after the anchor.
+        var el = MochiKit.DOM.DIV({});
+        anchor.parentNode.insertBefore(el, anchor.nextSibling);
+        el.innerHTML = data.responseText;
+        anchor.expandedElement = el;
+        _swapImage(anchor);
+        $('debug_input_'+tbid).focus();
+    }, function (error) {
+        showError(error.req.responseText);
+    });
+    return false;
+}
+
+// Swap the first <IMG> inside `anchor` between the plus and minus icons,
+// reflecting anchor.expanded as set by showFrame().
+function _swapImage(anchor) {
+    var el = anchor.getElementsByTagName('IMG')[0];
+    if (anchor.expanded) {
+        var img = 'minus.jpg';
+    } else {
+        var img = 'plus.jpg';
+    }
+    el.src = debug_base + '/media/' + img;
+}
+
+// POST the console input for frame `tbid` to /_debug/exec_input and append
+// the rendered result to the output element.  The input/output elements are
+// located via the button's `input-from` / `output-to` attributes; the raw
+// line is also pushed onto the form's history for upArrow() navigation.
+// Returns false to cancel the form's default submit.
+function submitInput(button, tbid) {
+    var input = $(button.getAttribute('input-from'));
+    var output = $(button.getAttribute('output-to'));
+    var url = debug_base
+        + '/exec_input';
+    var history = input.form.history;
+    // Reset arrow-key navigation to "newest" on every submit.
+    input.historyPosition = 0;
+    if (! history) {
+        history = input.form.history = [];
+    }
+    history.push(input.value);
+    var vars = {
+        tbid: tbid,
+        debugcount: debug_count,
+        input: input.value
+    };
+    MochiKit.DOM.showElement(output);
+    var d = MochiKit.Async.doSimpleXMLHttpRequest(url, vars);
+    d.addCallbacks(function (data) {
+        // Server returns ready-made HTML (prompt + echoed input + output).
+        var result = data.responseText;
+        output.innerHTML += result;
+        input.value = '';
+        input.focus();
+    }, function (error) {
+        showError(error.req.responseText);
+    });
+    return false;
+}
+
+// Append `msg` (HTML) to the page's error container and reveal the error
+// area; successive messages are separated by a horizontal rule.
+function showError(msg) {
+    var el = $('error-container');
+    if (el.innerHTML) {
+        el.innerHTML += '<hr noshade>\n' + msg;
+    } else {
+        el.innerHTML = msg;
+    }
+    MochiKit.DOM.showElement('error-area');
+}
+
+// Empty and hide the error area shown by showError().
+function clearError() {
+    var el = $('error-container');
+    el.innerHTML = '';
+    MochiKit.DOM.hideElement('error-area');
+}
+
+// Toggle the console between a single-line <input> and a multi-line
+// <textarea>, preserving the current value and id, and relabel the toggle
+// button ('Expand' <-> 'Contract').  Returns false to cancel the submit.
+function expandInput(button) {
+    var input = button.form.elements.input;
+    // NOTE(review): `stdops` is missing `var`, so it leaks into the global
+    // scope; harmless here but worth confirming/fixing upstream.
+    stdops = {
+        name: 'input',
+        style: 'width: 100%',
+        autocomplete: 'off'
+    };
+    if (input.tagName == 'INPUT') {
+        var newEl = MochiKit.DOM.TEXTAREA(stdops);
+        var text = 'Contract';
+    } else {
+        // Going back to a one-line input: restore type and key handler.
+        stdops['type'] = 'text';
+        stdops['onkeypress'] = 'upArrow(this)';
+        var newEl = MochiKit.DOM.INPUT(stdops);
+        var text = 'Expand';
+    }
+    newEl.value = input.value;
+    newEl.id = input.id;
+    MochiKit.DOM.swapDOM(input, newEl);
+    newEl.focus();
+    button.value = text;
+    return false;
+}
+
+// Keypress handler for the console input: up/down arrows walk through the
+// form's submit history (positions are counted from the newest entry;
+// pos 1 = most recent).  Returns true for all other keys so they are
+// handled normally.
+function upArrow(input, event) {
+    if (window.event) {
+        // IE delivers the event via window.event rather than as an argument.
+        event = window.event;
+    }
+    if (event.keyCode != 38 && event.keyCode != 40) {
+        // not an up- or down-arrow
+        return true;
+    }
+    var dir = event.keyCode == 38 ? 1 : -1;
+    var history = input.form.history;
+    if (! history) {
+        history = input.form.history = [];
+    }
+    var pos = input.historyPosition || 0;
+    if (! pos && dir == -1) {
+        // Down-arrow at the newest position: nothing to move to.
+        return true;
+    }
+    if (! pos && input.value) {
+        // Stash the in-progress line before navigating away from it.
+        history.push(input.value);
+        pos = 1;
+    }
+    pos += dir;
+    if (history.length-pos < 0) {
+        pos = 1;
+    }
+    if (history.length-pos > history.length-1) {
+        // Walked past the newest entry: show an empty line.
+        input.value = '';
+        return true;
+    }
+    input.historyPosition = pos;
+    var line = history[history.length-pos];
+    input.value = line;
+}
+
+// Reveal the hidden <span> holding the truncated remainder of a long value
+// (see make_table() on the server side) and hide the '...' anchor itself.
+// Returns false to suppress link navigation.
+function expandLong(anchor) {
+    var span = anchor;
+    // Walk forward through siblings to the first display:none element.
+    while (span) {
+        if (span.style && span.style.display == 'none') {
+            break;
+        }
+        span = span.nextSibling;
+    }
+    if (! span) {
+        return false;
+    }
+    MochiKit.DOM.showElement(span);
+    MochiKit.DOM.hideElement(anchor);
+    return false;
+}
diff --git a/paste/evalexception/media/minus.jpg b/paste/evalexception/media/minus.jpg
new file mode 100644
index 0000000..05f3306
--- /dev/null
+++ b/paste/evalexception/media/minus.jpg
Binary files differ
diff --git a/paste/evalexception/media/plus.jpg b/paste/evalexception/media/plus.jpg
new file mode 100644
index 0000000..a17aa5e
--- /dev/null
+++ b/paste/evalexception/media/plus.jpg
Binary files differ
diff --git a/paste/evalexception/middleware.py b/paste/evalexception/middleware.py
new file mode 100644
index 0000000..da7876d
--- /dev/null
+++ b/paste/evalexception/middleware.py
@@ -0,0 +1,618 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Exception-catching middleware that allows interactive debugging.
+
+This middleware catches all unexpected exceptions. A normal
+traceback, like produced by
+``paste.exceptions.errormiddleware.ErrorMiddleware`` is given, plus
+controls to see local variables and evaluate expressions in a local
+context.
+
+This can only be used in single-process environments, because
+subsequent requests must go back to the same process that the
+exception originally occurred in. Threaded or non-concurrent
+environments both work.
+
+This shouldn't be used in production in any way. That would just be
+silly.
+
+If calling from an XMLHttpRequest call, if the GET variable ``_`` is
+given then it will make the response more compact (and less
+Javascripty), since if you use innerHTML it'll kill your browser. You
+can look for the header X-Debug-URL in your 500 responses if you want
+to see the full debuggable traceback. Also, this URL is printed to
+``wsgi.errors``, so you can open it up in another browser window.
+"""
+
+from __future__ import print_function
+
+import sys
+import os
+import cgi
+import traceback
+import six
+from six.moves import cStringIO as StringIO
+import pprint
+import itertools
+import time
+import re
+from paste.exceptions import errormiddleware, formatter, collector
+from paste import wsgilib
+from paste import urlparser
+from paste import httpexceptions
+from paste import registry
+from paste import request
+from paste import response
+from paste.evalexception import evalcontext
+
+# Maximum number of traceback frames collected per exception (None = no limit).
+limit = 200
+
+def html_quote(v):
+    """
+    Escape HTML characters, plus translate None to ''
+    """
+    if v is None:
+        return ''
+    # Second argument to cgi.escape() also quotes double quotes, so the
+    # result is safe inside HTML attribute values as well as element text.
+    return cgi.escape(str(v), 1)
+
+def preserve_whitespace(v, quote=True):
+    """
+    Quote a value for HTML, preserving whitespace (translating
+    newlines to ``<br>`` and multiple spaces to use ``&nbsp;``).
+
+    If ``quote`` is true, then the value will be HTML quoted first.
+    """
+    if quote:
+        v = html_quote(v)
+    v = v.replace('\n', '<br>\n')
+    # Replace runs of spaces with &nbsp; sequences; _repl_nbsp keeps the
+    # final space of a multi-space run breakable so lines can still wrap.
+    v = re.sub(r'()( +)', _repl_nbsp, v)
+    v = re.sub(r'(\n)( +)', _repl_nbsp, v)
+    v = re.sub(r'^()( +)', _repl_nbsp, v)
+    return '<code>%s</code>' % v
+
+def _repl_nbsp(match):
+    # re.sub callback for preserve_whitespace(); group(2) is a run of
+    # spaces.  A single space becomes one &nbsp;; a longer run keeps its
+    # last character as a real (breakable) space.
+    if len(match.group(2)) == 1:
+        return '&nbsp;'
+    return match.group(1) + '&nbsp;' * (len(match.group(2))-1) + ' '
+
+def simplecatcher(application):
+    """
+    A simple middleware that catches errors and turns them into simple
+    tracebacks.
+    """
+    def simplecatcher_app(environ, start_response):
+        try:
+            return application(environ, start_response)
+        except:
+            out = StringIO()
+            traceback.print_exc(file=out)
+            # Pass exc_info so start_response may legally replace a
+            # response that has already started (per the WSGI spec).
+            start_response('500 Server Error',
+                           [('content-type', 'text/html')],
+                           sys.exc_info())
+            res = out.getvalue()
+            return ['<h3>Error</h3><pre>%s</pre>'
+                    % html_quote(res)]
+    return simplecatcher_app
+
+def wsgiapp():
+    """
+    Turns a function or method into a WSGI application.
+
+    The decorated callable receives the parsed GET/POST variables as
+    keyword arguments, plus ``environ`` and a mutable ``headers`` dict
+    (whose ``status`` entry controls the response status), and returns
+    the response body as a string.
+    """
+    def decorator(func):
+        def wsgiapp_wrapper(*args):
+            # we get 3 args when this is a method, two when it is
+            # a function :(
+            if len(args) == 3:
+                environ = args[1]
+                start_response = args[2]
+                args = [args[0]]
+            else:
+                environ, start_response = args
+                args = []
+            def application(environ, start_response):
+                form = wsgilib.parse_formvars(environ,
+                                              include_get_vars=True)
+                headers = response.HeaderDict(
+                    {'content-type': 'text/html',
+                     'status': '200 OK'})
+                form['environ'] = environ
+                form['headers'] = headers
+                res = func(*args, **form.mixed())
+                # 'status' is carried in the header dict; pop it so it is
+                # not emitted as a literal response header.
+                status = headers.pop('status')
+                start_response(status, headers.headeritems())
+                return [res]
+            # Wrap with HTTP-exception translation and a last-resort
+            # traceback catcher so errors render rather than propagate.
+            app = httpexceptions.make_middleware(application)
+            app = simplecatcher(app)
+            return app(environ, start_response)
+        # Mark as reachable through EvalException.debug() dispatch.
+        wsgiapp_wrapper.exposed = True
+        return wsgiapp_wrapper
+    return decorator
+
+def get_debug_info(func):
+    """
+    A decorator (meant to be used under ``wsgiapp()``) that resolves
+    the ``debugcount`` variable to a ``DebugInfo`` object (or gives an
+    error if it can't be found).
+    """
+    def debug_info_replacement(self, **form):
+        try:
+            if 'debugcount' not in form:
+                raise ValueError('You must provide a debugcount parameter')
+            debugcount = form.pop('debugcount')
+            try:
+                debugcount = int(debugcount)
+            except ValueError:
+                raise ValueError('Bad value for debugcount')
+            if debugcount not in self.debug_infos:
+                raise ValueError(
+                    'Debug %s no longer found (maybe it has expired?)'
+                    % debugcount)
+            debug_info = self.debug_infos[debugcount]
+            # Pass the resolved DebugInfo through as a keyword argument.
+            return func(self, debug_info=debug_info, **form)
+        except ValueError as e:
+            # All validation failures above funnel here and become a 500
+            # page; 'headers' is supplied by the wsgiapp() wrapper.
+            form['headers']['status'] = '500 Server Error'
+            return '<html>There was an error: %s</html>' % html_quote(e)
+    return debug_info_replacement
+
+# Monotonically increasing id for debugged requests, seeded with the
+# current time so ids are unlikely to collide across server restarts.
+debug_counter = itertools.count(int(time.time()))
+def get_debug_count(environ):
+    """
+    Return the unique debug count for the current request
+    """
+    if 'paste.evalexception.debug_count' in environ:
+        return environ['paste.evalexception.debug_count']
+    else:
+        # NOTE(review): `next` shadows the builtin within this branch.
+        environ['paste.evalexception.debug_count'] = next = six.next(debug_counter)
+        return next
+
+class EvalException(object):
+    """
+    WSGI middleware that catches exceptions from the wrapped application
+    and serves an interactive, in-browser debugger.
+
+    Requests under ``/_debug/`` are dispatched to methods of this class
+    marked ``exposed = True``; all other requests are proxied to the
+    wrapped application via ``respond()``.  Not usable with multiple
+    processes, since cached tracebacks live in this process's memory.
+    """
+
+    def __init__(self, application, global_conf=None,
+                 xmlhttp_key=None):
+        self.application = application
+        # Maps debug count -> DebugInfo for every exception caught so far.
+        self.debug_infos = {}
+        if xmlhttp_key is None:
+            if global_conf is None:
+                xmlhttp_key = '_'
+            else:
+                xmlhttp_key = global_conf.get('xmlhttp_key', '_')
+        self.xmlhttp_key = xmlhttp_key
+
+    def __call__(self, environ, start_response):
+        assert not environ['wsgi.multiprocess'], (
+            "The EvalException middleware is not usable in a "
+            "multi-process environment")
+        # Expose ourselves to downstream code (e.g. for exception_handler).
+        environ['paste.evalexception'] = self
+        if environ.get('PATH_INFO', '').startswith('/_debug/'):
+            return self.debug(environ, start_response)
+        else:
+            return self.respond(environ, start_response)
+
+    def debug(self, environ, start_response):
+        """Dispatch /_debug/<name> to the exposed method of that name."""
+        assert request.path_info_pop(environ) == '_debug'
+        next_part = request.path_info_pop(environ)
+        method = getattr(self, next_part, None)
+        if not method:
+            exc = httpexceptions.HTTPNotFound(
+                '%r not found when parsing %r'
+                % (next_part, wsgilib.construct_url(environ)))
+            return exc.wsgi_application(environ, start_response)
+        # Only methods explicitly marked .exposed may be reached by URL.
+        if not getattr(method, 'exposed', False):
+            exc = httpexceptions.HTTPForbidden(
+                '%r not allowed' % next_part)
+            return exc.wsgi_application(environ, start_response)
+        return method(environ, start_response)
+
+    def media(self, environ, start_response):
+        """
+        Static path where images and other files live
+        """
+        app = urlparser.StaticURLParser(
+            os.path.join(os.path.dirname(__file__), 'media'))
+        return app(environ, start_response)
+    media.exposed = True
+
+    def mochikit(self, environ, start_response):
+        """
+        Static path where MochiKit lives
+        """
+        app = urlparser.StaticURLParser(
+            os.path.join(os.path.dirname(__file__), 'mochikit'))
+        return app(environ, start_response)
+    mochikit.exposed = True
+
+    def summary(self, environ, start_response):
+        """
+        Returns a JSON-format summary of all the cached
+        exception reports
+        """
+        start_response('200 OK', [('Content-type', 'text/x-json')])
+        data = [];
+        # NOTE(review): Python 2 only — in Python 3 dict.values() is a view
+        # with no .sort(), and list.sort() no longer accepts a cmp function;
+        # this should be sorted(self.debug_infos.values(),
+        # key=lambda item: item.created).  The `data = []` above is also
+        # immediately overwritten (dead assignment).
+        items = self.debug_infos.values()
+        items.sort(lambda a, b: cmp(a.created, b.created))
+        data = [item.json() for item in items]
+        return [repr(data)]
+    summary.exposed = True
+
+    def view(self, environ, start_response):
+        """
+        View old exception reports
+        """
+        # The path segment is the debug count assigned when the exception
+        # was first caught (see respond()).
+        id = int(request.path_info_pop(environ))
+        if id not in self.debug_infos:
+            start_response(
+                '500 Server Error',
+                [('Content-type', 'text/html')])
+            return [
+                "Traceback by id %s does not exist (maybe "
+                "the server has been restarted?)"
+                % id]
+        debug_info = self.debug_infos[id]
+        return debug_info.wsgi_application(environ, start_response)
+    view.exposed = True
+
+    def make_view_url(self, environ, base_path, count):
+        # URL at which the cached report for `count` can be revisited.
+        return base_path + '/_debug/view/%s' % count
+
+    #@wsgiapp()
+    #@get_debug_info
+    def show_frame(self, tbid, debug_info, **kw):
+        """Return the console form plus a table of the frame's locals."""
+        frame = debug_info.frame(int(tbid))
+        vars = frame.tb_frame.f_locals
+        if vars:
+            # Restore the registry state captured at exception time so
+            # StackedObjectProxies render their then-current values.
+            registry.restorer.restoration_begin(debug_info.counter)
+            local_vars = make_table(vars)
+            registry.restorer.restoration_end()
+        else:
+            local_vars = 'No local vars'
+        return input_form(tbid, debug_info) + local_vars
+
+    # Decorators applied manually for pre-2.4 syntax compatibility.
+    show_frame = wsgiapp()(get_debug_info(show_frame))
+
+    #@wsgiapp()
+    #@get_debug_info
+    def exec_input(self, tbid, debug_info, input, **kw):
+        """Evaluate `input` in the chosen frame and return HTML output."""
+        if not input.strip():
+            return ''
+        input = input.rstrip() + '\n'
+        frame = debug_info.frame(int(tbid))
+        vars = frame.tb_frame.f_locals
+        glob_vars = frame.tb_frame.f_globals
+        context = evalcontext.EvalContext(vars, glob_vars)
+        registry.restorer.restoration_begin(debug_info.counter)
+        output = context.exec_expr(input)
+        registry.restorer.restoration_end()
+        input_html = formatter.str2html(input)
+        return ('<code style="color: #060">&gt;&gt;&gt;</code> '
+                '<code>%s</code><br>\n%s'
+                % (preserve_whitespace(input_html, quote=False),
+                   preserve_whitespace(output)))
+
+    exec_input = wsgiapp()(get_debug_info(exec_input))
+
+    def respond(self, environ, start_response):
+        """Proxy to the wrapped app, capturing any exception it raises."""
+        if environ.get('paste.throw_errors'):
+            # Someone upstream already handles errors; stay out of the way.
+            return self.application(environ, start_response)
+        base_path = request.construct_url(environ, with_path_info=False,
+                                          with_query_string=False)
+        environ['paste.throw_errors'] = True
+        started = []
+        def detect_start_response(status, headers, exc_info=None):
+            try:
+                return start_response(status, headers, exc_info)
+            except:
+                raise
+            else:
+                # NOTE(review): never reached — the `return` in the try
+                # block exits before the else clause runs, so `started`
+                # stays empty and the 500 below is always started fresh.
+                started.append(True)
+        try:
+            __traceback_supplement__ = errormiddleware.Supplement, self, environ
+            app_iter = self.application(environ, detect_start_response)
+            try:
+                # Consume eagerly so errors raised during iteration are
+                # caught here rather than by the server.
+                return_iter = list(app_iter)
+                return return_iter
+            finally:
+                if hasattr(app_iter, 'close'):
+                    app_iter.close()
+        except:
+            exc_info = sys.exc_info()
+            # Exceptions the caller declared as expected propagate as-is.
+            for expected in environ.get('paste.expected_exceptions', []):
+                if isinstance(exc_info[1], expected):
+                    raise
+
+            # Tell the Registry to save its StackedObjectProxies current state
+            # for later restoration
+            registry.restorer.save_registry_state(environ)
+
+            count = get_debug_count(environ)
+            view_uri = self.make_view_url(environ, base_path, count)
+            if not started:
+                headers = [('content-type', 'text/html')]
+                headers.append(('X-Debug-URL', view_uri))
+                start_response('500 Internal Server Error',
+                               headers,
+                               exc_info)
+            msg = 'Debug at: %s\n' % view_uri
+            if six.PY3:
+                msg = msg.encode('utf8')
+            environ['wsgi.errors'].write(msg)
+
+            exc_data = collector.collect_exception(*exc_info)
+            debug_info = DebugInfo(count, exc_info, exc_data, base_path,
+                                   environ, view_uri)
+            assert count not in self.debug_infos
+            self.debug_infos[count] = debug_info
+
+            if self.xmlhttp_key:
+                # XMLHttpRequest callers get a compact, script-free page.
+                get_vars = request.parse_querystring(environ)
+                if dict(get_vars).get(self.xmlhttp_key):
+                    exc_data = collector.collect_exception(*exc_info)
+                    html = formatter.format_html(
+                        exc_data, include_hidden_frames=False,
+                        include_reusable=False, show_extra_data=False)
+                    return [html]
+
+            # @@: it would be nice to deal with bad content types here
+            return debug_info.content()
+
+    def exception_handler(self, exc_info, environ):
+        """Render a non-interactive error page for the given exc_info."""
+        simple_html_error = False
+        if self.xmlhttp_key:
+            get_vars = request.parse_querystring(environ)
+            if dict(get_vars).get(self.xmlhttp_key):
+                simple_html_error = True
+        return errormiddleware.handle_exception(
+            exc_info, environ['wsgi.errors'],
+            html=True,
+            debug_mode=True,
+            simple_html_error=simple_html_error)
+
+class DebugInfo(object):
+    """
+    Per-exception state: the collected traceback frames plus everything
+    needed to re-render the interactive debugger page for this error.
+    """
+
+    def __init__(self, counter, exc_info, exc_data, base_path,
+                 environ, view_uri):
+        self.counter = counter
+        self.exc_data = exc_data
+        self.base_path = base_path
+        self.environ = environ
+        self.view_uri = view_uri
+        self.created = time.time()
+        self.exc_type, self.exc_value, self.tb = exc_info
+        # Marker checked below: if a frame's locals contain it, the
+        # formatter itself raised and we stop collecting to avoid recursion.
+        __exception_formatter__ = 1
+        self.frames = []
+        n = 0
+        tb = self.tb
+        # Walk the traceback chain, capped by the module-level `limit`.
+        while tb is not None and (limit is None or n < limit):
+            if tb.tb_frame.f_locals.get('__exception_formatter__'):
+                # Stop recursion. @@: should make a fake ExceptionFrame
+                break
+            self.frames.append(tb)
+            tb = tb.tb_next
+            n += 1
+
+    def json(self):
+        """Return the JSON-able representation of this object"""
+        return {
+            'uri': self.view_uri,
+            'created': time.strftime('%c', time.gmtime(self.created)),
+            'created_timestamp': self.created,
+            'exception_type': str(self.exc_type),
+            'exception': str(self.exc_value),
+            }
+
+    def frame(self, tbid):
+        # `tbid` is the id() of one of the traceback objects collected in
+        # __init__ (embedded in the page by EvalHTMLFormatter).
+        for frame in self.frames:
+            if id(frame) == tbid:
+                return frame
+        else:
+            # for/else: only reached when the loop exhausts without a match.
+            raise ValueError("No frame by id %s found from %r" % (tbid, self.frames))
+
+    def wsgi_application(self, environ, start_response):
+        """Serve the cached debugger page (used by EvalException.view)."""
+        start_response('200 OK', [('content-type', 'text/html')])
+        return self.content()
+
+    def content(self):
+        """Return the full debugger page as a one-element body list."""
+        html = format_eval_html(self.exc_data, self.base_path, self.counter)
+        head_html = (formatter.error_css + formatter.hide_display_js)
+        head_html += self.eval_javascript()
+        repost_button = make_repost_button(self.environ)
+        page = error_template % {
+            'repost_button': repost_button or '',
+            'head_html': head_html,
+            'body': html}
+        if six.PY3:
+            page = page.encode('utf8')
+        return [page]
+
+    def eval_javascript(self):
+        # Script tags loading MochiKit and debug.js, plus the two globals
+        # (debug_base, debug_count) that debug.js expects.
+        base_path = self.base_path + '/_debug'
+        return (
+            '<script type="text/javascript" src="%s/media/MochiKit.packed.js">'
+            '</script>\n'
+            '<script type="text/javascript" src="%s/media/debug.js">'
+            '</script>\n'
+            '<script type="text/javascript">\n'
+            'debug_base = %r;\n'
+            'debug_count = %r;\n'
+            '</script>\n'
+            % (base_path, base_path, base_path, self.counter))
+
+class EvalHTMLFormatter(formatter.HTMLFormatter):
+    """
+    HTMLFormatter subclass that appends an expand/collapse link (wired to
+    debug.js showFrame()) to every rendered source line, carrying the
+    frame's tbid so the client can fetch that frame's locals.
+    """
+
+    def __init__(self, base_path, counter, **kw):
+        super(EvalHTMLFormatter, self).__init__(**kw)
+        self.base_path = base_path
+        self.counter = counter
+
+    def format_source_line(self, filename, frame):
+        line = formatter.HTMLFormatter.format_source_line(
+            self, filename, frame)
+        return (line +
+                ' <a href="#" class="switch_source" '
+                'tbid="%s" onClick="return showFrame(this)">&nbsp; &nbsp; '
+                '<img src="%s/_debug/media/plus.jpg" border=0 width=9 '
+                'height=9> &nbsp; &nbsp;</a>'
+                % (frame.tbid, self.base_path))
+
+def make_table(items):
+    """
+    Render a mapping (or sequence of ``(name, value)`` pairs) as a
+    two-column HTML table of pretty-printed values, truncating long
+    values behind an expandable '...' link (see debug.js expandLong).
+    """
+    if isinstance(items, dict):
+        # NOTE(review): Python 2 only — dict.items() is a view in Python 3
+        # and has no .sort(); this should be items = sorted(items.items()).
+        items = items.items()
+        items.sort()
+    rows = []
+    i = 0
+    for name, value in items:
+        i += 1
+        out = StringIO()
+        try:
+            pprint.pprint(value, out)
+        except Exception as e:
+            # repr() of arbitrary objects can raise; degrade to a message.
+            print('Error: %s' % e, file=out)
+        value = html_quote(out.getvalue())
+        if len(value) > 100:
+            # @@: This can actually break the HTML :(
+            # should I truncate before quoting?
+            orig_value = value
+            value = value[:100]
+            value += '<a class="switch_source" style="background-color: #999" href="#" onclick="return expandLong(this)">...</a>'
+            value += '<span style="display: none">%s</span>' % orig_value[100:]
+        value = formatter.make_wrappable(value)
+        if i % 2:
+            attr = ' class="even"'
+        else:
+            attr = ' class="odd"'
+        # NOTE(review): the trailing '<td>' below looks like a typo for
+        # '</td>' — browsers tolerate it, but worth confirming upstream.
+        rows.append('<tr%s style="vertical-align: top;"><td>'
+                    '<b>%s</b></td><td style="overflow: auto">%s<td></tr>'
+                    % (attr, html_quote(name),
+                       preserve_whitespace(value, quote=False)))
+    return '<table>%s</table>' % (
+        '\n'.join(rows))
+
+def format_eval_html(exc_data, base_path, counter):
+    """
+    Build the main body of the debugger page: the short (filtered)
+    traceback, an optional full traceback, and a plain-text version in a
+    hidden <textarea>, each toggled client-side by show_button().
+    """
+    short_formatter = EvalHTMLFormatter(
+        base_path=base_path,
+        counter=counter,
+        include_reusable=False)
+    short_er = short_formatter.format_collected_data(exc_data)
+    long_formatter = EvalHTMLFormatter(
+        base_path=base_path,
+        counter=counter,
+        show_hidden_frames=True,
+        show_extra_data=False,
+        include_reusable=False)
+    long_er = long_formatter.format_collected_data(exc_data)
+    text_er = formatter.format_text(exc_data, show_hidden_frames=True)
+    if short_formatter.filter_frames(exc_data.frames) != \
+       long_formatter.filter_frames(exc_data.frames):
+        # Only display the full traceback when it differs from the
+        # short version
+        full_traceback_html = """
+        <br>
+        <script type="text/javascript">
+        show_button('full_traceback', 'full traceback')
+        </script>
+        <div id="full_traceback" class="hidden-data">
+        %s
+        </div>
+        """ % long_er
+    else:
+        full_traceback_html = ''
+
+    return """
+    %s
+    %s
+    <br>
+    <script type="text/javascript">
+    show_button('text_version', 'text version')
+    </script>
+    <div id="text_version" class="hidden-data">
+    <textarea style="width: 100%%" rows=10 cols=60>%s</textarea>
+    </div>
+    """ % (short_er, full_traceback_html, cgi.escape(text_er))
+
+def make_repost_button(environ):
+    """
+    Return HTML for a button that re-issues the failed request, or None
+    when the request cannot be reconstructed (non-GET: the POST body is
+    no longer available at this point).
+    """
+    url = request.construct_url(environ)
+    if environ['REQUEST_METHOD'] == 'GET':
+        return ('<button onclick="window.location.href=%r">'
+                'Re-GET Page</button><br>' % url)
+    else:
+        # @@: I'd like to reconstruct this, but I can't because
+        # the POST body is probably lost at this point, and
+        # I can't get it back :(
+        return None
+    # NOTE(review): everything below the returns above is unreachable; the
+    # string literal is dead code kept deliberately (see the @@ note).
+    # @@: Use or lose the following code block
+    """
+    fields = []
+    for name, value in wsgilib.parse_formvars(
+        environ, include_get_vars=False).items():
+        if hasattr(value, 'filename'):
+            # @@: Arg, we'll just submit the body, and leave out
+            # the filename :(
+            value = value.value
+        fields.append(
+            '<input type="hidden" name="%s" value="%s">'
+            % (html_quote(name), html_quote(value)))
+    return '''
+<form action="%s" method="POST">
+%s
+<input type="submit" value="Re-POST Page">
+</form>''' % (url, '\n'.join(fields))
+"""
+
+
+def input_form(tbid, debug_info):
+    """
+    Return the HTML console form for frame `tbid`: an output area, the
+    input field, and Execute/Expand buttons wired to the submitInput /
+    expandInput handlers in debug.js.  `debug_info` is currently unused
+    but kept for interface symmetry with the other frame helpers.
+    """
+    return '''
+<form action="#" method="POST"
+ onsubmit="return submitInput($(\'submit_%(tbid)s\'), %(tbid)s)">
+<div id="exec-output-%(tbid)s" style="width: 95%%;
+ padding: 5px; margin: 5px; border: 2px solid #000;
+ display: none"></div>
+<input type="text" name="input" id="debug_input_%(tbid)s"
+ style="width: 100%%"
+ autocomplete="off" onkeypress="upArrow(this, event)"><br>
+<input type="submit" value="Execute" name="submitbutton"
+ onclick="return submitInput(this, %(tbid)s)"
+ id="submit_%(tbid)s"
+ input-from="debug_input_%(tbid)s"
+ output-to="exec-output-%(tbid)s">
+<input type="submit" value="Expand"
+ onclick="return expandInput(this)">
+</form>
+ ''' % {'tbid': tbid}
+
+# Outer page template filled in by DebugInfo.content(); expects the keys
+# 'head_html', 'repost_button' and 'body'.  The error-area div is hidden
+# until debug.js showError() fills it.
+error_template = '''
+<html>
+<head>
+ <title>Server Error</title>
+ %(head_html)s
+</head>
+<body>
+
+<div id="error-area" style="display: none; background-color: #600; color: #fff; border: 2px solid black">
+<div id="error-container"></div>
+<button onclick="return clearError()">clear this</button>
+</div>
+
+%(repost_button)s
+
+%(body)s
+
+</body>
+</html>
+'''
+
+def make_eval_exception(app, global_conf, xmlhttp_key=None):
+    """
+    Wraps the application in an interactive debugger.
+
+    This debugger is a major security hole, and should only be
+    used during development.
+
+    xmlhttp_key is a string that, if present in QUERY_STRING,
+    indicates that the request is an XMLHttp request, and the
+    Javascript/interactive debugger should not be returned.  (If you
+    try to put the debugger somewhere with innerHTML, you will often
+    crash the browser)
+    """
+    # Paste Deploy entry point; xmlhttp_key may come from the config file.
+    if xmlhttp_key is None:
+        xmlhttp_key = global_conf.get('xmlhttp_key', '_')
+    return EvalException(app, xmlhttp_key=xmlhttp_key)
diff --git a/paste/exceptions/__init__.py b/paste/exceptions/__init__.py
new file mode 100644
index 0000000..813f855
--- /dev/null
+++ b/paste/exceptions/__init__.py
@@ -0,0 +1,6 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Package for catching exceptions and displaying annotated exception
+reports
+"""
diff --git a/paste/exceptions/collector.py b/paste/exceptions/collector.py
new file mode 100644
index 0000000..632ce06
--- /dev/null
+++ b/paste/exceptions/collector.py
@@ -0,0 +1,523 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+## Originally zExceptions.ExceptionFormatter from Zope;
+## Modified by Ian Bicking, Imaginary Landscape, 2005
+"""
+An exception collector that finds traceback information plus
+supplements
+"""
+
+import sys
+import traceback
+import time
+from six.moves import cStringIO as StringIO
+import linecache
+from paste.exceptions import serial_number_generator
+import warnings
+
# When true, failures inside a __traceback_supplement__ factory are
# themselves captured as a traceback (see collectLine); otherwise they
# are silently swallowed.
DEBUG_EXCEPTION_FORMATTER = True
# Prefix for the generated exception identification codes.
DEBUG_IDENT_PREFIX = 'E-'
# Encoding used by ExceptionCollector.safeStr() when plain str() fails.
FALLBACK_ENCODING = 'UTF-8'

__all__ = ['collect_exception', 'ExceptionCollector']
+
class ExceptionCollector(object):

    """
    Produces a data structure that can be used by formatters to
    display exception reports.

    Magic variables:

    If you define one of these variables in your local scope, you can
    add information to tracebacks that happen in that context. This
    allows applications to add all sorts of extra information about
    the context of the error, including URLs, environmental variables,
    users, hostnames, etc. These are the variables we look for:

    ``__traceback_supplement__``:
        You can define this locally or globally (unlike all the other
        variables, which must be defined locally).

        ``__traceback_supplement__`` is a tuple of ``(factory, arg1,
        arg2...)``. When there is an exception, ``factory(arg1, arg2,
        ...)`` is called, and the resulting object is inspected for
        supplemental information.

    ``__traceback_info__``:
        This information is added to the traceback, usually fairly
        literally.

    ``__traceback_hide__``:
        If set and true, this indicates that the frame should be
        hidden from abbreviated tracebacks. This way you can hide
        some of the complexity of the larger framework and let the
        user focus on their own errors.

        By setting it to ``'before'``, all frames before this one will
        be thrown away. By setting it to ``'after'`` then all frames
        after this will be thrown away until ``'reset'`` is found. In
        each case the frame where it is set is included, unless you
        append ``'_and_this'`` to the value (e.g.,
        ``'before_and_this'``).

        Note that formatters will ignore this entirely if the frame
        that contains the error wouldn't normally be shown according
        to these rules.

    ``__traceback_reporter__``:
        This should be a reporter object (see the reporter module),
        or a list/tuple of reporter objects. All reporters found this
        way will be given the exception, innermost first.

    ``__traceback_decorator__``:
        This object (defined in a local or global scope) will get the
        result of this function (the CollectedException defined
        below). It may modify this object in place, or return an
        entirely new object. This gives the object the ability to
        manipulate the traceback arbitrarily.

    The actually interpretation of these values is largely up to the
    reporters and formatters.

    ``collect_exception(*sys.exc_info())`` will return an object with
    several attributes:

    ``frames``:
        A list of frames
    ``exception_formatted``:
        The formatted exception, generally a full traceback
    ``exception_type``:
        The type of the exception, like ``ValueError``
    ``exception_value``:
        The string value of the exception, like ``'x not in list'``
    ``identification_code``:
        A hash of the exception data meant to identify the general
        exception, so that it shares this code with other exceptions
        that derive from the same problem. The code is a hash of
        all the module names and function names in the traceback,
        plus exception_type. This should be shown to users so they
        can refer to the exception later. (@@: should it include a
        portion that allows identification of the specific instance
        of the exception as well?)

    The list of frames goes innermost first. Each frame has these
    attributes; some values may be None if they could not be
    determined.

    ``modname``:
        the name of the module
    ``filename``:
        the filename of the module
    ``lineno``:
        the line of the error
    ``revision``:
        the contents of __version__ or __revision__
    ``name``:
        the function name
    ``supplement``:
        an object created from ``__traceback_supplement__``
    ``supplement_exception``:
        a simple traceback of any exception ``__traceback_supplement__``
        created
    ``traceback_info``:
        the str() of any ``__traceback_info__`` variable found in the local
        scope (@@: should it str()-ify it or not?)
    ``traceback_hide``:
        the value of any ``__traceback_hide__`` variable
    ``traceback_log``:
        the value of any ``__traceback_log__`` variable


    ``__traceback_supplement__`` is thrown away, but a fixed
    set of attributes are captured; each of these attributes is
    optional.

    ``object``:
        the name of the object being visited
    ``source_url``:
        the original URL requested
    ``line``:
        the line of source being executed (for interpreters, like ZPT)
    ``column``:
        the column of source being executed
    ``expression``:
        the expression being evaluated (also for interpreters)
    ``warnings``:
        a list of (string) warnings to be displayed
    ``getInfo``:
        a function/method that takes no arguments, and returns a string
        describing any extra information
    ``extraData``:
        a function/method that takes no arguments, and returns a
        dictionary. The contents of this dictionary will not be
        displayed in the context of the traceback, but globally for
        the exception. Results will be grouped by the keys in the
        dictionaries (which also serve as titles). The keys can also
        be tuples of (importance, title); in this case the importance
        should be ``important`` (shows up at top), ``normal`` (shows
        up somewhere; unspecified), ``supplemental`` (shows up at
        bottom), or ``extra`` (shows up hidden or not at all).

    These are used to create an object with attributes of the same
    names (``getInfo`` becomes a string attribute, not a method).
    ``__traceback_supplement__`` implementations should be careful to
    produce values that are relatively static and unlikely to cause
    further errors in the reporting system -- any complex
    introspection should go in ``getInfo()`` and should ultimately
    return a string.

    Note that all attributes are optional, and under certain
    circumstances may be None or may not exist at all -- the collector
    can only do a best effort, but must avoid creating any exceptions
    itself.

    Formatters may want to use ``__traceback_hide__`` as a hint to
    hide frames that are part of the 'framework' or underlying system.
    There are a variety of rules about special values for this
    variables that formatters should be aware of.

    TODO:

    More attributes in __traceback_supplement__?  Maybe an attribute
    that gives a list of local variables that should also be
    collected?  Also, attributes that would be explicitly meant for
    the entire request, not just a single frame. Right now some of
    the fixed set of attributes (e.g., source_url) are meant for this
    use, but there's no explicit way for the supplement to indicate
    new values, e.g., logged-in user, HTTP referrer, environment, etc.
    Also, the attributes that do exist are Zope/Web oriented.

    More information on frames?  cgitb, for instance, produces
    extensive information on local variables. There exists the
    possibility that getting this information may cause side effects,
    which can make debugging more difficult; but it also provides
    fodder for post-mortem debugging. However, the collector is not
    meant to be configurable, but to capture everything it can and let
    the formatters be configurable. Maybe this would have to be a
    configuration value, or maybe it could be indicated by another
    magical variable (which would probably mean 'show all local
    variables below this frame')
    """

    # Only look up __revision__/__version__ when true; off by default.
    show_revisions = 0

    def __init__(self, limit=None):
        # Maximum number of frames to collect; None defers to
        # sys.tracebacklimit (see getLimit).
        self.limit = limit

    def getLimit(self):
        """Return the effective frame limit (None means unlimited)."""
        limit = self.limit
        if limit is None:
            limit = getattr(sys, 'tracebacklimit', None)
        return limit

    def getRevision(self, globals):
        """
        Best-effort lookup of ``__revision__``/``__version__`` in the
        frame's globals; returns None unless ``show_revisions`` is set.
        Never raises.
        """
        if not self.show_revisions:
            return None
        revision = globals.get('__revision__', None)
        if revision is None:
            # Incorrect but commonly used spelling
            revision = globals.get('__version__', None)

        if revision is not None:
            try:
                revision = str(revision).strip()
            except:
                # Deliberate catch-all: collection must never raise.
                revision = '???'
        return revision

    def collectSupplement(self, supplement, tb):
        """
        Copy the interesting attributes off a ``__traceback_supplement__``
        object into a SupplementaryData instance (the supplement object
        itself is not retained).
        """
        result = {}

        for name in ('object', 'source_url', 'line', 'column',
                     'expression', 'warnings'):
            result[name] = getattr(supplement, name, None)

        # getInfo/extraData are *called* here; their return values (not
        # the methods) are stored.
        func = getattr(supplement, 'getInfo', None)
        if func:
            result['info'] = func()
        else:
            result['info'] = None
        func = getattr(supplement, 'extraData', None)
        if func:
            result['extra'] = func()
        else:
            result['extra'] = None
        return SupplementaryData(**result)

    def collectLine(self, tb, extra_data):
        """
        Collect one traceback frame into a plain dict, including any
        magic ``__traceback_*__`` variables found in its scope.
        ``extra_data`` is mutated: supplement extraData() results are
        appended under their keys.
        """
        f = tb.tb_frame
        lineno = tb.tb_lineno
        co = f.f_code
        filename = co.co_filename
        name = co.co_name
        globals = f.f_globals
        locals = f.f_locals
        if not hasattr(locals, 'keys'):
            # Something weird about this frame; it's not a real dict
            warnings.warn(
                "Frame %s has an invalid locals(): %r" % (
                    globals.get('__name__', 'unknown'), locals))
            locals = {}
        data = {}
        data['modname'] = globals.get('__name__', None)
        data['filename'] = filename
        data['lineno'] = lineno
        data['revision'] = self.getRevision(globals)
        data['name'] = name
        # id() of the traceback object, used to reference this scope later.
        data['tbid'] = id(tb)

        # Output a traceback supplement, if any.
        if '__traceback_supplement__' in locals:
            # Use the supplement defined in the function.
            tbs = locals['__traceback_supplement__']
        elif '__traceback_supplement__' in globals:
            # Use the supplement defined in the module.
            # This is used by Scripts (Python).
            tbs = globals['__traceback_supplement__']
        else:
            tbs = None
        if tbs is not None:
            factory = tbs[0]
            args = tbs[1:]
            try:
                supp = factory(*args)
                data['supplement'] = self.collectSupplement(supp, tb)
                if data['supplement'].extra:
                    for key, value in data['supplement'].extra.items():
                        extra_data.setdefault(key, []).append(value)
            except:
                # The supplement is user code and may itself break; keep
                # its traceback (in debug mode) instead of propagating.
                if DEBUG_EXCEPTION_FORMATTER:
                    out = StringIO()
                    traceback.print_exc(file=out)
                    text = out.getvalue()
                    data['supplement_exception'] = text
                # else just swallow the exception.

        try:
            tbi = locals.get('__traceback_info__', None)
            if tbi is not None:
                data['traceback_info'] = str(tbi)
        except:
            # str(tbi) may fail; collection must never raise.
            pass

        marker = []
        for name in ('__traceback_hide__', '__traceback_log__',
                     '__traceback_decorator__'):
            try:
                # marker is a unique sentinel so that any value -- even
                # None/False -- set by the user is preserved.
                tbh = locals.get(name, globals.get(name, marker))
                if tbh is not marker:
                    data[name[2:-2]] = tbh
            except:
                pass

        return data

    def collectExceptionOnly(self, etype, value):
        """Return the exception line(s) alone, like the last line of a
        traceback (a list of strings)."""
        return traceback.format_exception_only(etype, value)

    def collectException(self, etype, value, tb, limit=None):
        """
        Walk the traceback ``tb`` and return a CollectedException for
        the ``(etype, value, tb)`` triple, applying any
        ``__traceback_decorator__`` callables found along the way.
        """
        # The next line provides a way to detect recursion.
        __exception_formatter__ = 1
        frames = []
        ident_data = []
        traceback_decorators = []
        if limit is None:
            limit = self.getLimit()
        n = 0
        extra_data = {}
        while tb is not None and (limit is None or n < limit):
            if tb.tb_frame.f_locals.get('__exception_formatter__'):
                # Stop recursion. @@: should make a fake ExceptionFrame
                frames.append('(Recursive formatException() stopped)\n')
                break
            data = self.collectLine(tb, extra_data)
            frame = ExceptionFrame(**data)
            frames.append(frame)
            if frame.traceback_decorator is not None:
                traceback_decorators.append(frame.traceback_decorator)
            # Module/function names feed the stable identification hash.
            ident_data.append(frame.modname or '?')
            ident_data.append(frame.name or '?')
            tb = tb.tb_next
            n = n + 1
        ident_data.append(str(etype))
        ident = serial_number_generator.hash_identifier(
            ' '.join(ident_data), length=5, upper=True,
            prefix=DEBUG_IDENT_PREFIX)

        result = CollectedException(
            frames=frames,
            exception_formatted=self.collectExceptionOnly(etype, value),
            exception_type=etype,
            exception_value=self.safeStr(value),
            identification_code=ident,
            date=time.localtime(),
            extra_data=extra_data)
        if etype is ImportError:
            # sys.path is the first thing to check for a failed import.
            extra_data[('important', 'sys.path')] = [sys.path]
        for decorator in traceback_decorators:
            try:
                new_result = decorator(result)
                if new_result is not None:
                    result = new_result
            except:
                # Decorators are user code; never let them break collection.
                pass
        return result

    def safeStr(self, obj):
        """
        Return ``str(obj)``, falling back gracefully when the object's
        string conversion raises ``UnicodeEncodeError``.
        """
        try:
            return str(obj)
        except UnicodeEncodeError:
            # Bug fix: the previous code unconditionally referenced the
            # Python-2-only ``unicode`` builtin here, which raised
            # NameError on Python 3 instead of producing a fallback.
            try:
                if str is bytes:
                    # Python 2: encode the unicode form of the object.
                    return unicode(obj).encode(FALLBACK_ENCODING, 'replace')
                # Python 3: repr() is safe and never raises here.
                return repr(obj)
            except Exception:
                # This is when something is really messed up, but this can
                # happen when the __str__ of an object has to handle unicode
                return repr(obj)
+
# Default cap on frames collected via this module's convenience
# collector (clamped further by sys.tracebacklimit when that is set).
limit = 200
+
class Bunch(object):

    """
    A generic attribute container: keyword arguments given to the
    constructor become instance attributes.
    """

    def __init__(self, **attrs):
        self.__dict__.update(attrs)

    def __repr__(self):
        # Show only public attributes, each value truncated to 30 chars
        # of its str() form.
        public = [(key, value) for key, value in self.__dict__.items()
                  if not key.startswith('_')]
        body = ' '.join('%s=%r' % (key, str(value)[:30])
                        for key, value in public)
        return '<%s %s>' % (self.__class__.__name__, body)
+
class CollectedException(Bunch):
    """
    This is the result of collecting the exception; it contains copies
    of data of interest.
    """
    # A list of frames (ExceptionFrame instances), innermost last:
    frames = []
    # The result of traceback.format_exception_only; this looks
    # like a normal traceback you'd see in the interactive interpreter
    exception_formatted = None
    # The *string* representation of the type of the exception
    # (@@: should we give the actual class? -- we can't keep the
    # actual exception around, but the class should be safe)
    # Something like 'ValueError'
    exception_type = None
    # The string representation of the exception, from ``str(e)``.
    exception_value = None
    # An identifier which should more-or-less classify this particular
    # exception, including where in the code it happened.
    identification_code = None
    # The date, as time.localtime() returns:
    date = None
    # A dictionary of supplemental data, keyed by title or
    # (importance, title) tuples:
    extra_data = {}
+
class SupplementaryData(Bunch):
    """
    The result of __traceback_supplement__.  We don't keep the
    supplement object around, for fear of GC problems and whatnot.
    (@@: Maybe I'm being too superstitious about copying only specific
    information over)
    """

    # These attributes are copied from the object, or left as None
    # if the object doesn't have these attributes:
    object = None
    source_url = None
    line = None
    column = None
    expression = None
    warnings = None
    # This is the *return value* of supplement.getInfo():
    info = None
+
class ExceptionFrame(Bunch):
    """
    This represents one frame of the exception.  Each frame is a
    context in the call stack, typically represented by a line
    number and module name in the traceback.
    """

    # The name of the module; can be None, especially when the code
    # isn't associated with a module.
    modname = None
    # The filename (@@: when no filename, is it None or '?'?)
    filename = None
    # Line number
    lineno = None
    # The value of __revision__ or __version__ -- but only if
    # show_revision = True (by defaut it is false). (@@: Why not
    # collect this?)
    revision = None
    # The name of the function with the error (@@: None or '?' when
    # unknown?)
    name = None
    # A SupplementaryData object, if __traceback_supplement__ was found
    # (and produced no errors)
    supplement = None
    # If accessing __traceback_supplement__ causes any error, the
    # plain-text traceback is stored here
    supplement_exception = None
    # The str() of any __traceback_info__ value found
    traceback_info = None
    # The value of __traceback_hide__
    traceback_hide = False
    # The value of __traceback_decorator__
    traceback_decorator = None
    # The id() of the traceback scope, can be used to reference the
    # scope for use elsewhere
    tbid = None

    def get_source_line(self, context=0):
        """
        Return the source of the current line of this frame.  You
        probably want to .strip() it as well, as it is likely to have
        leading whitespace.

        If context is given, then that many lines on either side will
        also be returned.  E.g., context=1 will give 3 lines.
        """
        if not self.filename or not self.lineno:
            return None
        lines = []
        # linecache returns '' for out-of-range lines, so the window may
        # silently shrink at file boundaries.
        for lineno in range(self.lineno-context, self.lineno+context+1):
            lines.append(linecache.getline(self.filename, lineno))
        return ''.join(lines)
+
# Clamp the module-wide default to the interpreter's configured
# traceback limit, when one exists.
if hasattr(sys, 'tracebacklimit'):
    limit = min(limit, sys.tracebacklimit)

# Shared collector instance backing the collect_exception() helper.
col = ExceptionCollector()
+
def collect_exception(t, v, tb, limit=None):
    """
    Collect an exception as returned by ``sys.exc_info()``.

    Typical usage::

        try:
            blah blah
        except:
            exc_data = collect_exception(*sys.exc_info())

    Delegates to the shared module-level ``ExceptionCollector``.
    """
    return col.collectException(t, v, tb, limit=limit)
diff --git a/paste/exceptions/errormiddleware.py b/paste/exceptions/errormiddleware.py
new file mode 100644
index 0000000..95c1261
--- /dev/null
+++ b/paste/exceptions/errormiddleware.py
@@ -0,0 +1,466 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Error handler middleware
+"""
+import sys
+import traceback
+import cgi
+from six.moves import cStringIO as StringIO
+from paste.exceptions import formatter, collector, reporter
+from paste import wsgilib
+from paste import request
+import six
+
+__all__ = ['ErrorMiddleware', 'handle_exception']
+
+class _NoDefault(object):
+ def __repr__(self):
+ return '<NoDefault>'
+NoDefault = _NoDefault()
+
class ErrorMiddleware(object):

    """
    Error handling middleware

    Usage::

        error_catching_wsgi_app = ErrorMiddleware(wsgi_app)

    Settings:

      ``debug``:
          If true, then tracebacks will be shown in the browser.

      ``error_email``:
          an email address (or list of addresses) to send exception
          reports to

      ``error_log``:
          a filename to append tracebacks to

      ``show_exceptions_in_wsgi_errors``:
          If true, then errors will be printed to ``wsgi.errors``
          (frequently a server error log, or stderr).

      ``from_address``, ``smtp_server``, ``error_subject_prefix``, ``smtp_username``, ``smtp_password``, ``smtp_use_tls``:
          variables to control the emailed exception reports

      ``error_message``:
          When debug mode is off, the error message to show to users.

      ``xmlhttp_key``:
          When this key (default ``_``) is in the request GET variables
          (not POST!), expect that this is an XMLHttpRequest, and the
          response should be more minimal; it should not be a complete
          HTML page.

    Environment Configuration:

      ``paste.throw_errors``:
          If this setting in the request environment is true, then this
          middleware is disabled. This can be useful in a testing situation
          where you don't want errors to be caught and transformed.

      ``paste.expected_exceptions``:
          When this middleware encounters an exception listed in this
          environment variable and when the ``start_response`` has not
          yet occurred, the exception will be re-raised instead of being
          caught.  This should generally be set by middleware that may
          (but probably shouldn't be) installed above this middleware,
          and wants to get certain exceptions.  Exceptions raised after
          ``start_response`` have been called are always caught since
          by definition they are no longer expected.

    """

    def __init__(self, application, global_conf=None,
                 debug=NoDefault,
                 error_email=None,
                 error_log=None,
                 show_exceptions_in_wsgi_errors=NoDefault,
                 from_address=None,
                 smtp_server=None,
                 smtp_username=None,
                 smtp_password=None,
                 smtp_use_tls=False,
                 error_subject_prefix=None,
                 error_message=None,
                 xmlhttp_key=None):
        from paste.util import converters
        self.application = application
        # @@: global_conf should be handled elsewhere in a separate
        # function for the entry point
        if global_conf is None:
            global_conf = {}
        # NoDefault sentinels let explicit keyword arguments win over
        # global_conf values, including explicit False.
        if debug is NoDefault:
            debug = converters.asbool(global_conf.get('debug'))
        if show_exceptions_in_wsgi_errors is NoDefault:
            show_exceptions_in_wsgi_errors = converters.asbool(global_conf.get('show_exceptions_in_wsgi_errors'))
        self.debug_mode = converters.asbool(debug)
        if error_email is None:
            # Several historical config spellings are accepted.
            error_email = (global_conf.get('error_email')
                           or global_conf.get('admin_email')
                           or global_conf.get('webmaster_email')
                           or global_conf.get('sysadmin_email'))
        self.error_email = converters.aslist(error_email)
        self.error_log = error_log
        self.show_exceptions_in_wsgi_errors = show_exceptions_in_wsgi_errors
        if from_address is None:
            from_address = global_conf.get('error_from_address', 'errors@localhost')
        self.from_address = from_address
        if smtp_server is None:
            smtp_server = global_conf.get('smtp_server', 'localhost')
        self.smtp_server = smtp_server
        self.smtp_username = smtp_username or global_conf.get('smtp_username')
        self.smtp_password = smtp_password or global_conf.get('smtp_password')
        self.smtp_use_tls = smtp_use_tls or converters.asbool(global_conf.get('smtp_use_tls'))
        self.error_subject_prefix = error_subject_prefix or ''
        if error_message is None:
            error_message = global_conf.get('error_message')
        self.error_message = error_message
        if xmlhttp_key is None:
            xmlhttp_key = global_conf.get('xmlhttp_key', '_')
        self.xmlhttp_key = xmlhttp_key

    def __call__(self, environ, start_response):
        """
        The WSGI application interface.
        """
        # We want to be careful about not sending headers twice,
        # and the content type that the app has committed to (if there
        # is an exception in the iterator body of the response)
        if environ.get('paste.throw_errors'):
            return self.application(environ, start_response)
        # Mark the request so nested ErrorMiddleware instances pass
        # errors straight through to this (outermost) one.
        environ['paste.throw_errors'] = True

        try:
            __traceback_supplement__ = Supplement, self, environ
            sr_checker = ResponseStartChecker(start_response)
            app_iter = self.application(environ, sr_checker)
            # Exceptions raised while iterating the body are handled by
            # the catching iterator, not this try/except.
            return self.make_catching_iter(app_iter, environ, sr_checker)
        except:
            exc_info = sys.exc_info()
            try:
                for expect in environ.get('paste.expected_exceptions', []):
                    if isinstance(exc_info[1], expect):
                        raise
                # Passing exc_info lets the server replace headers if
                # start_response was already called.
                start_response('500 Internal Server Error',
                               [('content-type', 'text/html')],
                               exc_info)
                # @@: it would be nice to deal with bad content types here
                response = self.exception_handler(exc_info, environ)
                if six.PY3:
                    response = response.encode('utf8')
                return [response]
            finally:
                # clean up locals...
                exc_info = None

    def make_catching_iter(self, app_iter, environ, sr_checker):
        """Wrap ``app_iter`` so body-iteration errors are caught; plain
        lists/tuples are returned as-is since they cannot raise."""
        if isinstance(app_iter, (list, tuple)):
            # These don't raise
            return app_iter
        return CatchingIter(app_iter, environ, sr_checker, self)

    def exception_handler(self, exc_info, environ):
        """Format/report ``exc_info`` per this middleware's settings and
        return the response body (text)."""
        simple_html_error = False
        if self.xmlhttp_key:
            get_vars = request.parse_querystring(environ)
            if dict(get_vars).get(self.xmlhttp_key):
                # XMLHttpRequest: return a fragment, not a full page.
                simple_html_error = True
        return handle_exception(
            exc_info, environ['wsgi.errors'],
            html=True,
            debug_mode=self.debug_mode,
            error_email=self.error_email,
            error_log=self.error_log,
            show_exceptions_in_wsgi_errors=self.show_exceptions_in_wsgi_errors,
            error_email_from=self.from_address,
            smtp_server=self.smtp_server,
            smtp_username=self.smtp_username,
            smtp_password=self.smtp_password,
            smtp_use_tls=self.smtp_use_tls,
            error_subject_prefix=self.error_subject_prefix,
            error_message=self.error_message,
            simple_html_error=simple_html_error)
+
class ResponseStartChecker(object):
    """
    Wraps a WSGI ``start_response`` callable and records whether it has
    been called, so error handling later knows if headers already went
    out.
    """

    def __init__(self, start_response):
        self.start_response = start_response
        # Set to True on the first invocation.
        self.response_started = False

    def __call__(self, *args):
        # Record the call before delegating, so the flag is accurate
        # even if the wrapped callable raises.
        self.response_started = True
        self.start_response(*args)
+
class CatchingIter(object):

    """
    A wrapper around the application iterator that will catch
    exceptions raised by the a generator, or by the close method, and
    display or report as necessary.
    """

    def __init__(self, app_iter, environ, start_checker, error_middleware):
        # Keep the original iterable too: .close() lives on it, not on
        # the iterator we pull chunks from.
        self.app_iterable = app_iter
        self.app_iterator = iter(app_iter)
        self.environ = environ
        self.start_checker = start_checker
        self.error_middleware = error_middleware
        self.closed = False

    def __iter__(self):
        return self

    def next(self):
        """Return the next body chunk; on error, return a formatted
        error page chunk instead (and end the iteration)."""
        __traceback_supplement__ = (
            Supplement, self.error_middleware, self.environ)
        if self.closed:
            raise StopIteration
        try:
            # Bug fix: was ``self.app_iterator.next()``, which only
            # exists on Python 2.  On Python 3 that raised
            # AttributeError -- swallowed below and turned into an
            # error page on every request.  The ``next()`` builtin
            # works on both versions.
            return next(self.app_iterator)
        except StopIteration:
            self.closed = True
            close_response = self._close()
            if close_response is not None:
                # .close() failed; emit its error report as the final
                # chunk before stopping.
                return close_response
            else:
                raise StopIteration
        except:
            self.closed = True
            close_response = self._close()
            exc_info = sys.exc_info()
            response = self.error_middleware.exception_handler(
                exc_info, self.environ)
            if close_response is not None:
                response += (
                    '<hr noshade>Error in .close():<br>%s'
                    % close_response)

            if not self.start_checker.response_started:
                # Headers not sent yet -- we can still make this a 500.
                self.start_checker('500 Internal Server Error',
                                   [('content-type', 'text/html')],
                                   exc_info)

            if six.PY3:
                response = response.encode('utf8')
            return response
    __next__ = next

    def close(self):
        # This should at least print something to stderr if the
        # close method fails at this point
        if not self.closed:
            self._close()

    def _close(self):
        """Close the underlying iterable and return an error message
        (text) if closing failed, else None."""
        if not hasattr(self.app_iterable, 'close'):
            return None
        try:
            self.app_iterable.close()
            return None
        except:
            close_response = self.error_middleware.exception_handler(
                sys.exc_info(), self.environ)
            return close_response
+
+
class Supplement(object):

    """
    A ``__traceback_supplement__`` factory that attaches standard WSGI
    request information (CGI and WSGI variables) to collected
    tracebacks.
    """

    def __init__(self, middleware, environ):
        self.middleware = middleware
        self.environ = environ
        self.source_url = request.construct_url(environ)

    def extraData(self):
        """Return the extra-data dict grouping CGI and WSGI variables."""
        environ = self.environ
        cgi_vars = {}
        wsgi_vars = {}
        data = {
            ('extra', 'CGI Variables'): cgi_vars,
            ('extra', 'WSGI Variables'): wsgi_vars,
        }
        # Noise we never display among the WSGI variables.
        hidden = ('paste.config', 'wsgi.errors', 'wsgi.input',
                  'wsgi.multithread', 'wsgi.multiprocess',
                  'wsgi.run_once', 'wsgi.version',
                  'wsgi.url_scheme')
        for key, value in environ.items():
            if key == key.upper():
                # All-caps keys are CGI-style variables; skip empty ones.
                if value:
                    cgi_vars[key] = value
            elif key not in hidden:
                wsgi_vars[key] = value
        if environ['wsgi.version'] != (1, 0):
            # Only surface the version when it is unusual.
            wsgi_vars['wsgi.version'] = environ['wsgi.version']
        flags = tuple(int(bool(environ[key]))
                      for key in ('wsgi.multiprocess',
                                  'wsgi.multithread',
                                  'wsgi.run_once'))
        wsgi_vars['wsgi process'] = self.process_combos[flags]
        wsgi_vars['application'] = self.middleware.application
        if 'paste.config' in environ:
            data[('extra', 'Configuration')] = dict(environ['paste.config'])
        return data

    process_combos = {
        # multiprocess, multithread, run_once
        (0, 0, 0): 'Non-concurrent server',
        (0, 1, 0): 'Multithreaded',
        (1, 0, 0): 'Multiprocess',
        (1, 1, 0): 'Multi process AND threads (?)',
        (0, 0, 1): 'Non-concurrent CGI',
        (0, 1, 1): 'Multithread CGI (?)',
        (1, 0, 1): 'CGI',
        (1, 1, 1): 'Multi thread/process CGI (?)',
    }
+
def handle_exception(exc_info, error_stream, html=True,
                     debug_mode=False,
                     error_email=None,
                     error_log=None,
                     show_exceptions_in_wsgi_errors=False,
                     error_email_from='errors@localhost',
                     smtp_server='localhost',
                     smtp_username=None,
                     smtp_password=None,
                     smtp_use_tls=False,
                     error_subject_prefix='',
                     error_message=None,
                     simple_html_error=False,
                     ):
    """
    For exception handling outside of a web context

    Use like::

        import sys
        from paste.exceptions.errormiddleware import handle_exception
        try:
            do stuff
        except:
            handle_exception(
                sys.exc_info(), sys.stderr, html=False, ...other config...)

    If you want to report, but not fully catch the exception, call
    ``raise`` after ``handle_exception``, which (when given no argument)
    will reraise the exception.
    """
    # ``reported`` tracks whether the exception reached at least one
    # sink; if not, a plain-text report is dumped to error_stream below.
    reported = False
    exc_data = collector.collect_exception(*exc_info)
    # Accumulates reporter-failure messages to append to the response.
    extra_data = ''
    if error_email:
        rep = reporter.EmailReporter(
            to_addresses=error_email,
            from_address=error_email_from,
            smtp_server=smtp_server,
            smtp_username=smtp_username,
            smtp_password=smtp_password,
            smtp_use_tls=smtp_use_tls,
            subject_prefix=error_subject_prefix)
        rep_err = send_report(rep, exc_data, html=html)
        if rep_err:
            extra_data += rep_err
        else:
            reported = True
    if error_log:
        rep = reporter.LogReporter(
            filename=error_log)
        rep_err = send_report(rep, exc_data, html=html)
        if rep_err:
            extra_data += rep_err
        else:
            reported = True
    if show_exceptions_in_wsgi_errors:
        rep = reporter.FileReporter(
            file=error_stream)
        rep_err = send_report(rep, exc_data, html=html)
        if rep_err:
            extra_data += rep_err
        else:
            reported = True
    else:
        # Not showing full tracebacks in wsgi.errors: log one summary
        # line instead.
        line = ('Error - %s: %s\n'
                % (exc_data.exception_type, exc_data.exception_value))
        if six.PY3:
            line = line.encode('utf8')
        error_stream.write(line)
    if html:
        if debug_mode and simple_html_error:
            # XMLHttpRequest-style response: a bare fragment.
            return_error = formatter.format_html(
                exc_data, include_hidden_frames=False,
                include_reusable=False, show_extra_data=False)
            reported = True
        elif debug_mode and not simple_html_error:
            error_html = formatter.format_html(
                exc_data,
                include_hidden_frames=True,
                include_reusable=False)
            head_html = formatter.error_css + formatter.hide_display_js
            return_error = error_template(
                head_html, error_html, extra_data)
            # extra_data is embedded in the page; don't also write it
            # to the error stream below.
            extra_data = ''
            reported = True
        else:
            # Production mode: generic message only.
            msg = error_message or '''
            An error occurred. See the error logs for more information.
            (Turn debug on to display exception reports here)
            '''
            return_error = error_template('', msg, '')
    else:
        return_error = None
    if not reported and error_stream:
        # Nothing else captured the exception: dump a text report.
        err_report = formatter.format_text(exc_data, show_hidden_frames=True)
        err_report += '\n' + '-'*60 + '\n'
        error_stream.write(err_report)
    if extra_data:
        error_stream.write(extra_data)
    return return_error
+
def send_report(rep, exc_data, html=True):
    """
    Ask reporter ``rep`` to report ``exc_data``.

    Returns ``''`` on success.  On failure returns a description of
    the error that occurred while reporting, formatted as HTML or
    plain text depending on ``html``.
    """
    try:
        rep.report(exc_data)
    except:
        buf = StringIO()
        traceback.print_exc(file=buf)
        details = buf.getvalue()
        if html:
            return """
            <p>Additionally an error occurred while sending the %s report:

            <pre>%s</pre>
            </p>""" % (cgi.escape(str(rep)), details)
        return ("Additionally an error occurred while sending the "
                "%s report:\n%s" % (str(rep), details))
    return ''
+
def error_template(head_html, exception, extra):
    """
    Render the minimal (non-debug) HTML error page.

    ``head_html`` goes into <head>, ``exception`` and ``extra`` into
    the body after the "Server Error" heading.  Returns a string.
    """
    fields = (head_html, exception, extra)
    return '''
    <html>
    <head>
        <title>Server Error</title>
        %s
    </head>
    <body>
    <h1>Server Error</h1>
    %s
    %s
    </body>
    </html>''' % fields
+
def make_error_middleware(app, global_conf, **kw):
    """Paste entry point: wrap ``app`` in an ErrorMiddleware."""
    middleware = ErrorMiddleware(app, global_conf=global_conf, **kw)
    return middleware
+
# Reuse the "Settings:" section of the class docstring as the factory
# function's docstring, so Paste's config tooling can display the
# available options.
doc_lines = ErrorMiddleware.__doc__.splitlines(True)
for i in range(len(doc_lines)):
    if doc_lines[i].strip().startswith('Settings'):
        make_error_middleware.__doc__ = ''.join(doc_lines[i:])
        break
del i, doc_lines
diff --git a/paste/exceptions/formatter.py b/paste/exceptions/formatter.py
new file mode 100644
index 0000000..09309de
--- /dev/null
+++ b/paste/exceptions/formatter.py
@@ -0,0 +1,565 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Formatters for the exception data that comes from ExceptionCollector.
+"""
+# @@: TODO:
+# Use this: http://www.zope.org/Members/tino/VisualTraceback/VisualTracebackNews
+
+import cgi
+import six
+import re
+from paste.util import PySourceColor
+
def html_quote(s):
    """Return ``s`` (coerced with ``str()``) escaped for safe HTML display.

    Escapes ``&``, ``<``, ``>`` and ``"`` -- exactly the output of the
    deprecated ``cgi.escape(s, True)`` (``cgi.escape`` was removed in
    Python 3.8 and the ``cgi`` module itself in 3.13), so rendered
    reports are byte-identical to before.
    """
    s = str(s)
    s = s.replace("&", "&amp;")
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    s = s.replace('"', "&quot;")
    return s
+
class AbstractFormatter(object):
    # Base class for TextFormatter/HTMLFormatter: walks the collected
    # exception data (from ExceptionCollector) and delegates rendering
    # of each piece to format_* hooks that subclasses override.

    general_data_order = ['object', 'source_url']

    def __init__(self, show_hidden_frames=False,
                 include_reusable=True,
                 show_extra_data=True,
                 trim_source_paths=()):
        self.show_hidden_frames = show_hidden_frames
        self.trim_source_paths = trim_source_paths
        self.include_reusable = include_reusable
        self.show_extra_data = show_extra_data

    def format_collected_data(self, exc_data):
        """Render ``exc_data`` into a single string via the format_* hooks."""
        general_data = {}
        if self.show_extra_data:
            for name, value_list in exc_data.extra_data.items():
                # Extra-data keys may be (importance, title) tuples or
                # plain names (which default to 'normal' importance).
                if isinstance(name, tuple):
                    importance, title = name
                else:
                    importance, title = 'normal', name
                for value in value_list:
                    general_data[(importance, name)] = self.format_extra_data(
                        importance, title, value)
        lines = []
        frames = self.filter_frames(exc_data.frames)
        for frame in frames:
            sup = frame.supplement
            if sup:
                if sup.object:
                    general_data[('important', 'object')] = self.format_sup_object(
                        sup.object)
                if sup.source_url:
                    general_data[('important', 'source_url')] = self.format_sup_url(
                        sup.source_url)
                if sup.line:
                    lines.append(self.format_sup_line_pos(sup.line, sup.column))
                if sup.expression:
                    lines.append(self.format_sup_expression(sup.expression))
                if sup.warnings:
                    for warning in sup.warnings:
                        lines.append(self.format_sup_warning(warning))
                if sup.info:
                    lines.extend(self.format_sup_info(sup.info))
            if frame.supplement_exception:
                lines.append('Exception in supplement:')
                lines.append(self.quote_long(frame.supplement_exception))
            if frame.traceback_info:
                lines.append(self.format_traceback_info(frame.traceback_info))
            filename = frame.filename
            # Optionally shorten well-known path prefixes for readability.
            if filename and self.trim_source_paths:
                for path, repl in self.trim_source_paths:
                    if filename.startswith(path):
                        filename = repl + filename[len(path):]
                        break
            lines.append(self.format_source_line(filename or '?', frame))
            source = frame.get_source_line()
            long_source = frame.get_source_line(2)
            if source:
                lines.append(self.format_long_source(
                    source, long_source))
        etype = exc_data.exception_type
        if not isinstance(etype, six.string_types):
            etype = etype.__name__
        exc_info = self.format_exception_info(
            etype,
            exc_data.exception_value)
        data_by_importance = {'important': [], 'normal': [],
                              'supplemental': [], 'extra': []}
        for (importance, name), value in general_data.items():
            data_by_importance[importance].append(
                (name, value))
        for value in data_by_importance.values():
            value.sort()
        return self.format_combine(data_by_importance, lines, exc_info)

    def filter_frames(self, frames):
        """
        Removes any frames that should be hidden, according to the
        values of traceback_hide, self.show_hidden_frames, and the
        hidden status of the final frame.
        """
        if self.show_hidden_frames:
            return frames
        new_frames = []
        hidden = False
        for frame in frames:
            hide = frame.traceback_hide
            # @@: It would be nice to signal a warning if an unknown
            # hide string was used, but I'm not sure where to put
            # that warning.
            if hide == 'before':
                new_frames = []
                hidden = False
            elif hide == 'before_and_this':
                new_frames = []
                hidden = False
                continue
            elif hide == 'reset':
                hidden = False
            elif hide == 'reset_and_this':
                hidden = False
                continue
            elif hide == 'after':
                hidden = True
            elif hide == 'after_and_this':
                hidden = True
                continue
            elif hide:
                continue
            elif hidden:
                continue
            new_frames.append(frame)
        if frames[-1] not in new_frames:
            # We must include the last frame; that we don't indicates
            # that the error happened where something was "hidden",
            # so we just have to show everything
            return frames
        return new_frames

    def pretty_string_repr(self, s):
        """
        Formats the string as a triple-quoted string when it contains
        newlines.
        """
        if '\n' in s:
            # Promote the quote character of repr() to a triple quote
            # and show real newlines instead of the escaped form.
            s = repr(s)
            s = s[0]*3 + s[1:-1] + s[-1]*3
            s = s.replace('\\n', '\n')
            return s
        else:
            return repr(s)

    def long_item_list(self, lst):
        """
        Returns true if the list contains items that are long, and should
        be more nicely formatted.
        """
        how_many = 0
        for item in lst:
            if len(repr(item)) > 40:
                how_many += 1
                if how_many >= 3:
                    return True
        return False
+
class TextFormatter(AbstractFormatter):
    # Renders collected exception data as plain text: quoting and
    # emphasis are no-ops, and rendered lines are newline-joined.

    def quote(self, s):
        return s
    def quote_long(self, s):
        return s
    def emphasize(self, s):
        return s
    def format_sup_object(self, obj):
        return 'In object: %s' % self.emphasize(self.quote(repr(obj)))
    def format_sup_url(self, url):
        return 'URL: %s' % self.quote(url)
    def format_sup_line_pos(self, line, column):
        if column:
            return self.emphasize('Line %i, Column %i' % (line, column))
        else:
            return self.emphasize('Line %i' % line)
    def format_sup_expression(self, expr):
        return self.emphasize('In expression: %s' % self.quote(expr))
    def format_sup_warning(self, warning):
        return 'Warning: %s' % self.quote(warning)
    def format_sup_info(self, info):
        return [self.quote_long(info)]
    def format_source_line(self, filename, frame):
        return 'File %r, line %s in %s' % (
            filename, frame.lineno or '?', frame.name or '?')
    def format_long_source(self, source, long_source):
        # Plain text has no toggling, so only the short form is shown.
        return self.format_source(source)
    def format_source(self, source_line):
        return '  ' + self.quote(source_line.strip())
    def format_exception_info(self, etype, evalue):
        return self.emphasize(
            '%s: %s' % (self.quote(etype), self.quote(evalue)))
    def format_traceback_info(self, info):
        return info

    def format_combine(self, data_by_importance, lines, exc_info):
        # Important data first, then frames, the exception, then the rest.
        lines[:0] = [value for n, value in data_by_importance['important']]
        lines.append(exc_info)
        for name in 'normal', 'supplemental', 'extra':
            lines.extend([value for n, value in data_by_importance[name]])
        return self.format_combine_lines(lines)

    def format_combine_lines(self, lines):
        return '\n'.join(lines)

    def format_extra_data(self, importance, title, value):
        # Strings, dicts and long lists each get a tailored layout.
        if isinstance(value, str):
            s = self.pretty_string_repr(value)
            if '\n' in s:
                return '%s:\n%s' % (title, s)
            else:
                return '%s: %s' % (title, s)
        elif isinstance(value, dict):
            lines = ['\n', title, '-'*len(title)]
            items = value.items()
            items = sorted(items)
            for n, v in items:
                try:
                    v = repr(v)
                except Exception as e:
                    v = 'Cannot display: %s' % e
                v = truncate(v)
                lines.append('  %s: %s' % (n, v))
            return '\n'.join(lines)
        elif (isinstance(value, (list, tuple))
              and self.long_item_list(value)):
            parts = [truncate(repr(v)) for v in value]
            return '%s: [\n    %s]' % (
                title, ',\n    '.join(parts))
        else:
            return '%s: %s' % (title, truncate(repr(value)))
+
class HTMLFormatter(TextFormatter):
    """Renders collected exception data as an HTML fragment.

    Overrides the plain-text hooks with HTML-quoting equivalents; when
    ``include_reusable`` is set the fragment is prefixed with the
    module's shared CSS and JavaScript.
    """

    def quote(self, s):
        return html_quote(s)
    def quote_long(self, s):
        return '<pre>%s</pre>' % self.quote(s)
    def emphasize(self, s):
        return '<b>%s</b>' % s
    def format_sup_url(self, url):
        return 'URL: <a href="%s">%s</a>' % (url, url)
    def format_combine_lines(self, lines):
        return '<br>\n'.join(lines)
    def format_source_line(self, filename, frame):
        name = self.quote(frame.name or '?')
        # Bug fix: a second, plain-text ``return`` statement used to
        # follow this one; it was unreachable and has been removed.
        return 'Module <span class="module" title="%s">%s</span>:<b>%s</b> in <code>%s</code>' % (
            filename, frame.modname or '?', frame.lineno or '?',
            name)
    def format_long_source(self, source, long_source):
        # Short and long source are both emitted; switch_source() in the
        # companion JavaScript toggles between them.
        q_long_source = str2html(long_source, False, 4, True)
        q_source = str2html(source, True, 0, False)
        return ('<code style="display: none" class="source" source-type="long"><a class="switch_source" onclick="return switch_source(this, \'long\')" href="#">&lt;&lt;&nbsp; </a>%s</code>'
                '<code class="source" source-type="short"><a onclick="return switch_source(this, \'short\')" class="switch_source" href="#">&gt;&gt;&nbsp; </a>%s</code>'
                % (q_long_source,
                   q_source))
    def format_source(self, source_line):
        return '&nbsp;&nbsp;<code class="source">%s</code>' % self.quote(source_line.strip())
    def format_traceback_info(self, info):
        return '<pre>%s</pre>' % self.quote(info)

    def format_extra_data(self, importance, title, value):
        # Strings, dicts and long lists each get a tailored HTML layout.
        if isinstance(value, str):
            s = self.pretty_string_repr(value)
            if '\n' in s:
                return '%s:<br><pre>%s</pre>' % (title, self.quote(s))
            else:
                return '%s: <tt>%s</tt>' % (title, self.quote(s))
        elif isinstance(value, dict):
            return self.zebra_table(title, value)
        elif (isinstance(value, (list, tuple))
              and self.long_item_list(value)):
            return '%s: <tt>[<br>\n&nbsp; &nbsp; %s]</tt>' % (
                title, ',<br>&nbsp; &nbsp; '.join(map(self.quote, map(repr, value))))
        else:
            return '%s: <tt>%s</tt>' % (title, self.quote(repr(value)))

    def format_combine(self, data_by_importance, lines, exc_info):
        lines[:0] = [value for n, value in data_by_importance['important']]
        lines.append(exc_info)
        for name in 'normal', 'supplemental':
            lines.extend([value for n, value in data_by_importance[name]])
        if data_by_importance['extra']:
            # 'extra' data goes in a hidden <div> toggled by show_button().
            lines.append(
                '<script type="text/javascript">\nshow_button(\'extra_data\', \'extra data\');\n</script>\n' +
                '<div id="extra_data" class="hidden-data">\n')
            lines.extend([value for n, value in data_by_importance['extra']])
            lines.append('</div>')
        text = self.format_combine_lines(lines)
        if self.include_reusable:
            return error_css + hide_display_js + text
        else:
            # Usually because another error is already on this page,
            # and so the js & CSS are unneeded
            return text

    def zebra_table(self, title, rows, table_class="variables"):
        """Render ``rows`` (a dict or pair sequence) as an alternating-row table."""
        if isinstance(rows, dict):
            rows = rows.items()
            rows = sorted(rows)
        table = ['<table class="%s">' % table_class,
                 '<tr class="header"><th colspan="2">%s</th></tr>'
                 % self.quote(title)]
        odd = False
        for name, value in rows:
            try:
                value = repr(value)
            except Exception as e:
                value = 'Cannot print: %s' % e
            odd = not odd
            table.append(
                '<tr class="%s"><td>%s</td>'
                % (odd and 'odd' or 'even', self.quote(name)))
            table.append(
                '<td><tt>%s</tt></td></tr>'
                % make_wrappable(self.quote(truncate(value))))
        table.append('</table>')
        return '\n'.join(table)
+
+hide_display_js = r'''
+<script type="text/javascript">
+function hide_display(id) {
+ var el = document.getElementById(id);
+ if (el.className == "hidden-data") {
+ el.className = "";
+ return true;
+ } else {
+ el.className = "hidden-data";
+ return false;
+ }
+}
+document.write('<style type="text/css">\n');
+document.write('.hidden-data {display: none}\n');
+document.write('</style>\n');
+function show_button(toggle_id, name) {
+ document.write('<a href="#' + toggle_id
+ + '" onclick="javascript:hide_display(\'' + toggle_id
+ + '\')" class="button">' + name + '</a><br>');
+}
+
+function switch_source(el, hide_type) {
+ while (el) {
+ if (el.getAttribute &&
+ el.getAttribute('source-type') == hide_type) {
+ break;
+ }
+ el = el.parentNode;
+ }
+ if (! el) {
+ return false;
+ }
+ el.style.display = 'none';
+ if (hide_type == 'long') {
+ while (el) {
+ if (el.getAttribute &&
+ el.getAttribute('source-type') == 'short') {
+ break;
+ }
+ el = el.nextSibling;
+ }
+ } else {
+ while (el) {
+ if (el.getAttribute &&
+ el.getAttribute('source-type') == 'long') {
+ break;
+ }
+ el = el.previousSibling;
+ }
+ }
+ if (el) {
+ el.style.display = '';
+ }
+ return false;
+}
+
+</script>'''
+
+
+error_css = """
+<style type="text/css">
+body {
+ font-family: Helvetica, sans-serif;
+}
+
+table {
+ width: 100%;
+}
+
+tr.header {
+ background-color: #006;
+ color: #fff;
+}
+
+tr.even {
+ background-color: #ddd;
+}
+
+table.variables td {
+ vertical-align: top;
+ overflow: auto;
+}
+
+a.button {
+ background-color: #ccc;
+ border: 2px outset #aaa;
+ color: #000;
+ text-decoration: none;
+}
+
+a.button:hover {
+ background-color: #ddd;
+}
+
+code.source {
+ color: #006;
+}
+
+a.switch_source {
+ color: #090;
+ text-decoration: none;
+}
+
+a.switch_source:hover {
+ background-color: #ddd;
+}
+
+.source-highlight {
+ background-color: #ff9;
+}
+
+</style>
+"""
+
def format_html(exc_data, include_hidden_frames=False, **ops):
    """Format ``exc_data`` as HTML (options are passed to HTMLFormatter).

    With ``include_hidden_frames`` the short report is followed by the
    full traceback and a text version, each in a hidden toggleable
    section.
    """
    if not include_hidden_frames:
        return HTMLFormatter(**ops).format_collected_data(exc_data)
    short_er = format_html(exc_data, show_hidden_frames=False, **ops)
    # @@: This should have a way of seeing if the previous traceback
    # was actually trimmed at all
    ops['include_reusable'] = False
    ops['show_extra_data'] = False
    long_er = format_html(exc_data, show_hidden_frames=True, **ops)
    text_er = format_text(exc_data, show_hidden_frames=True, **ops)
    return """
    %s
    <br>
    <script type="text/javascript">
    show_button('full_traceback', 'full traceback')
    </script>
    <div id="full_traceback" class="hidden-data">
    %s
    </div>
    <br>
    <script type="text/javascript">
    show_button('text_version', 'text version')
    </script>
    <div id="text_version" class="hidden-data">
    <textarea style="width: 100%%" rows=10 cols=60>%s</textarea>
    </div>
    """ % (short_er, long_er, cgi.escape(text_er))
+
def format_text(exc_data, **ops):
    """Format ``exc_data`` as plain text (options are passed to TextFormatter)."""
    return TextFormatter(**ops).format_collected_data(exc_data)
+
+whitespace_re = re.compile(r' +')
+pre_re = re.compile(r'</?pre.*?>')
+error_re = re.compile(r'<h3>ERROR: .*?</h3>')
+
def str2html(src, strip=False, indent_subsequent=0,
             highlight_inner=False):
    """
    Convert a string to HTML.  Try to be really safe about it,
    returning a quoted version of the string if nothing else works.
    """
    try:
        return _str2html(src, strip=strip,
                         indent_subsequent=indent_subsequent,
                         highlight_inner=highlight_inner)
    except:
        # Deliberate catch-all: colorizing is cosmetic and must never
        # break error reporting itself.
        return html_quote(src)
+
def _str2html(src, strip=False, indent_subsequent=0,
              highlight_inner=False):
    """Colorize ``src`` as HTML via PySourceColor; may raise (see str2html)."""
    if strip:
        src = src.strip()
    orig_src = src
    try:
        # Strip PySourceColor's wrapper markup so only the colorized
        # code itself remains.
        src = PySourceColor.str2html(src, form='snip')
        src = error_re.sub('', src)
        src = pre_re.sub('', src)
        src = re.sub(r'^[\n\r]{0,1}', '', src)
        src = re.sub(r'[\n\r]{0,1}$', '', src)
    except:
        src = html_quote(orig_src)
    lines = src.splitlines()
    if len(lines) == 1:
        return lines[0]
    indent = ' '*indent_subsequent
    for i in range(1, len(lines)):
        lines[i] = indent+lines[i]
        # Highlight the middle line -- the line the frame points at.
        # NOTE(review): on Python 3 ``len(lines)/2`` is a float, so for
        # odd line counts the equality never holds and nothing is
        # highlighted -- probably should be ``//``; confirm intent.
        if highlight_inner and i == len(lines)/2:
            lines[i] = '<span class="source-highlight">%s</span>' % lines[i]
    src = '<br>\n'.join(lines)
    # Preserve runs of spaces in HTML while leaving one real space so
    # the browser can still wrap.
    src = whitespace_re.sub(
        lambda m: '&nbsp;'*(len(m.group(0))-1) + ' ', src)
    return src
+
def truncate(string, limit=1000):
    """Shorten *string* to at most *limit* characters.

    Overlong strings keep their head and tail, with '...' marking the
    elided middle; anything within the limit is returned unchanged.
    """
    if len(string) <= limit:
        return string
    return string[:limit - 20] + '...' + string[-17:]
+
def make_wrappable(html, wrap_limit=60,
                   split_on=';?&@!$#-/\\"\''):
    """Insert <wbr> break opportunities into overlong words of *html*."""
    # Currently using <wbr>, maybe should use &#8203;
    # http://www.cs.tut.fi/~jkorpela/html/nobr.html
    if len(html) <= wrap_limit:
        return html
    pieces = []
    for token in html.split():
        broken = ''
        while len(token) > wrap_limit:
            for sep in split_on:
                if sep in token:
                    # Break after the first separator found.
                    head, token = token.split(sep, 1)
                    broken += head + sep + '<wbr>'
                    break
            else:
                # No natural break point: chop at fixed intervals.
                for start in range(0, len(token), wrap_limit):
                    broken += token[start:start + wrap_limit] + '<wbr>'
                token = ''
        pieces.append(broken + token)
    return ' '.join(pieces)
+
def make_pre_wrappable(html, wrap_limit=60,
                       split_on=';?&@!$#-/\\"\''):
    """
    Like ``make_wrappable()`` but intended for text that will
    go in a ``<pre>`` block, so wrap on a line-by-line basis.

    Lines longer than ``wrap_limit`` have ``<wbr>`` inserted at the
    first character of ``split_on`` found in the line.
    """
    lines = html.splitlines()
    new_lines = []
    for line in lines:
        if len(line) > wrap_limit:
            for char in split_on:
                if char in line:
                    # NOTE(review): unlike make_wrappable(), this drops
                    # the separator character itself -- confirm whether
                    # that is intended.
                    parts = line.split(char)
                    line = '<wbr>'.join(parts)
                    break
        new_lines.append(line)
    # Bug fix: previously joined ``lines`` (the unmodified input),
    # which made the whole function a no-op.
    return '\n'.join(new_lines)
diff --git a/paste/exceptions/reporter.py b/paste/exceptions/reporter.py
new file mode 100644
index 0000000..7c0c266
--- /dev/null
+++ b/paste/exceptions/reporter.py
@@ -0,0 +1,141 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+from email.mime.text import MIMEText
+from email.mime.multipart import MIMEMultipart
+import smtplib
+import time
+try:
+ from socket import sslerror
+except ImportError:
+ sslerror = None
+from paste.exceptions import formatter
+
class Reporter(object):
    """Base class for exception reporters.

    Keyword arguments override same-named class attributes; unknown
    names raise TypeError.  Subclasses validate their configuration by
    overriding check_params().
    """

    def __init__(self, **conf):
        for key, val in conf.items():
            if not hasattr(self, key):
                raise TypeError(
                    "The keyword argument %s was not expected"
                    % key)
            setattr(self, key, val)
        self.check_params()

    def check_params(self):
        """Hook for subclasses to validate configuration; default: no-op."""
        pass

    def format_date(self, exc_data):
        """Render the exception's date as a locale-appropriate string."""
        return time.strftime('%c', exc_data.date)

    def format_html(self, exc_data, **kw):
        """Delegate to formatter.format_html()."""
        return formatter.format_html(exc_data, **kw)

    def format_text(self, exc_data, **kw):
        """Delegate to formatter.format_text()."""
        return formatter.format_text(exc_data, **kw)
+
class EmailReporter(Reporter):
    """Send exception reports by email via SMTP.

    Required settings: ``to_addresses`` (a string or list of strings)
    and ``from_address``.  Optional: ``smtp_server``,
    ``smtp_username``/``smtp_password``, ``smtp_use_tls`` and
    ``subject_prefix``.
    """

    to_addresses = None
    from_address = None
    smtp_server = 'localhost'
    smtp_username = None
    smtp_password = None
    smtp_use_tls = False
    subject_prefix = ''

    def report(self, exc_data):
        """Assemble and send the report, using TLS/login if configured."""
        msg = self.assemble_email(exc_data)
        server = smtplib.SMTP(self.smtp_server)
        if self.smtp_use_tls:
            server.ehlo()
            server.starttls()
            server.ehlo()
        if self.smtp_username and self.smtp_password:
            server.login(self.smtp_username, self.smtp_password)
        server.sendmail(self.from_address,
                        self.to_addresses, msg.as_string())
        try:
            server.quit()
        except sslerror:
            # sslerror is raised in tls connections on closing sometimes
            pass

    def check_params(self):
        if not self.to_addresses:
            raise ValueError("You must set to_addresses")
        if not self.from_address:
            raise ValueError("You must set from_address")
        # Bug fix: ``unicode`` does not exist on Python 3 (and this
        # module does not import six), so the old
        # ``isinstance(..., (str, unicode))`` raised NameError there.
        # Normalize a single address string to a list portably.
        try:
            string_types = basestring
        except NameError:
            string_types = str
        if isinstance(self.to_addresses, string_types):
            self.to_addresses = [self.to_addresses]

    def assemble_email(self, exc_data):
        """Build a multipart/alternative message: text, short HTML, full HTML."""
        short_html_version = self.format_html(
            exc_data, show_hidden_frames=False)
        long_html_version = self.format_html(
            exc_data, show_hidden_frames=True)
        text_version = self.format_text(
            exc_data, show_hidden_frames=False)
        msg = MIMEMultipart()
        msg.set_type('multipart/alternative')
        msg.preamble = msg.epilogue = ''
        text_msg = MIMEText(text_version)
        text_msg.set_type('text/plain')
        text_msg.set_param('charset', 'ASCII')
        msg.attach(text_msg)
        html_msg = MIMEText(short_html_version)
        html_msg.set_type('text/html')
        # @@: Correct character set?
        html_msg.set_param('charset', 'UTF-8')
        html_long = MIMEText(long_html_version)
        html_long.set_type('text/html')
        html_long.set_param('charset', 'UTF-8')
        msg.attach(html_msg)
        msg.attach(html_long)
        subject = '%s: %s' % (exc_data.exception_type,
                              formatter.truncate(str(exc_data.exception_value)))
        msg['Subject'] = self.subject_prefix + subject
        msg['From'] = self.from_address
        msg['To'] = ', '.join(self.to_addresses)
        return msg
+
class LogReporter(Reporter):
    """Append formatted exception reports to a log file on disk."""

    filename = None             # path of the log file (required)
    show_hidden_frames = True   # include frames marked as hidden

    def check_params(self):
        assert self.filename is not None, (
            "You must give a filename")

    def report(self, exc_data):
        """Append the text report plus a separator line to the file."""
        text = self.format_text(
            exc_data, show_hidden_frames=self.show_hidden_frames)
        # ``with`` guarantees the file is closed even if write() fails
        # (replaces the old try/finally pair).
        with open(self.filename, 'a') as f:
            f.write(text + '\n' + '-'*60 + '\n')
+
class FileReporter(Reporter):
    # Write exception reports to an already-open file-like object
    # (e.g. a stream); the caller owns and closes the object.

    file = None                 # file-like object (required)
    show_hidden_frames = True   # include frames marked as hidden

    def check_params(self):
        assert self.file is not None, (
            "You must give a file object")

    def report(self, exc_data):
        # Append the text report plus a separator line.
        text = self.format_text(
            exc_data, show_hidden_frames=self.show_hidden_frames)
        self.file.write(text + '\n' + '-'*60 + '\n')
+
class WSGIAppReporter(Reporter):
    # A WSGI application that always responds with the HTML report for
    # one fixed exception (given at construction time).

    def __init__(self, exc_data):
        # Deliberately does not call Reporter.__init__: no settings.
        self.exc_data = exc_data

    def __call__(self, environ, start_response):
        start_response('500 Server Error', [('Content-type', 'text/html')])
        return [formatter.format_html(self.exc_data)]
diff --git a/paste/exceptions/serial_number_generator.py b/paste/exceptions/serial_number_generator.py
new file mode 100644
index 0000000..3f80107
--- /dev/null
+++ b/paste/exceptions/serial_number_generator.py
@@ -0,0 +1,129 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Creates a human-readable identifier, using numbers and digits,
+avoiding ambiguous numbers and letters. hash_identifier can be used
+to create compact representations that are unique for a certain string
+(or concatenation of strings)
+"""
+
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import md5
+
+import six
+
# Digit alphabet without easily-confused characters (no 0/o, 1/l/i, s/5).
good_characters = "23456789abcdefghjkmnpqrtuvwxyz"

base = len(good_characters)

# Integer types for both Python lines; removes the hard dependency on
# six for this trivial check (the rest of the module still uses six).
try:
    _integer_types = (int, long)   # Python 2: plain int and long
except NameError:
    _integer_types = (int,)        # Python 3

def make_identifier(number):
    """
    Encodes a number as an identifier.

    The number is expressed in ``base`` using ``good_characters`` as
    digits, least-significant digit first (e.g. ``1000`` -> ``'c53'``;
    ``0`` encodes to the empty string).

    Raises ValueError for non-integers and for negative numbers.
    """
    if not isinstance(number, _integer_types):
        raise ValueError(
            "You can only make identifiers out of integers (not %r)"
            % number)
    if number < 0:
        raise ValueError(
            "You cannot make identifiers out of negative numbers: %r"
            % number)
    result = []
    while number:
        number, digit = divmod(number, base)
        result.append(good_characters[digit])
    return ''.join(result)
+
def hash_identifier(s, length, pad=True, hasher=md5, prefix='',
                    group=None, upper=False):
    """
    Hashes the string (with the given hashing module), then turns that
    hash into an identifier of the given length (using modulo to
    reduce the length of the identifier).  If ``pad`` is False, then
    the minimum-length identifier will be used; otherwise the
    identifier will be padded with 0's as necessary.

    ``prefix`` will be added last, and does not count towards the
    target length.  ``group`` will group the characters with ``-`` in
    the given lengths, and also does not count towards the target
    length.  E.g., ``group=4`` will cause a identifier like
    ``a5f3-hgk3-asdf``.  Grouping occurs before the prefix.
    """
    if not callable(hasher):
        # Accept sha/md5 modules as well as callables
        hasher = hasher.new
    if length > 26 and hasher is md5:
        raise ValueError(
            "md5 cannot create hashes longer than 26 characters in "
            "length (you gave %s)" % length)
    # Normalize the input to bytes before hashing (handles text,
    # bytes, and arbitrary objects via str()).
    if isinstance(s, six.text_type):
        s = s.encode('utf-8')
    elif not isinstance(s, six.binary_type):
        s = str(s)
        if six.PY3:
            s = s.encode('utf-8')
    h = hasher(s)
    bin_hash = h.digest()
    # Fold the digest bytes into one number modulo base**length, then
    # encode it with the safe alphabet.
    modulo = base ** length
    number = 0
    for c in list(bin_hash):
        number = (number * 256 + six.byte2int([c])) % modulo
    ident = make_identifier(number)
    if pad:
        # good_characters[0] plays the role of the '0' digit.
        ident = good_characters[0]*(length-len(ident)) + ident
    if group:
        # Insert '-' every ``group`` characters, from the right.
        parts = []
        while ident:
            parts.insert(0, ident[-group:])
            ident = ident[:-group]
        ident = '-'.join(parts)
    if upper:
        ident = ident.upper()
    return prefix + ident
+
+# doctest tests:
+__test__ = {
+ 'make_identifier': """
+ >>> make_identifier(0)
+ ''
+ >>> make_identifier(1000)
+ 'c53'
+ >>> make_identifier(-100)
+ Traceback (most recent call last):
+ ...
+ ValueError: You cannot make identifiers out of negative numbers: -100
+ >>> make_identifier('test')
+ Traceback (most recent call last):
+ ...
+ ValueError: You can only make identifiers out of integers (not 'test')
+ >>> make_identifier(1000000000000)
+ 'c53x9rqh3'
+ """,
+ 'hash_identifier': """
+ >>> hash_identifier(0, 5)
+ 'cy2dr'
+ >>> hash_identifier(0, 10)
+ 'cy2dr6rg46'
+ >>> hash_identifier('this is a test of a long string', 5)
+ 'awatu'
+ >>> hash_identifier(0, 26)
+ 'cy2dr6rg46cx8t4w2f3nfexzk4'
+ >>> hash_identifier(0, 30)
+ Traceback (most recent call last):
+ ...
+ ValueError: md5 cannot create hashes longer than 26 characters in length (you gave 30)
+ >>> hash_identifier(0, 10, group=4)
+ 'cy-2dr6-rg46'
+ >>> hash_identifier(0, 10, group=4, upper=True, prefix='M-')
+ 'M-CY-2DR6-RG46'
+ """}
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
+
diff --git a/paste/fileapp.py b/paste/fileapp.py
new file mode 100644
index 0000000..e18281a
--- /dev/null
+++ b/paste/fileapp.py
@@ -0,0 +1,356 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Ian Bicking, Clark C. Evans and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+This module handles sending static content such as in-memory data or
+files. At this time it has cache helpers and understands the
+if-modified-since request header.
+"""
+
+import os, time, mimetypes, zipfile, tarfile
+from paste.httpexceptions import *
+from paste.httpheaders import *
+
+CACHE_SIZE = 4096
+BLOCK_SIZE = 4096 * 16
+
+__all__ = ['DataApp', 'FileApp', 'DirectoryApp', 'ArchiveStore']
+
class DataApp(object):
    """
    Returns an application that will send content in a single chunk,
    this application has support for setting cache-control and for
    responding to conditional (or HEAD) requests.

    Constructor Arguments:

        ``content``     the content being sent to the client

        ``headers``     the headers to send /w the response

        The remaining ``kwargs`` correspond to headers, where the
        underscore is replaced with a dash.  These values are only
        added to the headers if they are not already provided; thus,
        they can be used for default values.  Examples include, but
        are not limited to:

            ``content_type``
            ``content_encoding``
            ``content_location``

    ``cache_control()``

        This method provides validated construction of the ``Cache-Control``
        header as well as providing for automated filling out of the
        ``EXPIRES`` header for HTTP/1.0 clients.

    ``set_content()``

        This method provides a mechanism to set the content after the
        application has been constructed.  This method does things
        like changing ``Last-Modified`` and ``Content-Length`` headers.

    """

    allowed_methods = ('GET', 'HEAD')

    def __init__(self, content, headers=None, allowed_methods=None,
                 **kwargs):
        assert isinstance(headers, (type(None), list))
        self.expires = None
        self.content = None
        self.content_length = None
        self.last_modified = 0
        if allowed_methods is not None:
            self.allowed_methods = allowed_methods
        self.headers = headers or []
        # Keyword arguments become default header values (underscore
        # maps to dash, e.g. content_type -> Content-Type).
        for (k, v) in kwargs.items():
            header = get_header(k)
            header.update(self.headers, v)
        ACCEPT_RANGES.update(self.headers, bytes=True)
        if not CONTENT_TYPE(self.headers):
            CONTENT_TYPE.update(self.headers)
        if content is not None:
            self.set_content(content)

    def cache_control(self, **kwargs):
        # Returns self so calls can be chained fluently.
        self.expires = CACHE_CONTROL.apply(self.headers, **kwargs) or None
        return self

    def set_content(self, content, last_modified=None):
        """Replace the body; refreshes Last-Modified and Content-Length."""
        assert content is not None
        if last_modified is None:
            self.last_modified = time.time()
        else:
            self.last_modified = last_modified
        self.content = content
        self.content_length = len(content)
        LAST_MODIFIED.update(self.headers, time=self.last_modified)
        return self

    def content_disposition(self, **kwargs):
        # Returns self so calls can be chained fluently.
        CONTENT_DISPOSITION.apply(self.headers, **kwargs)
        return self

    def __call__(self, environ, start_response):
        method = environ['REQUEST_METHOD'].upper()
        if method not in self.allowed_methods:
            exc = HTTPMethodNotAllowed(
                'You cannot %s a file' % method,
                headers=[('Allow', ','.join(self.allowed_methods))])
            return exc(environ, start_response)
        return self.get(environ, start_response)

    def calculate_etag(self):
        # Weakly unique per (mtime, size) pair.
        return '"%s-%s"' % (self.last_modified, self.content_length)

    def get(self, environ, start_response):
        """Handle a GET: conditional requests, ranges, then the body.

        Returns either a list (complete response body) or a
        ``(lower, content_length)`` tuple meaning "stream this slice
        yourself" -- FileApp.get() relies on the tuple form.
        """
        headers = self.headers[:]
        current_etag = self.calculate_etag()
        ETAG.update(headers, current_etag)
        if self.expires is not None:
            EXPIRES.update(headers, delta=self.expires)

        try:
            client_etags = IF_NONE_MATCH.parse(environ)
            if client_etags:
                for etag in client_etags:
                    if etag == current_etag or etag == '*':
                        # horribly inefficient, n^2 performance, yuck!
                        for head in list_headers(entity=True):
                            head.delete(headers)
                        start_response('304 Not Modified', headers)
                        return [b'']
        except HTTPBadRequest as exce:
            return exce.wsgi_application(environ, start_response)

        # If we get If-None-Match and If-Modified-Since, and
        # If-None-Match doesn't match, then we should not try to
        # figure out If-Modified-Since (which has 1-second granularity
        # and just isn't as accurate)
        if not client_etags:
            try:
                client_clock = IF_MODIFIED_SINCE.parse(environ)
                if (client_clock is not None
                    and client_clock >= int(self.last_modified)):
                    # horribly inefficient, n^2 performance, yuck!
                    for head in list_headers(entity=True):
                        head.delete(headers)
                    start_response('304 Not Modified', headers)
                    return [b''] # empty body
            except HTTPBadRequest as exce:
                return exce.wsgi_application(environ, start_response)

        # Default to the whole body; honor a single bytes range if given.
        (lower, upper) = (0, self.content_length - 1)
        range = RANGE.parse(environ)
        if range and 'bytes' == range[0] and 1 == len(range[1]):
            (lower, upper) = range[1][0]
            upper = upper or (self.content_length - 1)
            if upper >= self.content_length or lower > upper:
                return HTTPRequestRangeNotSatisfiable((
                    "Range request was made beyond the end of the content,\r\n"
                    "which is %s long.\r\n  Range: %s\r\n") % (
                        self.content_length, RANGE(environ))
                ).wsgi_application(environ, start_response)

        content_length = upper - lower + 1
        CONTENT_RANGE.update(headers, first_byte=lower, last_byte=upper,
                             total_length = self.content_length)
        CONTENT_LENGTH.update(headers, content_length)
        if range or content_length != self.content_length:
            start_response('206 Partial Content', headers)
        else:
            start_response('200 OK', headers)
        if self.content is not None:
            return [self.content[lower:upper+1]]
        # No in-memory content: tell the caller to stream the slice.
        return (lower, content_length)
+
class FileApp(DataApp):
    """
    Returns an application that will send the file at the given
    filename.  Adds a mime type based on ``mimetypes.guess_type()``.
    See DataApp for the arguments beyond ``filename``.
    """

    def __init__(self, filename, headers=None, **kwargs):
        self.filename = filename
        content_type, content_encoding = self.guess_type()
        if content_type and 'content_type' not in kwargs:
            kwargs['content_type'] = content_type
        if content_encoding and 'content_encoding' not in kwargs:
            kwargs['content_encoding'] = content_encoding
        DataApp.__init__(self, None, headers, **kwargs)

    def guess_type(self):
        # Hook so subclasses can override mime-type detection.
        return mimetypes.guess_type(self.filename)

    def update(self, force=False):
        """Re-stat the file; refresh the cached body if mtime changed.

        Small files (< CACHE_SIZE) are cached in memory; larger files
        are streamed on each request.
        """
        stat = os.stat(self.filename)
        if not force and stat.st_mtime == self.last_modified:
            return
        self.last_modified = stat.st_mtime
        if stat.st_size < CACHE_SIZE:
            fh = open(self.filename,"rb")
            self.set_content(fh.read(), stat.st_mtime)
            fh.close()
        else:
            self.content = None
            self.content_length = stat.st_size
            # This is updated automatically if self.set_content() is
            # called
            LAST_MODIFIED.update(self.headers, time=self.last_modified)

    def get(self, environ, start_response):
        is_head = environ['REQUEST_METHOD'].upper() == 'HEAD'
        if 'max-age=0' in CACHE_CONTROL(environ).lower():
            self.update(force=True) # RFC 2616 13.2.6
        else:
            self.update()
        if not self.content:
            if not os.path.exists(self.filename):
                exc = HTTPNotFound(
                    'The resource does not exist',
                    comment="No file at %r" % self.filename)
                return exc(environ, start_response)
            try:
                file = open(self.filename, 'rb')
            except (IOError, OSError) as e:
                exc = HTTPForbidden(
                    'You are not permitted to view this file (%s)' % e)
                return exc.wsgi_application(
                    environ, start_response)
        retval = DataApp.get(self, environ, start_response)
        if isinstance(retval, list):
            # cached content, exception, or not-modified
            # NOTE(review): if ``file`` was opened above it is not
            # closed on this path and is left to garbage collection.
            if is_head:
                return [b'']
            return retval
        # Tuple form: DataApp asks us to stream (lower, content_length).
        (lower, content_length) = retval
        if is_head:
            return [b'']
        file.seek(lower)
        file_wrapper = environ.get('wsgi.file_wrapper', None)
        if file_wrapper:
            return file_wrapper(file, BLOCK_SIZE)
        else:
            return _FileIter(file, size=content_length)
+
class _FileIter(object):
    """Iterate over a file's contents in fixed-size blocks.

    When ``size`` is given, at most that many bytes are yielded in
    total; close() closes the underlying file.
    """

    def __init__(self, file, block_size=None, size=None):
        self.file = file
        self.size = size
        self.block_size = block_size or BLOCK_SIZE

    def __iter__(self):
        return self

    def next(self):
        amount = self.block_size
        if self.size is not None:
            # Never read past the remaining byte budget.
            amount = min(amount, self.size)
            self.size -= amount
        data = self.file.read(amount)
        if not data:
            raise StopIteration
        return data
    __next__ = next

    def close(self):
        self.file.close()
+
+
class DirectoryApp(object):
    """
    Returns an application that dispatches requests to corresponding FileApps based on PATH_INFO.
    FileApp instances are cached. This app makes sure not to serve any files that are not in a subdirectory.
    To customize FileApp creation override ``DirectoryApp.make_fileapp``
    """

    def __init__(self, path):
        self.path = os.path.abspath(path)
        if not self.path.endswith(os.path.sep):
            self.path += os.path.sep
        assert os.path.isdir(self.path)
        # PATH_INFO -> FileApp (or HTTPForbidden) cache; grows per
        # distinct path requested.
        self.cached_apps = {}

    make_fileapp = FileApp

    def __call__(self, environ, start_response):
        path_info = environ['PATH_INFO']
        app = self.cached_apps.get(path_info)
        if app is None:
            path = os.path.join(self.path, path_info.lstrip('/'))
            # Reject anything that escapes the served directory
            # (e.g. '..' traversal) after normalization.
            if not os.path.normpath(path).startswith(self.path):
                app = HTTPForbidden()
            elif os.path.isfile(path):
                app = self.make_fileapp(path)
                self.cached_apps[path_info] = app
            else:
                app = HTTPNotFound(comment=path)
        return app(environ, start_response)
+
+
class ArchiveStore(object):
    """
    Returns an application that serves up a DataApp for items requested
    in a given zip or tar archive.

    Constructor Arguments:

    ``filepath`` the path to the archive being served

    ``cache_control()``

        This method provides validated construction of the ``Cache-Control``
        header as well as providing for automated filling out of the
        ``EXPIRES`` header for HTTP/1.0 clients.
    """

    def __init__(self, filepath):
        if zipfile.is_zipfile(filepath):
            self.archive = zipfile.ZipFile(filepath,"r")
        elif tarfile.is_tarfile(filepath):
            # NOTE(review): tarfile.TarFileCompat was removed in
            # Python 3 -- this branch can only work on Python 2.
            self.archive = tarfile.TarFileCompat(filepath,"r")
        else:
            raise AssertionError("filepath '%s' is not a zip or tar " % filepath)
        self.expires = None
        self.last_modified = time.time()
        self.cache = {}
        # Bug fix: cache_control() updates self.headers, but it was
        # never initialized, causing an AttributeError on first use.
        self.headers = []

    def cache_control(self, **kwargs):
        # Returns self so calls can be chained fluently.
        self.expires = CACHE_CONTROL.apply(self.headers, **kwargs) or None
        return self

    def __call__(self, environ, start_response):
        path = environ.get("PATH_INFO","")
        if path.startswith("/"):
            path = path[1:]
        # Each archive member gets a DataApp, built lazily and cached.
        application = self.cache.get(path)
        if application:
            return application(environ, start_response)
        try:
            info = self.archive.getinfo(path)
        except KeyError:
            exc = HTTPNotFound("The file requested, '%s', was not found." % path)
            return exc.wsgi_application(environ, start_response)
        if info.filename.endswith("/"):
            exc = HTTPNotFound("Path requested, '%s', is not a file." % path)
            return exc.wsgi_application(environ, start_response)
        content_type, content_encoding = mimetypes.guess_type(info.filename)
        # 'None' is not a valid content-encoding, so don't set the header if
        # mimetypes.guess_type returns None
        if content_encoding is not None:
            app = DataApp(None, content_type = content_type,
                          content_encoding = content_encoding)
        else:
            app = DataApp(None, content_type = content_type)
        app.set_content(self.archive.read(path),
                        time.mktime(info.date_time + (0,0,0)))
        self.cache[path] = app
        app.expires = self.expires
        return app(environ, start_response)
+
diff --git a/paste/fixture.py b/paste/fixture.py
new file mode 100644
index 0000000..363f119
--- /dev/null
+++ b/paste/fixture.py
@@ -0,0 +1,1755 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Routines for testing WSGI applications.
+
+Most interesting is the `TestApp <class-paste.fixture.TestApp.html>`_
+for testing WSGI applications, and the `TestFileEnvironment
+<class-paste.fixture.TestFileEnvironment.html>`_ class for testing the
+effects of command-line scripts.
+"""
+
+from __future__ import print_function
+
+import sys
+import random
+import mimetypes
+import time
+import os
+import shutil
+import smtplib
+import shlex
+import re
+import six
+import subprocess
+from six.moves import cStringIO as StringIO
+from six.moves.urllib.parse import urlencode
+from six.moves.urllib import parse as urlparse
+try:
+ # Python 3
+ from http.cookies import BaseCookie
+ from urllib.parse import splittype, splithost
+except ImportError:
+ # Python 2
+ from Cookie import BaseCookie
+ from urllib import splittype, splithost
+
+from paste import wsgilib
+from paste import lint
+from paste.response import HeaderDict
+
+def tempnam_no_warning(*args):
+    """
+    An os.tempnam with the warning turned off, because sometimes
+    you just need to use this and don't care about the stupid
+    security warning.
+    """
+    # NOTE(review): os.tempnam was removed in Python 3, so this raises
+    # AttributeError there -- confirm it is only reached on Python 2.
+    return os.tempnam(*args)
+
+class NoDefault(object):
+    # Sentinel marker meaning "no default supplied", so that None remains a
+    # usable default value (see TestResponse.header and Form.get).
+    pass
+
+def sorted(l):
+    # NOTE(review): shadows the sorted() builtin within this module.
+    # Returns a new sorted list from any iterable -- presumably kept for
+    # compatibility with very old Pythons; confirm before removing.
+    l = list(l)
+    l.sort()
+    return l
+
+class Dummy_smtplib(object):
+
+    # Test double that replaces smtplib.SMTP (via install()) and records the
+    # last message sent.  Only one live instance is allowed at a time,
+    # tracked in the ``existing`` class attribute.
+    existing = None
+
+    def __init__(self, server):
+        import warnings
+        warnings.warn(
+            'Dummy_smtplib is not maintained and is deprecated',
+            DeprecationWarning, 2)
+        # Enforce the single-live-instance contract.
+        assert not self.existing, (
+            "smtplib.SMTP() called again before Dummy_smtplib.existing.reset() "
+            "called.")
+        self.server = server
+        self.open = True
+        self.__class__.existing = self
+
+    def quit(self):
+        assert self.open, (
+            "Called %s.quit() twice" % self)
+        self.open = False
+
+    def sendmail(self, from_address, to_addresses, msg):
+        # Record the message instead of sending it.
+        self.from_address = from_address
+        self.to_addresses = to_addresses
+        self.message = msg
+
+    def install(cls):
+        # Monkeypatch the smtplib module so code under test gets this class.
+        smtplib.SMTP = cls
+
+    install = classmethod(install)
+
+    def reset(self):
+        assert not self.open, (
+            "SMTP connection not quit")
+        self.__class__.existing = None
+
+class AppError(Exception):
+    """Raised by TestApp when the application under test returns an
+    unexpected status or writes to wsgi.errors (see _check_status and
+    _check_errors)."""
+    pass
+
+class TestApp(object):
+
+    # for py.test
+    disabled = True
+
+    def __init__(self, app, namespace=None, relative_to=None,
+                 extra_environ=None, pre_request_hook=None,
+                 post_request_hook=None):
+        """
+        Wraps a WSGI application in a more convenient interface for
+        testing.
+
+        ``app`` may be an application, or a Paste Deploy app
+        URI, like ``'config:filename.ini#test'``.
+
+        ``namespace`` is a dictionary that will be written to (if
+        provided). This can be used with doctest or some other
+        system, and the variable ``res`` will be assigned everytime
+        you make a request (instead of returning the request).
+
+        ``relative_to`` is a directory, and filenames used for file
+        uploads are calculated relative to this. Also ``config:``
+        URIs that aren't absolute.
+
+        ``extra_environ`` is a dictionary of values that should go
+        into the environment for each request. These can provide a
+        communication channel with the application.
+
+        ``pre_request_hook`` is a function to be called prior to
+        making requests (such as ``post`` or ``get``). This function
+        must take one argument (the instance of the TestApp).
+
+        ``post_request_hook`` is a function, similar to
+        ``pre_request_hook``, to be called after requests are made.
+        """
+        if isinstance(app, (six.binary_type, six.text_type)):
+            from paste.deploy import loadapp
+            # @@: Should pick up relative_to from calling module's
+            # __file__
+            app = loadapp(app, relative_to=relative_to)
+        self.app = app
+        self.namespace = namespace
+        self.relative_to = relative_to
+        if extra_environ is None:
+            extra_environ = {}
+        self.extra_environ = extra_environ
+        self.pre_request_hook = pre_request_hook
+        self.post_request_hook = post_request_hook
+        self.reset()
+
+    def reset(self):
+        """
+        Resets the state of the application; currently just clears
+        saved cookies.
+        """
+        self.cookies = {}
+
+    def _make_environ(self):
+        # Each request starts from a fresh copy of extra_environ so one
+        # request cannot leak environ keys into the next.
+        environ = self.extra_environ.copy()
+        environ['paste.throw_errors'] = True
+        return environ
+
+    def get(self, url, params=None, headers=None, extra_environ=None,
+            status=None, expect_errors=False):
+        """
+        Get the given url (well, actually a path like
+        ``'/page.html'``).
+
+        ``params``:
+            A query string, or a dictionary that will be encoded
+            into a query string. You may also include a query
+            string on the ``url``.
+
+        ``headers``:
+            A dictionary of extra headers to send.
+
+        ``extra_environ``:
+            A dictionary of environmental variables that should
+            be added to the request.
+
+        ``status``:
+            The integer status code you expect (if not 200 or 3xx).
+            If you expect a 404 response, for instance, you must give
+            ``status=404`` or it will be an error. You can also give
+            a wildcard, like ``'3*'`` or ``'*'``.
+
+        ``expect_errors``:
+            If this is not true, then if anything is written to
+            ``wsgi.errors`` it will be an error. If it is true, then
+            non-200/3xx responses are also okay.
+
+        Returns a `response object
+        <class-paste.fixture.TestResponse.html>`_
+        """
+        if extra_environ is None:
+            extra_environ = {}
+        # Hide from py.test:
+        __tracebackhide__ = True
+        if params:
+            if not isinstance(params, (six.binary_type, six.text_type)):
+                params = urlencode(params, doseq=True)
+            # Append params to any query string already present on the url.
+            if '?' in url:
+                url += '&'
+            else:
+                url += '?'
+            url += params
+        environ = self._make_environ()
+        url = str(url)
+        if '?' in url:
+            url, environ['QUERY_STRING'] = url.split('?', 1)
+        else:
+            environ['QUERY_STRING'] = ''
+        self._set_headers(headers, environ)
+        environ.update(extra_environ)
+        req = TestRequest(url, environ, expect_errors)
+        return self.do_request(req, status=status)
+
+    def _gen_request(self, method, url, params=b'', headers=None, extra_environ=None,
+                     status=None, upload_files=None, expect_errors=False):
+        """
+        Do a generic request.
+        """
+        if headers is None:
+            headers = {}
+        if extra_environ is None:
+            extra_environ = {}
+        environ = self._make_environ()
+        # @@: Should this be all non-strings?
+        if isinstance(params, (list, tuple, dict)):
+            params = urlencode(params)
+        if hasattr(params, 'items'):
+            # Some other multi-dict like format
+            params = urlencode(params.items())
+        if six.PY3 and isinstance(params, six.text_type):
+            params = params.encode('utf8')
+        if upload_files:
+            # Re-parse the urlencoded params so they can be merged into the
+            # multipart body along with the uploaded files.
+            params = urlparse.parse_qsl(params, keep_blank_values=True)
+            content_type, params = self.encode_multipart(
+                params, upload_files)
+            environ['CONTENT_TYPE'] = content_type
+        elif params:
+            environ.setdefault('CONTENT_TYPE', 'application/x-www-form-urlencoded')
+        if '?' in url:
+            url, environ['QUERY_STRING'] = url.split('?', 1)
+        else:
+            environ['QUERY_STRING'] = ''
+        environ['CONTENT_LENGTH'] = str(len(params))
+        environ['REQUEST_METHOD'] = method
+        environ['wsgi.input'] = six.BytesIO(params)
+        self._set_headers(headers, environ)
+        environ.update(extra_environ)
+        req = TestRequest(url, environ, expect_errors)
+        return self.do_request(req, status=status)
+
+    def post(self, url, params=b'', headers=None, extra_environ=None,
+             status=None, upload_files=None, expect_errors=False):
+        """
+        Do a POST request. Very like the ``.get()`` method.
+        ``params`` are put in the body of the request.
+
+        ``upload_files`` is for file uploads. It should be a list of
+        ``[(fieldname, filename, file_content)]``. You can also use
+        just ``[(fieldname, filename)]`` and the file content will be
+        read from disk.
+
+        Returns a `response object
+        <class-paste.fixture.TestResponse.html>`_
+        """
+        return self._gen_request('POST', url, params=params, headers=headers,
+                                 extra_environ=extra_environ,status=status,
+                                 upload_files=upload_files,
+                                 expect_errors=expect_errors)
+
+    def put(self, url, params=b'', headers=None, extra_environ=None,
+            status=None, upload_files=None, expect_errors=False):
+        """
+        Do a PUT request. Very like the ``.get()`` method.
+        ``params`` are put in the body of the request.
+
+        ``upload_files`` is for file uploads. It should be a list of
+        ``[(fieldname, filename, file_content)]``. You can also use
+        just ``[(fieldname, filename)]`` and the file content will be
+        read from disk.
+
+        Returns a `response object
+        <class-paste.fixture.TestResponse.html>`_
+        """
+        return self._gen_request('PUT', url, params=params, headers=headers,
+                                 extra_environ=extra_environ,status=status,
+                                 upload_files=upload_files,
+                                 expect_errors=expect_errors)
+
+    def delete(self, url, params=b'', headers=None, extra_environ=None,
+               status=None, expect_errors=False):
+        """
+        Do a DELETE request. Very like the ``.get()`` method.
+        ``params`` are put in the body of the request.
+
+        Returns a `response object
+        <class-paste.fixture.TestResponse.html>`_
+        """
+        return self._gen_request('DELETE', url, params=params, headers=headers,
+                                 extra_environ=extra_environ,status=status,
+                                 upload_files=None, expect_errors=expect_errors)
+
+
+
+
+    def _set_headers(self, headers, environ):
+        """
+        Turn any headers into environ variables
+        """
+        if not headers:
+            return
+        for header, value in headers.items():
+            # Content-Type/Content-Length have dedicated CGI variables; all
+            # other headers get the standard HTTP_ prefix.
+            if header.lower() == 'content-type':
+                var = 'CONTENT_TYPE'
+            elif header.lower() == 'content-length':
+                var = 'CONTENT_LENGTH'
+            else:
+                var = 'HTTP_%s' % header.replace('-', '_').upper()
+            environ[var] = value
+
+    def encode_multipart(self, params, files):
+        """
+        Encodes a set of parameters (typically a name/value list) and
+        a set of files (a list of (name, filename, file_body)) into a
+        typical POST body, returning the (content_type, body).
+        """
+        boundary = '----------a_BoUnDaRy%s$' % random.random()
+        content_type = 'multipart/form-data; boundary=%s' % boundary
+        if six.PY3:
+            boundary = boundary.encode('ascii')
+
+        # Build the body as a list of lines, then join with CRLF per the
+        # multipart format.
+        lines = []
+        for key, value in params:
+            lines.append(b'--'+boundary)
+            line = 'Content-Disposition: form-data; name="%s"' % key
+            if six.PY3:
+                line = line.encode('utf8')
+            lines.append(line)
+            lines.append(b'')
+            line = value
+            if six.PY3 and isinstance(line, six.text_type):
+                line = line.encode('utf8')
+            lines.append(line)
+        for file_info in files:
+            key, filename, value = self._get_file_info(file_info)
+            lines.append(b'--'+boundary)
+            line = ('Content-Disposition: form-data; name="%s"; filename="%s"'
+                    % (key, filename))
+            if six.PY3:
+                line = line.encode('utf8')
+            lines.append(line)
+            fcontent = mimetypes.guess_type(filename)[0]
+            line = ('Content-Type: %s'
+                    % (fcontent or 'application/octet-stream'))
+            if six.PY3:
+                line = line.encode('utf8')
+            lines.append(line)
+            lines.append(b'')
+            lines.append(value)
+        lines.append(b'--' + boundary + b'--')
+        lines.append(b'')
+        body = b'\r\n'.join(lines)
+        return content_type, body
+
+    def _get_file_info(self, file_info):
+        # Normalize an upload_files entry to (fieldname, filename, content),
+        # reading the content from disk when only a filename was given.
+        if len(file_info) == 2:
+            # It only has a filename
+            filename = file_info[1]
+            if self.relative_to:
+                filename = os.path.join(self.relative_to, filename)
+            f = open(filename, 'rb')
+            content = f.read()
+            f.close()
+            return (file_info[0], filename, content)
+        elif len(file_info) == 3:
+            return file_info
+        else:
+            raise ValueError(
+                "upload_files need to be a list of tuples of (fieldname, "
+                "filename, filecontent) or (fieldname, filename); "
+                "you gave: %r"
+                % repr(file_info)[:100])
+
+    def do_request(self, req, status):
+        """
+        Executes the given request (``req``), with the expected
+        ``status``. Generally ``.get()`` and ``.post()`` are used
+        instead.
+        """
+        if self.pre_request_hook:
+            self.pre_request_hook(self)
+        __tracebackhide__ = True
+        if self.cookies:
+            # Serialize the saved cookies into a single Cookie header.
+            c = BaseCookie()
+            for name, value in self.cookies.items():
+                c[name] = value
+            hc = '; '.join(['='.join([m.key, m.value]) for m in c.values()])
+            req.environ['HTTP_COOKIE'] = hc
+        req.environ['paste.testing'] = True
+        req.environ['paste.testing_variables'] = {}
+        # Wrap with paste.lint middleware (WSGI conformance checks).
+        app = lint.middleware(self.app)
+        old_stdout = sys.stdout
+        out = CaptureStdout(old_stdout)
+        try:
+            # Capture anything the app prints; echo it to stderr afterwards.
+            sys.stdout = out
+            start_time = time.time()
+            raise_on_wsgi_error = not req.expect_errors
+            raw_res = wsgilib.raw_interactive(
+                app, req.url,
+                raise_on_wsgi_error=raise_on_wsgi_error,
+                **req.environ)
+            end_time = time.time()
+        finally:
+            sys.stdout = old_stdout
+            sys.stderr.write(out.getvalue())
+        res = self._make_response(raw_res, end_time - start_time)
+        res.request = req
+        # Copy any app-published testing variables onto the response,
+        # refusing to clobber existing attributes.
+        for name, value in req.environ['paste.testing_variables'].items():
+            if hasattr(res, name):
+                raise ValueError(
+                    "paste.testing_variables contains the variable %r, but "
+                    "the response object already has an attribute by that "
+                    "name" % name)
+            setattr(res, name, value)
+        if self.namespace is not None:
+            self.namespace['res'] = res
+        if not req.expect_errors:
+            self._check_status(status, res)
+            self._check_errors(res)
+        # Remember Set-Cookie values for subsequent requests.
+        res.cookies_set = {}
+        for header in res.all_headers('set-cookie'):
+            c = BaseCookie(header)
+            for key, morsel in c.items():
+                self.cookies[key] = morsel.value
+                res.cookies_set[key] = morsel.value
+        if self.post_request_hook:
+            self.post_request_hook(self)
+        if self.namespace is None:
+            # It's annoying to return the response in doctests, as it'll
+            # be printed, so we only return it if we couldn't assign
+            # it anywhere
+            return res
+
+    def _check_status(self, status, res):
+        # Validate the response status against the caller's expectation:
+        # '*' accepts anything, a list/tuple accepts any member, None accepts
+        # 200-399, otherwise an exact match is required.
+        __tracebackhide__ = True
+        if status == '*':
+            return
+        if isinstance(status, (list, tuple)):
+            if res.status not in status:
+                raise AppError(
+                    "Bad response: %s (not one of %s for %s)\n%s"
+                    % (res.full_status, ', '.join(map(str, status)),
+                       res.request.url, res.body))
+            return
+        if status is None:
+            if res.status >= 200 and res.status < 400:
+                return
+            body = res.body
+            if six.PY3:
+                body = body.decode('utf8', 'xmlcharrefreplace')
+            raise AppError(
+                "Bad response: %s (not 200 OK or 3xx redirect for %s)\n%s"
+                % (res.full_status, res.request.url,
+                   body))
+        if status != res.status:
+            raise AppError(
+                "Bad response: %s (not %s)" % (res.full_status, status))
+
+    def _check_errors(self, res):
+        # Anything the app wrote to wsgi.errors counts as a failure.
+        if res.errors:
+            raise AppError(
+                "Application had errors logged:\n%s" % res.errors)
+
+    def _make_response(self, resp, total_time):
+        # resp is the (status, headers, body, errors) tuple produced by
+        # wsgilib.raw_interactive.
+        status, headers, body, errors = resp
+        return TestResponse(self, status, headers, body, errors,
+                            total_time)
+
+class CaptureStdout(object):
+    """Tee-style file object: every write goes both to the real stream
+    (``actual``) and to an internal buffer readable via ``getvalue()``."""
+
+    def __init__(self, actual):
+        self.captured = StringIO()
+        self.actual = actual
+
+    def write(self, s):
+        self.captured.write(s)
+        self.actual.write(s)
+
+    def flush(self):
+        self.actual.flush()
+
+    def writelines(self, lines):
+        for item in lines:
+            self.write(item)
+
+    def getvalue(self):
+        # Returns only the captured copy, not the passthrough stream.
+        return self.captured.getvalue()
+
+class TestResponse(object):
+
+    # for py.test
+    disabled = True
+
+    # NOTE(review): because it follows ``disabled``, the string below is a
+    # no-op expression, not the class docstring (__doc__ stays None).
+    """
+    Instances of this class are return by `TestApp
+    <class-paste.fixture.TestApp.html>`_
+    """
+
+    def __init__(self, test_app, status, headers, body, errors,
+                 total_time):
+        self.test_app = test_app
+        # full_status is e.g. '200 OK'; status is just the integer part.
+        self.status = int(status.split()[0])
+        self.full_status = status
+        self.headers = headers
+        self.header_dict = HeaderDict.fromlist(self.headers)
+        self.body = body
+        self.errors = errors
+        self._normal_body = None
+        self.time = total_time
+        self._forms_indexed = None
+
+    def forms__get(self):
+        """
+        Returns a dictionary of ``Form`` objects. Indexes are both in
+        order (from zero) and by form id (if the form is given an id).
+        """
+        if self._forms_indexed is None:
+            self._parse_forms()
+        return self._forms_indexed
+
+    forms = property(forms__get,
+                     doc="""
+                     A list of <form>s found on the page (instances of
+                     `Form <class-paste.fixture.Form.html>`_)
+                     """)
+
+    def form__get(self):
+        forms = self.forms
+        if not forms:
+            raise TypeError(
+                "You used response.form, but no forms exist")
+        # Index 1 exists only when the page has two or more forms.
+        if 1 in forms:
+            # There is more than one form
+            raise TypeError(
+                "You used response.form, but more than one form exists")
+        return forms[0]
+
+    form = property(form__get,
+                    doc="""
+                    Returns a single `Form
+                    <class-paste.fixture.Form.html>`_ instance; it
+                    is an error if there are multiple forms on the
+                    page.
+                    """)
+
+    _tag_re = re.compile(r'<(/?)([:a-z0-9_\-]*)(.*?)>', re.S|re.I)
+
+    def _parse_forms(self):
+        # NOTE(review): self.body may be bytes on Python 3 (see __repr__ /
+        # __str__ which decode it) while _tag_re is a text pattern --
+        # confirm this path is only used with str bodies there.
+        forms = self._forms_indexed = {}
+        form_texts = []
+        started = None
+        for match in self._tag_re.finditer(self.body):
+            end = match.group(1) == '/'
+            tag = match.group(2).lower()
+            if tag != 'form':
+                continue
+            if end:
+                assert started, (
+                    "</form> unexpected at %s" % match.start())
+                form_texts.append(self.body[started:match.end()])
+                started = None
+            else:
+                assert not started, (
+                    "Nested form tags at %s" % match.start())
+                started = match.start()
+        # NOTE(review): typo in the runtime message below ("Danging" ->
+        # "Dangling"); left as-is since it is behavior-visible text.
+        assert not started, (
+            "Danging form: %r" % self.body[started:])
+        for i, text in enumerate(form_texts):
+            form = Form(self, text)
+            forms[i] = form
+            if form.id:
+                forms[form.id] = form
+
+    def header(self, name, default=NoDefault):
+        """
+        Returns the named header; an error if there is not exactly one
+        matching header (unless you give a default -- always an error
+        if there is more than one header)
+        """
+        found = None
+        for cur_name, value in self.headers:
+            if cur_name.lower() == name.lower():
+                assert not found, (
+                    "Ambiguous header: %s matches %r and %r"
+                    % (name, found, value))
+                found = value
+        if found is None:
+            if default is NoDefault:
+                raise KeyError(
+                    "No header found: %r (from %s)"
+                    % (name, ', '.join([n for n, v in self.headers])))
+            else:
+                return default
+        return found
+
+    def all_headers(self, name):
+        """
+        Gets all headers by the ``name``, returns as a list
+        """
+        found = []
+        for cur_name, value in self.headers:
+            if cur_name.lower() == name.lower():
+                found.append(value)
+        return found
+
+    def follow(self, **kw):
+        """
+        If this request is a redirect, follow that redirect. It
+        is an error if this is not a redirect response. Returns
+        another response object.
+        """
+        assert self.status >= 300 and self.status < 400, (
+            "You can only follow redirect responses (not %s)"
+            % self.full_status)
+        location = self.header('location')
+        type, rest = splittype(location)
+        host, path = splithost(rest)
+        # @@: We should test that it's not a remote redirect
+        return self.test_app.get(location, **kw)
+
+    def click(self, description=None, linkid=None, href=None,
+              anchor=None, index=None, verbose=False):
+        """
+        Click the link as described. Each of ``description``,
+        ``linkid``, and ``href`` are *patterns*, meaning that they are
+        either strings (regular expressions), compiled regular
+        expressions (objects with a ``search`` method), or callables
+        returning true or false.
+
+        All the given patterns are ANDed together:
+
+        * ``description`` is a pattern that matches the contents of the
+          anchor (HTML and all -- everything between ``<a...>`` and
+          ``</a>``)
+
+        * ``linkid`` is a pattern that matches the ``id`` attribute of
+          the anchor. It will receive the empty string if no id is
+          given.
+
+        * ``href`` is a pattern that matches the ``href`` of the anchor;
+          the literal content of that attribute, not the fully qualified
+          attribute.
+
+        * ``anchor`` is a pattern that matches the entire anchor, with
+          its contents.
+
+        If more than one link matches, then the ``index`` link is
+        followed. If ``index`` is not given and more than one link
+        matches, or if no link matches, then ``IndexError`` will be
+        raised.
+
+        If you give ``verbose`` then messages will be printed about
+        each link, and why it does or doesn't match. If you use
+        ``app.click(verbose=True)`` you'll see a list of all the
+        links.
+
+        You can use multiple criteria to essentially assert multiple
+        aspects about the link, e.g., where the link's destination is.
+        """
+        __tracebackhide__ = True
+        found_html, found_desc, found_attrs = self._find_element(
+            tag='a', href_attr='href',
+            href_extract=None,
+            content=description,
+            id=linkid,
+            href_pattern=href,
+            html_pattern=anchor,
+            index=index, verbose=verbose)
+        return self.goto(found_attrs['uri'])
+
+    def clickbutton(self, description=None, buttonid=None, href=None,
+                    button=None, index=None, verbose=False):
+        """
+        Like ``.click()``, except looks for link-like buttons.
+        This kind of button should look like
+        ``<button onclick="...location.href='url'...">``.
+        """
+        __tracebackhide__ = True
+        found_html, found_desc, found_attrs = self._find_element(
+            tag='button', href_attr='onclick',
+            href_extract=re.compile(r"location\.href='(.*?)'"),
+            content=description,
+            id=buttonid,
+            href_pattern=href,
+            html_pattern=button,
+            index=index, verbose=verbose)
+        return self.goto(found_attrs['uri'])
+
+    def _find_element(self, tag, href_attr, href_extract,
+                      content, id,
+                      href_pattern,
+                      html_pattern,
+                      index, verbose):
+        # Shared matcher behind click()/clickbutton(); returns the single
+        # (html, content, attrs) tuple selected by the ANDed patterns.
+        content_pat = _make_pattern(content)
+        id_pat = _make_pattern(id)
+        href_pat = _make_pattern(href_pattern)
+        html_pat = _make_pattern(html_pattern)
+
+        _tag_re = re.compile(r'<%s\s+(.*?)>(.*?)</%s>' % (tag, tag),
+                             re.I+re.S)
+
+        def printlog(s):
+            if verbose:
+                print(s)
+
+        found_links = []
+        total_links = 0
+        for match in _tag_re.finditer(self.body):
+            el_html = match.group(0)
+            el_attr = match.group(1)
+            el_content = match.group(2)
+            attrs = _parse_attrs(el_attr)
+            if verbose:
+                printlog('Element: %r' % el_html)
+            if not attrs.get(href_attr):
+                printlog('  Skipped: no %s attribute' % href_attr)
+                continue
+            el_href = attrs[href_attr]
+            if href_extract:
+                m = href_extract.search(el_href)
+                if not m:
+                    printlog("  Skipped: doesn't match extract pattern")
+                    continue
+                el_href = m.group(1)
+            attrs['uri'] = el_href
+            if el_href.startswith('#'):
+                printlog('  Skipped: only internal fragment href')
+                continue
+            if el_href.startswith('javascript:'):
+                printlog('  Skipped: cannot follow javascript:')
+                continue
+            total_links += 1
+            if content_pat and not content_pat(el_content):
+                printlog("  Skipped: doesn't match description")
+                continue
+            if id_pat and not id_pat(attrs.get('id', '')):
+                printlog("  Skipped: doesn't match id")
+                continue
+            if href_pat and not href_pat(el_href):
+                printlog("  Skipped: doesn't match href")
+                continue
+            if html_pat and not html_pat(el_html):
+                printlog("  Skipped: doesn't match html")
+                continue
+            printlog("  Accepted")
+            found_links.append((el_html, el_content, attrs))
+        if not found_links:
+            raise IndexError(
+                "No matching elements found (from %s possible)"
+                % total_links)
+        if index is None:
+            if len(found_links) > 1:
+                raise IndexError(
+                    "Multiple links match: %s"
+                    % ', '.join([repr(anc) for anc, d, attr in found_links]))
+            found_link = found_links[0]
+        else:
+            try:
+                found_link = found_links[index]
+            except IndexError:
+                raise IndexError(
+                    "Only %s (out of %s) links match; index %s out of range"
+                    % (len(found_links), total_links, index))
+        return found_link
+
+    def goto(self, href, method='get', **args):
+        """
+        Go to the (potentially relative) link ``href``, using the
+        given method (``'get'`` or ``'post'``) and any extra arguments
+        you want to pass to the ``app.get()`` or ``app.post()``
+        methods.
+
+        All hostnames and schemes will be ignored.
+        """
+        # self.request is attached by TestApp.do_request after construction.
+        scheme, host, path, query, fragment = urlparse.urlsplit(href)
+        # We drop scheme/host/fragment: everything goes back to test_app.
+        scheme = host = fragment = ''
+        href = urlparse.urlunsplit((scheme, host, path, query, fragment))
+        href = urlparse.urljoin(self.request.full_url, href)
+        method = method.lower()
+        assert method in ('get', 'post'), (
+            'Only "get" or "post" are allowed for method (you gave %r)'
+            % method)
+        if method == 'get':
+            method = self.test_app.get
+        else:
+            method = self.test_app.post
+        return method(href, **args)
+
+    _normal_body_regex = re.compile(br'[ \n\r\t]+')
+
+    def normal_body__get(self):
+        # Lazily computed, cached whitespace-collapsed copy of the body.
+        if self._normal_body is None:
+            self._normal_body = self._normal_body_regex.sub(
+                b' ', self.body)
+        return self._normal_body
+
+    normal_body = property(normal_body__get,
+                           doc="""
+                           Return the whitespace-normalized body
+                           """)
+
+    def __contains__(self, s):
+        """
+        A response 'contains' a string if it is present in the body
+        of the response. Whitespace is normalized when searching
+        for a string.
+        """
+        if not isinstance(s, (six.binary_type, six.text_type)):
+            s = str(s)
+        if isinstance(s, six.text_type):
+            ## FIXME: we don't know that this response uses utf8:
+            s = s.encode('utf8')
+        return (self.body.find(s) != -1
+                or self.normal_body.find(s) != -1)
+
+    def mustcontain(self, *strings, **kw):
+        """
+        Assert that the response contains all of the strings passed
+        in as arguments.
+
+        Equivalent to::
+
+            assert string in res
+        """
+        if 'no' in kw:
+            no = kw['no']
+            del kw['no']
+            if isinstance(no, (six.binary_type, six.text_type)):
+                no = [no]
+        else:
+            no = []
+        if kw:
+            raise TypeError(
+                "The only keyword argument allowed is 'no'")
+        for s in strings:
+            if not s in self:
+                print("Actual response (no %r):" % s, file=sys.stderr)
+                print(self, file=sys.stderr)
+                raise IndexError(
+                    "Body does not contain string %r" % s)
+        for no_s in no:
+            if no_s in self:
+                print("Actual response (has %r)" % no_s, file=sys.stderr)
+                print(self, file=sys.stderr)
+                # NOTE(review): bug -- this formats ``s`` (stale from the
+                # loop above, or unbound if ``strings`` is empty) instead
+                # of ``no_s``, so the error reports the wrong string.
+                raise IndexError(
+                    "Body contains string %r" % s)
+
+    def __repr__(self):
+        body = self.body
+        if six.PY3:
+            body = body.decode('utf8', 'xmlcharrefreplace')
+        # Truncate for a compact repr.
+        body = body[:20]
+        return '<Response %s %r>' % (self.full_status, body)
+
+    def __str__(self):
+        # Drop blank lines from the body for readability.
+        simple_body = b'\n'.join([l for l in self.body.splitlines()
+                                  if l.strip()])
+        if six.PY3:
+            simple_body = simple_body.decode('utf8', 'xmlcharrefreplace')
+        return 'Response: %s\n%s\n%s' % (
+            self.status,
+            '\n'.join(['%s: %s' % (n, v) for n, v in self.headers]),
+            simple_body)
+
+    def showbrowser(self):
+        """
+        Show this response in a browser window (for debugging purposes,
+        when it's hard to read the HTML).
+        """
+        import webbrowser
+        fn = tempnam_no_warning(None, 'paste-fixture') + '.html'
+        f = open(fn, 'wb')
+        f.write(self.body)
+        f.close()
+        url = 'file:' + fn.replace(os.sep, '/')
+        webbrowser.open_new(url)
+
+class TestRequest(object):
+
+    # for py.test
+    disabled = True
+
+    # NOTE(review): as in TestResponse, the string below follows another
+    # statement and so is a no-op expression, not the class docstring.
+    """
+    Instances of this class are created by `TestApp
+    <class-paste.fixture.TestApp.html>`_ with the ``.get()`` and
+    ``.post()`` methods, and are consumed there by ``.do_request()``.
+
+    Instances are also available as a ``.req`` attribute on
+    `TestResponse <class-paste.fixture.TestResponse.html>`_ instances.
+
+    Useful attributes:
+
+    ``url``:
+        The url (actually usually the path) of the request, without
+        query string.
+
+    ``environ``:
+        The environment dictionary used for the request.
+
+    ``full_url``:
+        The url/path, with query string.
+    """
+
+    def __init__(self, url, environ, expect_errors=False):
+        # Allow tests to pass full URLs: strip the scheme/host prefix so
+        # only the path is kept.
+        if url.startswith('http://localhost'):
+            url = url[len('http://localhost'):]
+        self.url = url
+        self.environ = environ
+        if environ.get('QUERY_STRING'):
+            self.full_url = url + '?' + environ['QUERY_STRING']
+        else:
+            self.full_url = url
+        self.expect_errors = expect_errors
+
+
+class Form(object):
+
+    """
+    This object represents a form that has been found in a page.
+    This has a couple useful attributes:
+
+    ``text``:
+        the full HTML of the form.
+
+    ``action``:
+        the relative URI of the action.
+
+    ``method``:
+        the method (e.g., ``'GET'``).
+
+    ``id``:
+        the id, or None if not given.
+
+    ``fields``:
+        a dictionary of fields, each value is a list of fields by
+        that name. ``<input type=\"radio\">`` and ``<select>`` are
+        both represented as single fields with multiple options.
+    """
+
+    # @@: This really should be using Mechanize/ClientForm or
+    # something...
+
+    _tag_re = re.compile(r'<(/?)([:a-z0-9_\-]*)([^>]*?)>', re.I)
+
+    def __init__(self, response, text):
+        self.response = response
+        self.text = text
+        self._parse_fields()
+        self._parse_action()
+
+    def _parse_fields(self):
+        # Single pass over the tags; in_select/in_textarea track the
+        # currently-open <select>/<textarea> while scanning.
+        in_select = None
+        in_textarea = None
+        fields = {}
+        for match in self._tag_re.finditer(self.text):
+            end = match.group(1) == '/'
+            tag = match.group(2).lower()
+            if tag not in ('input', 'select', 'option', 'textarea',
+                           'button'):
+                continue
+            if tag == 'select' and end:
+                assert in_select, (
+                    '%r without starting select' % match.group(0))
+                in_select = None
+                continue
+            if tag == 'textarea' and end:
+                assert in_textarea, (
+                    "</textarea> with no <textarea> at %s" % match.start())
+                # The textarea's value is everything between the tags.
+                in_textarea[0].value = html_unquote(self.text[in_textarea[1]:match.start()])
+                in_textarea = None
+                continue
+            if end:
+                continue
+            attrs = _parse_attrs(match.group(3))
+            if 'name' in attrs:
+                name = attrs.pop('name')
+            else:
+                name = None
+            if tag == 'option':
+                # Options belong to the enclosing <select>.
+                in_select.options.append((attrs.get('value'),
+                                          'selected' in attrs))
+                continue
+            if tag == 'input' and attrs.get('type') == 'radio':
+                # All radios of one name collapse into a single Radio field.
+                field = fields.get(name)
+                if not field:
+                    field = Radio(self, tag, name, match.start(), **attrs)
+                    fields.setdefault(name, []).append(field)
+                else:
+                    field = field[0]
+                    assert isinstance(field, Radio)
+                field.options.append((attrs.get('value'),
+                                      'checked' in attrs))
+                continue
+            tag_type = tag
+            if tag == 'input':
+                tag_type = attrs.get('type', 'text').lower()
+            FieldClass = Field.classes.get(tag_type, Field)
+            field = FieldClass(self, tag, name, match.start(), **attrs)
+            if tag == 'textarea':
+                assert not in_textarea, (
+                    "Nested textareas: %r and %r"
+                    % (in_textarea, match.group(0)))
+                in_textarea = field, match.end()
+            elif tag == 'select':
+                assert not in_select, (
+                    "Nested selects: %r and %r"
+                    % (in_select, match.group(0)))
+                in_select = field
+            fields.setdefault(name, []).append(field)
+        self.fields = fields
+
+    def _parse_action(self):
+        # Find the opening <form> tag and read action/method/id from it;
+        # the for/else asserts if no closing </form> is ever seen.
+        self.action = None
+        for match in self._tag_re.finditer(self.text):
+            end = match.group(1) == '/'
+            tag = match.group(2).lower()
+            if tag != 'form':
+                continue
+            if end:
+                break
+            attrs = _parse_attrs(match.group(3))
+            self.action = attrs.get('action', '')
+            self.method = attrs.get('method', 'GET')
+            self.id = attrs.get('id')
+            # @@: enctype?
+        else:
+            assert 0, "No </form> tag found"
+        assert self.action is not None, (
+            "No <form> tag found")
+
+    def __setitem__(self, name, value):
+        """
+        Set the value of the named field. If there is 0 or multiple
+        fields by that name, it is an error.
+
+        Setting the value of a ``<select>`` selects the given option
+        (and confirms it is an option). Setting radio fields does the
+        same. Checkboxes get boolean values. You cannot set hidden
+        fields or buttons.
+
+        Use ``.set()`` if there is any ambiguity and you must provide
+        an index.
+        """
+        fields = self.fields.get(name)
+        assert fields is not None, (
+            "No field by the name %r found (fields: %s)"
+            % (name, ', '.join(map(repr, self.fields.keys()))))
+        assert len(fields) == 1, (
+            "Multiple fields match %r: %s"
+            % (name, ', '.join(map(repr, fields))))
+        fields[0].value = value
+
+    def __getitem__(self, name):
+        """
+        Get the named field object (ambiguity is an error).
+        """
+        fields = self.fields.get(name)
+        assert fields is not None, (
+            "No field by the name %r found" % name)
+        assert len(fields) == 1, (
+            "Multiple fields match %r: %s"
+            % (name, ', '.join(map(repr, fields))))
+        return fields[0]
+
+    def set(self, name, value, index=None):
+        """
+        Set the given name, using ``index`` to disambiguate.
+        """
+        if index is None:
+            self[name] = value
+        else:
+            fields = self.fields.get(name)
+            assert fields is not None, (
+                "No fields found matching %r" % name)
+            field = fields[index]
+            field.value = value
+
+    def get(self, name, index=None, default=NoDefault):
+        """
+        Get the named/indexed field object, or ``default`` if no field
+        is found.
+        """
+        fields = self.fields.get(name)
+        if fields is None and default is not NoDefault:
+            return default
+        if index is None:
+            return self[name]
+        else:
+            fields = self.fields.get(name)
+            assert fields is not None, (
+                "No fields found matching %r" % name)
+            field = fields[index]
+            return field
+
+    def select(self, name, value, index=None):
+        """
+        Like ``.set()``, except also confirms the target is a
+        ``<select>``.
+        """
+        field = self.get(name, index=index)
+        assert isinstance(field, Select)
+        field.value = value
+
+    def submit(self, name=None, index=None, **args):
+        """
+        Submits the form. If ``name`` is given, then also select that
+        button (using ``index`` to disambiguate)``.
+
+        Any extra keyword arguments are passed to the ``.get()`` or
+        ``.post()`` method.
+
+        Returns a response object.
+        """
+        fields = self.submit_fields(name, index=index)
+        return self.response.goto(self.action, method=self.method,
+                                  params=fields, **args)
+
+    def submit_fields(self, name=None, index=None):
+        """
+        Return a list of ``[(name, value), ...]`` for the current
+        state of the form.
+        """
+        submit = []
+        if name is not None:
+            field = self.get(name, index=index)
+            submit.append((field.name, field.value_if_submitted()))
+        for name, fields in self.fields.items():
+            if name is None:
+                continue
+            for field in fields:
+                value = field.value
+                # Fields whose value is None (e.g. unset controls) are not
+                # submitted.
+                if value is None:
+                    continue
+                submit.append((name, value))
+        return submit
+
+
# Matches one HTML attribute: a name, optionally followed by '=' and
# either a double-quoted value or a bare (unquoted) value.
_attr_re = re.compile(r'([^= \n\r\t]+)[ \n\r\t]*(?:=[ \n\r\t]*(?:"([^"]*)"|([^"][^ \n\r\t>]*)))?', re.S)

def _parse_attrs(text):
    """Parse an HTML attribute string into a ``{name: value}`` dict."""
    parsed = {}
    for m in _attr_re.finditer(text):
        key = m.group(1).lower()
        raw = m.group(2) or m.group(3) or ''
        parsed[key] = html_unquote(raw)
    return parsed
+
class Field(object):

    """
    Represents a single form field parsed out of the markup.
    """

    # Maps input types ('select', 'radio', 'checkbox', ...) to the
    # Field subclass that implements them.
    classes = {}

    # Subclasses that refuse normal assignment (e.g. submit buttons)
    # set this to False.
    settable = True

    def __init__(self, form, tag, name, pos,
                 value=None, id=None, **attrs):
        self.form = form
        self.tag = tag
        self.name = name
        self.pos = pos
        self.id = id
        self.attrs = attrs
        self._value = value

    def value__set(self, value):
        # Refuse assignment for read-only field kinds.
        if not self.settable:
            raise AttributeError(
                "You cannot set the value of the <%s> field %r"
                % (self.tag, self.name))
        self._value = value

    def force_value(self, value):
        """
        Like setting a value, except forces it even for, say, hidden
        fields.
        """
        self._value = value

    def value__get(self):
        return self._value

    value = property(value__get, value__set)
+
class Select(Field):

    """
    Field representing ``<select>``
    """

    def __init__(self, *args, **attrs):
        super(Select, self).__init__(*args, **attrs)
        self.options = []
        self.multiple = attrs.get('multiple')
        assert not self.multiple, (
            "<select multiple> not yet supported")
        # Not known until the options have been parsed:
        self.selectedIndex = None

    def value__set(self, value):
        wanted = str(value)
        for pos, (option, checked) in enumerate(self.options):
            if option == wanted:
                self.selectedIndex = pos
                return
        raise ValueError(
            "Option %r not found (from %s)"
            % (value, ', '.join(
                [repr(o) for o, c in self.options])))

    def value__get(self):
        # An explicit selection wins; otherwise fall back to the option
        # marked checked, then to the first option, then to None.
        if self.selectedIndex is not None:
            return self.options[self.selectedIndex][0]
        for option, checked in self.options:
            if checked:
                return option
        if self.options:
            return self.options[0][0]
        return None

    value = property(value__get, value__set)

Field.classes['select'] = Select
+
class Radio(Select):

    """
    Field representing ``<input type="radio">``
    """
    # A radio group behaves like a single-choice select (one shared
    # name, one submitted value), so Select's logic is reused as-is.

Field.classes['radio'] = Radio
+
class Checkbox(Field):

    """
    Field representing ``<input type="checkbox">``
    """

    def __init__(self, *args, **attrs):
        super(Checkbox, self).__init__(*args, **attrs)
        # The mere presence of a 'checked' attribute marks the box.
        self.checked = 'checked' in attrs

    def value__set(self, value):
        # Any truthy value checks the box; falsy unchecks it.
        self.checked = bool(value)

    def value__get(self):
        if not self.checked:
            return None
        # Browsers submit 'on' for a checked box with no value attribute.
        return 'on' if self._value is None else self._value

    value = property(value__get, value__set)

Field.classes['checkbox'] = Checkbox
+
class Text(Field):
    """
    Field representing ``<input type="text">``
    """
    def __init__(self, form, tag, name, pos,
                 value='', id=None, **attrs):
        # Unlike generic fields, an absent value means the empty
        # string rather than None.
        super(Text, self).__init__(form, tag, name, pos,
                                   value=value, id=id, **attrs)

Field.classes['text'] = Text
+
class Textarea(Text):
    """
    Field representing ``<textarea>``
    """
    # Behaves exactly like a text input (value defaults to '').

Field.classes['textarea'] = Textarea
+
class Hidden(Text):
    """
    Field representing ``<input type="hidden">``
    """
    # Inherits Field.settable == True, so plain assignment still works;
    # force_value() exists for emphasising such overrides.

Field.classes['hidden'] = Hidden
+
class Submit(Field):
    """
    Field representing ``<input type="submit">`` and ``<button>``
    """

    # Buttons cannot be assigned a value programmatically.
    settable = False

    def value__get(self):
        # A button contributes no value unless it triggered the
        # submission (see value_if_submitted).
        return None

    value = property(value__get)

    def value_if_submitted(self):
        """Return the value sent when this button performs the submit."""
        return self._value

# Buttons and image inputs submit the same way as submit inputs.
Field.classes['submit'] = Submit

Field.classes['button'] = Submit

Field.classes['image'] = Submit
+
+############################################################
+## Command-line testing
+############################################################
+
+
class TestFileEnvironment(object):

    """
    This represents an environment in which files will be written, and
    scripts will be run.
    """

    # for py.test
    disabled = True

    def __init__(self, base_path, template_path=None,
                 script_path=None,
                 environ=None, cwd=None, start_clear=True,
                 ignore_paths=None, ignore_hidden=True):
        """
        Creates an environment.  ``base_path`` is used as the current
        working directory, and generally where changes are looked for.

        ``template_path`` is the directory to look for *template*
        files, which are files you'll explicitly add to the
        environment.  This is done with ``.writefile()``.

        ``script_path`` is the PATH for finding executables.  Usually
        grabbed from ``$PATH``.

        ``environ`` is the operating system environment,
        ``os.environ`` if not given.

        ``cwd`` is the working directory, ``base_path`` by default.

        If ``start_clear`` is true (default) then the ``base_path``
        will be cleared (all files deleted) when an instance is
        created.  You can also use ``.clear()`` to clear the files.

        ``ignore_paths`` is a set of specific filenames that should be
        ignored when created in the environment.  ``ignore_hidden``
        means, if true (default) that filenames and directories
        starting with ``'.'`` will be ignored.
        """
        self.base_path = base_path
        self.template_path = template_path
        if environ is None:
            environ = os.environ.copy()
        self.environ = environ
        if script_path is None:
            # PATH separator differs between Windows and POSIX.
            if sys.platform == 'win32':
                script_path = environ.get('PATH', '').split(';')
            else:
                script_path = environ.get('PATH', '').split(':')
        self.script_path = script_path
        if cwd is None:
            cwd = base_path
        self.cwd = cwd
        if start_clear:
            self.clear()
        elif not os.path.exists(base_path):
            os.makedirs(base_path)
        self.ignore_paths = ignore_paths or []
        self.ignore_hidden = ignore_hidden

    def run(self, script, *args, **kw):
        """
        Run the command, with the given arguments.  The ``script``
        argument can have space-separated arguments, or you can use
        the positional arguments.

        Keywords allowed are:

        ``expect_error``: (default False)
            Don't raise an exception in case of errors
        ``expect_stderr``: (default ``expect_error``)
            Don't raise an exception if anything is printed to stderr
        ``stdin``: (default ``""``)
            Input to the script
        ``printresult``: (default True)
            Print the result after running
        ``cwd``: (default ``self.cwd``)
            The working directory to run in

        Returns a `ProcResult
        <class-paste.fixture.ProcResult.html>`_ object.
        """
        __tracebackhide__ = True
        expect_error = _popget(kw, 'expect_error', False)
        expect_stderr = _popget(kw, 'expect_stderr', expect_error)
        cwd = _popget(kw, 'cwd', self.cwd)
        stdin = _popget(kw, 'stdin', None)
        printresult = _popget(kw, 'printresult', True)
        args = list(map(str, args))
        assert not kw, (
            "Arguments not expected: %s" % ', '.join(kw.keys()))
        if ' ' in script:
            assert not args, (
                "You cannot give a multi-argument script (%r) "
                "and arguments (%s)" % (script, args))
            script, args = script.split(None, 1)
            args = shlex.split(args)
        script = self._find_exe(script)
        all = [script] + args
        # Snapshot the file tree before and after so the result can
        # report created/updated/deleted files.
        files_before = self._find_files()
        proc = subprocess.Popen(all, stdin=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                cwd=cwd,
                                env=self.environ)
        stdout, stderr = proc.communicate(stdin)
        files_after = self._find_files()
        result = ProcResult(
            self, all, stdin, stdout, stderr,
            returncode=proc.returncode,
            files_before=files_before,
            files_after=files_after)
        if printresult:
            print(result)
            print('-'*40)
        if not expect_error:
            result.assert_no_error()
        if not expect_stderr:
            result.assert_no_stderr()
        return result

    def _find_exe(self, script_name):
        """Resolve ``script_name`` against ``self.script_path`` (or cwd)."""
        if self.script_path is None:
            script_name = os.path.join(self.cwd, script_name)
            if not os.path.exists(script_name):
                raise OSError(
                    "Script %s does not exist" % script_name)
            return script_name
        for path in self.script_path:
            fn = os.path.join(path, script_name)
            if os.path.exists(fn):
                return fn
        raise OSError(
            "Script %s could not be found in %s"
            % (script_name, ':'.join(self.script_path)))

    def _find_files(self):
        """Return {relative path: FoundFile/FoundDir} for the base dir."""
        result = {}
        for fn in os.listdir(self.base_path):
            if self._ignore_file(fn):
                continue
            self._find_traverse(fn, result)
        return result

    def _ignore_file(self, fn):
        """True if ``fn`` should be excluded from file snapshots."""
        if fn in self.ignore_paths:
            return True
        if self.ignore_hidden and os.path.basename(fn).startswith('.'):
            return True
        return False

    def _find_traverse(self, path, result):
        # Recursively record ``path`` (relative to base_path) and, for
        # directories, everything beneath it.
        full = os.path.join(self.base_path, path)
        if os.path.isdir(full):
            result[path] = FoundDir(self.base_path, path)
            for fn in os.listdir(full):
                fn = os.path.join(path, fn)
                if self._ignore_file(fn):
                    continue
                self._find_traverse(fn, result)
        else:
            result[path] = FoundFile(self.base_path, path)

    def clear(self):
        """
        Delete all the files in the base directory.
        """
        if os.path.exists(self.base_path):
            shutil.rmtree(self.base_path)
        os.mkdir(self.base_path)

    def writefile(self, path, content=None,
                  frompath=None):
        """
        Write a file to the given path.  If ``content`` is given then
        that text is written, otherwise the file in ``frompath`` is
        used.  ``frompath`` is relative to ``self.template_path``
        """
        full = os.path.join(self.base_path, path)
        if not os.path.exists(os.path.dirname(full)):
            os.makedirs(os.path.dirname(full))
        # Context managers close both handles even if a write fails
        # part-way; the previous version leaked them on error.
        with open(full, 'wb') as f:
            if content is not None:
                f.write(content)
            if frompath is not None:
                if self.template_path:
                    frompath = os.path.join(self.template_path, frompath)
                with open(frompath, 'rb') as src:
                    f.write(src.read())
        return FoundFile(self.base_path, path)
+
class ProcResult(object):

    """
    Represents the results of running a command in
    `TestFileEnvironment
    <class-paste.fixture.TestFileEnvironment.html>`_.

    Attributes to pay particular attention to:

    ``stdout``, ``stderr``:
        What is produced

    ``files_created``, ``files_deleted``, ``files_updated``:
        Dictionaries mapping filenames (relative to the ``base_dir``)
        to `FoundFile <class-paste.fixture.FoundFile.html>`_ or
        `FoundDir <class-paste.fixture.FoundDir.html>`_ objects.
    """

    def __init__(self, test_env, args, stdin, stdout, stderr,
                 returncode, files_before, files_after):
        self.test_env = test_env
        self.args = args
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.returncode = returncode
        self.files_before = files_before
        self.files_after = files_after
        # Classify every path into created/deleted/updated by
        # comparing the before/after snapshots (updated = mtime grew).
        self.files_deleted = {}
        self.files_updated = {}
        self.files_created = files_after.copy()
        for path, f in files_before.items():
            if path not in files_after:
                self.files_deleted[path] = f
                continue
            del self.files_created[path]
            if f.mtime < files_after[path].mtime:
                self.files_updated[path] = files_after[path]

    def assert_no_error(self):
        """Fail unless the process exited with status 0."""
        __tracebackhide__ = True
        assert self.returncode == 0, (
            "Script returned code: %s" % self.returncode)

    def assert_no_stderr(self):
        """Fail (after printing it) if the process wrote to stderr."""
        __tracebackhide__ = True
        if self.stderr:
            print('Error output:')
            print(self.stderr)
            raise AssertionError("stderr output not expected")

    def __str__(self):
        s = ['Script result: %s' % ' '.join(self.args)]
        if self.returncode:
            s.append(' return code: %s' % self.returncode)
        if self.stderr:
            s.append('-- stderr: --------------------')
            s.append(self.stderr)
        if self.stdout:
            s.append('-- stdout: --------------------')
            s.append(self.stdout)
        for name, files, show_size in [
            ('created', self.files_created, True),
            ('deleted', self.files_deleted, True),
            ('updated', self.files_updated, True)]:
            if files:
                s.append('-- %s: -------------------' % name)
                # dict.items() is an unsortable view on Python 3; the
                # previous files.items(); files.sort() raised
                # AttributeError there.
                files = sorted(files.items())
                last = ''
                for path, f in files:
                    t = ' %s' % _space_prefix(last, path, indent=4,
                                              include_sep=False)
                    last = path
                    if show_size and f.size != 'N/A':
                        t += ' (%s bytes)' % f.size
                    s.append(t)
        return '\n'.join(s)
+
class FoundFile(object):

    """
    A file discovered while snapshotting the environment.

    Attributes:

    ``path``:
        Location relative to ``base_path``

    ``full``:
        ``base_path`` joined with ``path``

    ``stat``:
        Result of ``os.stat``; ``mtime`` and ``size`` mirror its
        ``st_mtime`` and ``st_size``.

    ``bytes``:
        The file contents, read lazily and cached.

    Supports the ``in`` operator (substring test against the
    contents) and the ``.mustcontain()`` method.
    """

    file = True
    dir = False

    def __init__(self, base_path, path):
        self.base_path = base_path
        self.path = path
        self.full = os.path.join(base_path, path)
        self.stat = os.stat(self.full)
        self.mtime = self.stat.st_mtime
        self.size = self.stat.st_size
        self._bytes = None

    def bytes__get(self):
        # Read on first access only; later accesses hit the cache.
        if self._bytes is None:
            with open(self.full, 'rb') as handle:
                self._bytes = handle.read()
        return self._bytes
    bytes = property(bytes__get)

    def __contains__(self, s):
        return s in self.bytes

    def mustcontain(self, s):
        """Assert ``s`` occurs in the file, printing the contents if not."""
        __tracebackhide__ = True
        content = self.bytes
        if s not in content:
            print('Could not find %r in:' % s)
            print(content)
        assert s in content

    def __repr__(self):
        return '<%s %s:%s>' % (
            self.__class__.__name__,
            self.base_path, self.path)
+
class FoundDir(object):

    """
    Represents a directory created by a command.
    """

    file = False
    dir = True

    def __init__(self, base_path, path):
        self.base_path = base_path
        self.path = path
        self.full = os.path.join(base_path, path)
        # Directories have no meaningful size/mtime for comparisons.
        self.size = self.mtime = 'N/A'

    def __repr__(self):
        return '<%s %s:%s>' % (
            self.__class__.__name__,
            self.base_path, self.path)
+
def _popget(d, key, default=None):
    """
    Remove and return ``d[key]`` when present, else ``default``.
    """
    return d.pop(key, default)
+
def _space_prefix(pref, full, sep=None, indent=None, include_sep=True):
    """
    Replace the leading path components that ``full`` shares with
    ``pref`` by whitespace, returning the blanked-out ``full``.
    """
    if sep is None:
        sep = os.path.sep
    pref_parts = pref.split(sep)
    full_parts = full.split(sep)
    padding = []
    while pref_parts and full_parts and pref_parts[0] == full_parts[0]:
        shared = full_parts.pop(0)
        pref_parts.pop(0)
        # Fixed-width indent when requested, else mirror the width of
        # the shared component plus its separator.
        if indent is None:
            padding.append(' ' * (len(shared) + len(sep)))
        else:
            padding.append(' ' * indent)
    if not padding:
        return sep.join(full_parts)
    lead = ''.join(padding)
    if include_sep:
        return lead + sep + sep.join(full_parts)
    return lead + sep.join(full_parts)
+
def _make_pattern(pat):
    """
    Normalize ``pat`` into a callable matcher: ``None`` passes
    through, strings/bytes are compiled to regexes, regex objects
    yield their ``search`` method, and other callables are returned
    untouched.
    """
    if pat is None:
        return None
    candidate = pat
    if isinstance(candidate, (six.binary_type, six.text_type)):
        candidate = re.compile(candidate)
    searcher = getattr(candidate, 'search', None)
    if searcher is not None:
        return searcher
    if callable(candidate):
        return candidate
    assert 0, (
        "Cannot make callable pattern object out of %r" % pat)
+
def setup_module(module=None):
    """
    This is used by py.test if it is in the module, so you can
    import this directly.

    Use like::

        from paste.fixture import setup_module

    If the target module defines ``reset_state()``, it is invoked so
    each test module starts from a clean slate.
    """
    # Deprecated June 2008
    import warnings
    warnings.warn(
        'setup_module is deprecated',
        DeprecationWarning, 2)
    if module is None:
        # The module we were called from must be the module...
        # (frame introspection: f_back is the direct caller's frame)
        module = sys._getframe().f_back.f_globals['__name__']
    if isinstance(module, (six.binary_type, six.text_type)):
        # A module name was given (or derived above); resolve it to
        # the actual module object.
        module = sys.modules[module]
    if hasattr(module, 'reset_state'):
        module.reset_state()
+
def html_unquote(v):
    """
    Unquote (some) entities in HTML. (incomplete)
    """
    # '&amp;' must come last so freshly produced '&' characters are
    # not re-interpreted as the start of another entity.
    replacements = (('&nbsp;', ' '), ('&gt;', '>'),
                    ('&lt;', '<'), ('&quot;', '"'),
                    ('&amp;', '&'))
    for entity, char in replacements:
        v = v.replace(entity, char)
    return v
diff --git a/paste/flup_session.py b/paste/flup_session.py
new file mode 100644
index 0000000..6f5c750
--- /dev/null
+++ b/paste/flup_session.py
@@ -0,0 +1,108 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Creates a session object.
+
+In your application, use::
+
+ environ['paste.flup_session_service'].session
+
+This will return a dictionary. The contents of this dictionary will
+be saved to disk when the request is completed. The session will be
+created when you first fetch the session dictionary, and a cookie will
be sent in that case.  There's currently no way to use sessions without
+cookies, and there's no way to delete a session except to clear its
+data.
+"""
+
+from paste import httpexceptions
+from paste import wsgilib
+import flup.middleware.session
+flup_session = flup.middleware.session
+
# This is a dictionary of existing stores, keyed by a tuple of
# store type and parameters
# NOTE(review): apparently unused in this module — verify before removing.
store_cache = {}

class NoDefault(object):
    # Sentinel: distinguishes "argument omitted" from an explicit None
    # in SessionMiddleware's keyword arguments.
    pass
+
class SessionMiddleware(object):
    """
    WSGI middleware that attaches a flup session service to each
    request as ``environ['paste.flup_session_service']`` and adds the
    session cookie to outgoing responses.
    """

    # Maps a session_type name to (flup store class, config spec);
    # each spec entry is (config_name, store kwarg, coercer, default).
    session_classes = {
        'memory': (flup_session.MemorySessionStore,
                   [('session_timeout', 'timeout', int, 60)]),
        'disk': (flup_session.DiskSessionStore,
                 [('session_timeout', 'timeout', int, 60),
                  ('session_dir', 'storeDir', str, '/tmp/sessions')]),
        'shelve': (flup_session.ShelveSessionStore,
                   [('session_timeout', 'timeout', int, 60),
                    ('session_file', 'storeFile', str,
                     '/tmp/session.shelve')]),
        }


    def __init__(self, app,
                 global_conf=None,
                 session_type=NoDefault,
                 cookie_name=NoDefault,
                 **store_config
                 ):
        self.application = app
        if session_type is NoDefault:
            session_type = global_conf.get('session_type', 'disk')
        self.session_type = session_type
        try:
            self.store_class, self.store_args = self.session_classes[self.session_type]
        except KeyError:
            raise KeyError(
                "The session_type %s is unknown (I know about %s)"
                % (self.session_type,
                   ', '.join(self.session_classes.keys())))
        # Coerce configured (or default) values into the store's kwargs.
        kw = {}
        for config_name, kw_name, coercer, default in self.store_args:
            value = coercer(store_config.get(config_name, default))
            kw[kw_name] = value
        self.store = self.store_class(**kw)
        if cookie_name is NoDefault:
            cookie_name = global_conf.get('session_cookie', '_SID_')
        self.cookie_name = cookie_name

    def __call__(self, environ, start_response):
        service = flup_session.SessionService(
            self.store, environ, cookieName=self.cookie_name,
            fieldName=self.cookie_name)
        environ['paste.flup_session_service'] = service

        def cookie_start_response(status, headers, exc_info=None):
            # Inject the session cookie into every normal response.
            service.addCookie(headers)
            return start_response(status, headers, exc_info)

        try:
            app_iter = self.application(environ, cookie_start_response)
        except httpexceptions.HTTPException as e:
            # dict.items() returns an immutable view on Python 3, which
            # addCookie cannot append a header to; materialize a list.
            headers = list((e.headers or {}).items())
            service.addCookie(headers)
            e.headers = dict(headers)
            service.close()
            raise
        except:
            service.close()
            raise

        # Ensure the service is closed once the response iterator is
        # exhausted or closed.
        return wsgilib.add_close(app_iter, service.close)
+
def make_session_middleware(app, global_conf,
                            session_type=NoDefault,
                            cookie_name=NoDefault,
                            **store_config):
    """
    Wraps the application in a session-managing middleware.
    The session service can then be found in
    ``environ['paste.flup_session_service']``
    """
    wrapped = SessionMiddleware(
        app, global_conf=global_conf,
        session_type=session_type, cookie_name=cookie_name,
        **store_config)
    return wrapped
diff --git a/paste/gzipper.py b/paste/gzipper.py
new file mode 100644
index 0000000..eca8775
--- /dev/null
+++ b/paste/gzipper.py
@@ -0,0 +1,107 @@
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+WSGI middleware
+
+Gzip-encodes the response.
+"""
+
+import gzip
+from paste.response import header_value, remove_header
+from paste.httpheaders import CONTENT_LENGTH
+import six
+
class GzipOutput(object):
    # NOTE(review): apparently unused by the rest of this module —
    # kept, presumably, for backward compatibility; confirm before removing.
    pass
+
class middleware(object):
    """
    Wraps a WSGI application so its responses are gzip-encoded when
    the client advertises gzip support.
    """

    def __init__(self, application, compress_level=6):
        self.application = application
        self.compress_level = int(compress_level)

    def __call__(self, environ, start_response):
        accepted = environ.get('HTTP_ACCEPT_ENCODING', '')
        if 'gzip' not in accepted:
            # nothing for us to do, so this middleware will
            # be a no-op:
            return self.application(environ, start_response)
        response = GzipResponse(start_response, self.compress_level)
        body_iter = self.application(environ,
                                     response.gzip_start_response)
        if body_iter is not None:
            response.finish_response(body_iter)

        return response.write()
+
class GzipResponse(object):
    # Buffers the downstream application's output and, when the
    # content-type looks compressible, gzips it before the (deferred)
    # call to start_response.

    def __init__(self, start_response, compress_level):
        self.start_response = start_response
        self.compress_level = compress_level
        self.buffer = six.BytesIO()
        self.compressible = False
        self.content_length = None

    def gzip_start_response(self, status, headers, exc_info=None):
        # Stand-in for start_response: record status/headers and hand
        # back the buffer's write method as the WSGI write callable.
        self.headers = headers
        ct = header_value(headers,'content-type')
        ce = header_value(headers,'content-encoding')
        self.compressible = False
        # Only compress text/* and application/* bodies that are not
        # already a zip variant and have no content-encoding set.
        if ct and (ct.startswith('text/') or ct.startswith('application/')) \
            and 'zip' not in ct:
            self.compressible = True
        if ce:
            self.compressible = False
        if self.compressible:
            headers.append(('content-encoding', 'gzip'))
        # The body length changes after compression, so any declared
        # content-length is dropped (recomputed in finish_response).
        remove_header(headers, 'content-length')
        self.headers = headers
        self.status = status
        return self.buffer.write

    def write(self):
        # Return the buffered (possibly gzipped) body as a one-item
        # WSGI iterable.
        out = self.buffer
        out.seek(0)
        s = out.getvalue()
        out.close()
        return [s]

    def finish_response(self, app_iter):
        if self.compressible:
            output = gzip.GzipFile(mode='wb', compresslevel=self.compress_level,
                                   fileobj=self.buffer)
        else:
            output = self.buffer
        try:
            for s in app_iter:
                output.write(s)
            if self.compressible:
                output.close()
        finally:
            # Per WSGI, close the application's iterator if it has one.
            if hasattr(app_iter, 'close'):
                app_iter.close()
        # The final size is now known: fix up content-length and send
        # the headers upstream.
        content_length = self.buffer.tell()
        CONTENT_LENGTH.update(self.headers, content_length)
        self.start_response(self.status, self.headers)
+
def filter_factory(application, **conf):
    """
    Deprecated paste ``filter_factory`` entry point; use
    ``make_gzip_middleware`` instead.
    """
    import warnings
    warnings.warn(
        'This function is deprecated; use make_gzip_middleware instead',
        DeprecationWarning, 2)
    def filter(application):
        # Note: ``conf`` is intentionally ignored, matching the
        # historical behaviour of this entry point.
        return middleware(application)
    return filter
+
def make_gzip_middleware(app, global_conf, compress_level=6):
    """
    Wrap the middleware, so that it applies gzipping to a response
    when it is supported by the browser and the content is of
    type ``text/*`` or ``application/*``
    """
    return middleware(app, compress_level=int(compress_level))
diff --git a/paste/httpexceptions.py b/paste/httpexceptions.py
new file mode 100644
index 0000000..0b68c2d
--- /dev/null
+++ b/paste/httpexceptions.py
@@ -0,0 +1,667 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Ian Bicking, Clark C. Evans and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# Some of this code was funded by http://prometheusresearch.com
+"""
+HTTP Exception Middleware
+
+This module processes Python exceptions that relate to HTTP exceptions
+by defining a set of exceptions, all subclasses of HTTPException, and a
+request handler (`middleware`) that catches these exceptions and turns
+them into proper responses.
+
+This module defines exceptions according to RFC 2068 [1]_ : codes with
+100-300 are not really errors; 400's are client errors, and 500's are
+server errors. According to the WSGI specification [2]_ , the application
can call ``start_response`` more than once only under two conditions:
+(a) the response has not yet been sent, or (b) if the second and
+subsequent invocations of ``start_response`` have a valid ``exc_info``
+argument obtained from ``sys.exc_info()``. The WSGI specification then
+requires the server or gateway to handle the case where content has been
+sent and then an exception was encountered.
+
+Exceptions in the 5xx range and those raised after ``start_response``
+has been called are treated as serious errors and the ``exc_info`` is
+filled-in with information needed for a lower level module to generate a
+stack trace and log information.
+
+Exception
+ HTTPException
+ HTTPRedirection
+ * 300 - HTTPMultipleChoices
+ * 301 - HTTPMovedPermanently
+ * 302 - HTTPFound
+ * 303 - HTTPSeeOther
+ * 304 - HTTPNotModified
+ * 305 - HTTPUseProxy
+ * 306 - Unused (not implemented, obviously)
+ * 307 - HTTPTemporaryRedirect
+ HTTPError
+ HTTPClientError
+ * 400 - HTTPBadRequest
+ * 401 - HTTPUnauthorized
+ * 402 - HTTPPaymentRequired
+ * 403 - HTTPForbidden
+ * 404 - HTTPNotFound
+ * 405 - HTTPMethodNotAllowed
+ * 406 - HTTPNotAcceptable
+ * 407 - HTTPProxyAuthenticationRequired
+ * 408 - HTTPRequestTimeout
        * 409 - HTTPConflict
+ * 410 - HTTPGone
+ * 411 - HTTPLengthRequired
+ * 412 - HTTPPreconditionFailed
+ * 413 - HTTPRequestEntityTooLarge
+ * 414 - HTTPRequestURITooLong
+ * 415 - HTTPUnsupportedMediaType
+ * 416 - HTTPRequestRangeNotSatisfiable
+ * 417 - HTTPExpectationFailed
+ * 429 - HTTPTooManyRequests
+ HTTPServerError
+ * 500 - HTTPInternalServerError
+ * 501 - HTTPNotImplemented
+ * 502 - HTTPBadGateway
+ * 503 - HTTPServiceUnavailable
+ * 504 - HTTPGatewayTimeout
+ * 505 - HTTPVersionNotSupported
+
+References:
+
.. [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.5
.. [2] http://www.python.org/peps/pep-0333.html#error-handling
+
+"""
+
+import six
+from paste.wsgilib import catch_errors_app
+from paste.response import has_header, header_value, replace_header
+from paste.request import resolve_relative_url
+from paste.util.quoting import strip_html, html_quote, no_quote, comment_quote
+
SERVER_NAME = 'WSGI Server'
# Outer HTML page for every rendered exception; %(title)s, %(body)s and
# %(server)s are filled in by HTTPException.html().  The trailing \r on
# each line keeps CRLF line endings in the emitted document.
TEMPLATE = """\
<html>\r
  <head><title>%(title)s</title></head>\r
  <body>\r
    <h1>%(title)s</h1>\r
    <p>%(body)s</p>\r
    <hr noshade>\r
    <div align="right">%(server)s</div>\r
  </body>\r
</html>\r
"""
+
class HTTPException(Exception):
    """
    the HTTP exception base class

    This encapsulates an HTTP response that interrupts normal application
    flow; but one which is not necessarily an error condition. For
    example, codes in the 300's are exceptions in that they interrupt
    normal processing; however, they are not considered errors.

    This class is complicated by 4 factors:

    1. The content given to the exception may either be plain-text or
       as html-text.

    2. The template may want to have string-substitutions taken from
       the current ``environ`` or values from incoming headers. This
       is especially troublesome due to case sensitivity.

    3. The final output may either be text/plain or text/html
       mime-type as requested by the client application.

    4. Each exception has a default explanation, but those who
       raise exceptions may want to provide additional detail.

    Attributes:

    ``code``
        the HTTP status code for the exception

    ``title``
        remainder of the status line (stuff after the code)

    ``explanation``
        a plain-text explanation of the error message that is
        not subject to environment or header substitutions;
        it is accessible in the template via %(explanation)s

    ``detail``
        a plain-text message customization that is not subject
        to environment or header substitutions; accessible in
        the template via %(detail)s

    ``template``
        a content fragment (in HTML) used for environment and
        header substitution; the default template includes both
        the explanation and further detail provided in the
        message

    ``required_headers``
        a sequence of headers which are required for proper
        construction of the exception

    Parameters:

    ``detail``
        a plain-text override of the default ``detail``

    ``headers``
        a list of (k,v) header pairs

    ``comment``
        a plain-text additional information which is
        usually stripped/hidden for end-users

    To override the template (which is HTML content) or the plain-text
    explanation, one must subclass the given exception; or customize it
    after it has been created. This particular breakdown of a message
    into explanation, detail and template allows both the creation of
    plain-text and html messages for various clients as well as
    error-free substitution of environment variables and headers.
    """

    # Subclasses must override code/title; the remaining class
    # attributes may be customized per-instance via __init__.
    code = None
    title = None
    explanation = ''
    detail = ''
    comment = ''
    template = "%(explanation)s\r\n<br/>%(detail)s\r\n<!-- %(comment)s -->"
    required_headers = ()

    def __init__(self, detail=None, headers=None, comment=None):
        assert self.code, "Do not directly instantiate abstract exceptions."
        assert isinstance(headers, (type(None), list)), (
            "headers must be None or a list: %r"
            % headers)
        assert isinstance(detail, (type(None), six.binary_type, six.text_type)), (
            "detail must be None or a string: %r" % detail)
        assert isinstance(comment, (type(None), six.binary_type, six.text_type)), (
            "comment must be None or a string: %r" % comment)
        self.headers = headers or tuple()
        for req in self.required_headers:
            assert headers and has_header(headers, req), (
                "Exception %s must be passed the header %r "
                "(got headers: %r)"
                % (self.__class__.__name__, req, headers))
        if detail is not None:
            self.detail = detail
        if comment is not None:
            self.comment = comment
        Exception.__init__(self,"%s %s\n%s\n%s\n" % (
            self.code, self.title, self.explanation, self.detail))

    def make_body(self, environ, template, escfunc, comment_escfunc=None):
        # Render ``template`` with escaped explanation/detail/comment;
        # only customized (subclass) templates may additionally
        # interpolate environ keys and (lower-cased) header names.
        comment_escfunc = comment_escfunc or escfunc
        args = {'explanation': escfunc(self.explanation),
                'detail': escfunc(self.detail),
                'comment': comment_escfunc(self.comment)}
        if HTTPException.template != self.template:
            for (k, v) in environ.items():
                args[k] = escfunc(v)
            if self.headers:
                for (k, v) in self.headers:
                    args[k.lower()] = escfunc(v)
        if six.PY2:
            # On Python 2, force byte strings so the '%' formatting
            # below cannot fail on non-ASCII text.
            for key, value in args.items():
                if isinstance(value, six.text_type):
                    args[key] = value.encode('utf8', 'xmlcharrefreplace')
        return template % args

    def plain(self, environ):
        """ text/plain representation of the exception """
        body = self.make_body(environ, strip_html(self.template), no_quote, comment_quote)
        return ('%s %s\r\n%s\r\n' % (self.code, self.title, body))

    def html(self, environ):
        """ text/html representation of the exception """
        body = self.make_body(environ, self.template, html_quote, comment_quote)
        return TEMPLATE % {
            'title': self.title,
            'code': self.code,
            'server': SERVER_NAME,
            'body': body }

    def prepare_content(self, environ):
        # Choose an html or plain rendering based on the Accept header
        # and return (headers, encoded body).
        if self.headers:
            headers = list(self.headers)
        else:
            headers = []
        if 'html' in environ.get('HTTP_ACCEPT','') or \
            '*/*' in environ.get('HTTP_ACCEPT',''):
            replace_header(headers, 'content-type', 'text/html')
            content = self.html(environ)
        else:
            replace_header(headers, 'content-type', 'text/plain')
            content = self.plain(environ)
        if isinstance(content, six.text_type):
            content = content.encode('utf8')
        # Tack the charset onto whichever content-type was chosen.
        cur_content_type = (
            header_value(headers, 'content-type')
            or 'text/html')
        replace_header(
            headers, 'content-type',
            cur_content_type + '; charset=utf8')
        return headers, content

    def response(self, environ):
        # Imported here to avoid a circular import at module load time.
        from paste.wsgiwrappers import WSGIResponse
        headers, content = self.prepare_content(environ)
        resp = WSGIResponse(code=self.code, content=content)
        resp.headers = resp.headers.fromlist(headers)
        return resp

    def wsgi_application(self, environ, start_response, exc_info=None):
        """
        This exception as a WSGI application
        """
        headers, content = self.prepare_content(environ)
        start_response('%s %s' % (self.code, self.title),
                       headers,
                       exc_info)
        return [content]

    __call__ = wsgi_application

    def __repr__(self):
        return '<%s %s; code=%s>' % (self.__class__.__name__,
                                     self.title, self.code)
+
class HTTPError(HTTPException):
    """
    base class for status codes in the 400's and 500's

    This is an exception which indicates that an error has occurred,
    and that any work in progress should not be committed. These are
    typically responses in the 400's and 500's.
    """
+
+#
+# 3xx Redirection
+#
+# This class of status code indicates that further action needs to be
+# taken by the user agent in order to fulfill the request. The action
+# required MAY be carried out by the user agent without interaction with
+# the user if and only if the method used in the second request is GET or
+# HEAD. A client SHOULD detect infinite redirection loops, since such
+# loops generate network traffic for each redirection.
+#
+
class HTTPRedirection(HTTPException):
    """
    base class for 300's status code (redirections)

    This is an abstract base class for 3xx redirection. It indicates
    that further action needs to be taken by the user agent in order
    to fulfill the request. It does not necessarily signal an error
    condition.
    """
+
class _HTTPMove(HTTPRedirection):
    """
    redirections which require a Location field

    A 'Location' header is a required attribute of 301, 302, 303,
    305 and 307 (but not 304), so this base class provides the
    mechanics for it. It takes the same parameters as HTTPException,
    but when no location is present in the headers, the detail is
    assumed to *be* the location (kept for backward compatibility,
    otherwise we'd add a new attribute).
    """
    required_headers = ('location',)
    explanation = 'The resource has been moved to'
    template = (
        '%(explanation)s <a href="%(location)s">%(location)s</a>;\r\n'
        'you should be redirected automatically.\r\n'
        '%(detail)s\r\n<!-- %(comment)s -->')

    def __init__(self, detail=None, headers=None, comment=None):
        assert isinstance(headers, (type(None), list))
        headers = headers or []
        location = header_value(headers, 'location')
        if not location:
            # Backward compatibility: the detail argument is the location.
            location = detail
            detail = ''
            headers.append(('location', location))
        assert location, ("HTTPRedirection specified neither a "
                          "location in the headers nor did it "
                          "provide a detail argument.")
        HTTPRedirection.__init__(self, location, headers, comment)
        if detail is not None:
            self.detail = detail

    @classmethod
    def relative_redirect(cls, dest_uri, environ,
                          detail=None, headers=None, comment=None):
        """
        Create a redirect object with the dest_uri, which may be relative,
        considering it relative to the uri implied by the given environ.
        """
        location = resolve_relative_url(dest_uri, environ)
        headers = headers or []
        headers.append(('Location', location))
        return cls(detail=detail, headers=headers, comment=comment)

    def location(self):
        """Return the value of the (required) Location header."""
        for name, value in self.headers:
            if name.lower() == 'location':
                return value
        raise KeyError("No location set for %s" % self)
+
# Each subclass below only overrides the status ``code``/``title``
# (and sometimes ``explanation``); the redirect mechanics come from
# _HTTPMove.
class HTTPMultipleChoices(_HTTPMove):
    code = 300
    title = 'Multiple Choices'

class HTTPMovedPermanently(_HTTPMove):
    code = 301
    title = 'Moved Permanently'

class HTTPFound(_HTTPMove):
    code = 302
    title = 'Found'
    explanation = 'The resource was found at'

# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(_HTTPMove):
    code = 303
    title = 'See Other'
+
class HTTPNotModified(HTTPRedirection):
    # @@: but not always (HTTP section 14.18.1)...?
    # @@: Removed 'date' requirement, as its not required for an ETag
    # @@: FIXME: This should require either an ETag or a date header
    code = 304
    title = 'Not Modified'
    message = ''
    # @@: should include date header, optionally other headers
    # @@: should not return a content body
    def plain(self, environ):
        """ text/plain representation of the exception (empty body) """
        return ''
    def html(self, environ):
        """ text/html representation of the exception """
        return ''
+
# 305 and 307 also carry the Location header required by _HTTPMove.
class HTTPUseProxy(_HTTPMove):
    # @@: OK, not a move, but looks a little like one
    code = 305
    title = 'Use Proxy'
    explanation = (
        'The resource must be accessed through a proxy '
        'located at')

class HTTPTemporaryRedirect(_HTTPMove):
    code = 307
    title = 'Temporary Redirect'
+
+#
+# 4xx Client Error
+#
+# The 4xx class of status code is intended for cases in which the client
+# seems to have erred. Except when responding to a HEAD request, the
+# server SHOULD include an entity containing an explanation of the error
+# situation, and whether it is a temporary or permanent condition. These
+# status codes are applicable to any request method. User agents SHOULD
+# display any included entity to the user.
+#
+
class HTTPClientError(HTTPError):
    """
    base class for the 400's, where the client is in-error

    This is an error condition in which the client is presumed to be
    in-error. This is an expected problem, and thus is not considered
    a bug. A server-side traceback is not warranted. Unless specialized,
    this is a '400 Bad Request'
    """
    # Defaults used by any subclass that does not specialize them.
    code = 400
    title = 'Bad Request'
    explanation = ('The server could not comply with the request since\r\n'
                   'it is either malformed or otherwise incorrect.\r\n')
+
# Concrete 4xx exceptions. Each subclass customizes only the class
# attributes ``code``, ``title``, and either ``explanation`` or -- when
# values from the WSGI environ must be interpolated -- ``template``.

class HTTPBadRequest(HTTPClientError):
    pass

class HTTPUnauthorized(HTTPClientError):
    code = 401
    title = 'Unauthorized'
    explanation = (
        'This server could not verify that you are authorized to\r\n'
        'access the document you requested. Either you supplied the\r\n'
        'wrong credentials (e.g., bad password), or your browser\r\n'
        'does not understand how to supply the credentials required.\r\n')

class HTTPPaymentRequired(HTTPClientError):
    code = 402
    title = 'Payment Required'
    explanation = ('Access was denied for financial reasons.')

class HTTPForbidden(HTTPClientError):
    code = 403
    title = 'Forbidden'
    explanation = ('Access was denied to this resource.')

class HTTPNotFound(HTTPClientError):
    code = 404
    title = 'Not Found'
    explanation = ('The resource could not be found.')

class HTTPMethodNotAllowed(HTTPClientError):
    required_headers = ('allow',)
    code = 405
    title = 'Method Not Allowed'
    # override template since we need an environment variable
    template = ('The method %(REQUEST_METHOD)s is not allowed for '
                'this resource.\r\n%(detail)s')

class HTTPNotAcceptable(HTTPClientError):
    code = 406
    title = 'Not Acceptable'
    # override template since we need an environment variable
    template = ('The resource could not be generated that was '
                'acceptable to your browser (content\r\nof type '
                '%(HTTP_ACCEPT)s).\r\n%(detail)s')

class HTTPProxyAuthenticationRequired(HTTPClientError):
    code = 407
    title = 'Proxy Authentication Required'
    explanation = ('Authentication /w a local proxy is needed.')

class HTTPRequestTimeout(HTTPClientError):
    code = 408
    title = 'Request Timeout'
    explanation = ('The server has waited too long for the request to '
                   'be sent by the client.')

class HTTPConflict(HTTPClientError):
    code = 409
    title = 'Conflict'
    explanation = ('There was a conflict when trying to complete '
                   'your request.')

class HTTPGone(HTTPClientError):
    code = 410
    title = 'Gone'
    explanation = ('This resource is no longer available. No forwarding '
                   'address is given.')

class HTTPLengthRequired(HTTPClientError):
    code = 411
    title = 'Length Required'
    explanation = ('Content-Length header required.')

class HTTPPreconditionFailed(HTTPClientError):
    code = 412
    title = 'Precondition Failed'
    explanation = ('Request precondition failed.')

class HTTPRequestEntityTooLarge(HTTPClientError):
    code = 413
    title = 'Request Entity Too Large'
    explanation = ('The body of your request was too large for this server.')

class HTTPRequestURITooLong(HTTPClientError):
    code = 414
    title = 'Request-URI Too Long'
    explanation = ('The request URI was too long for this server.')

class HTTPUnsupportedMediaType(HTTPClientError):
    code = 415
    title = 'Unsupported Media Type'
    # override template since we need an environment variable
    template = ('The request media type %(CONTENT_TYPE)s is not '
                'supported by this server.\r\n%(detail)s')

class HTTPRequestRangeNotSatisfiable(HTTPClientError):
    code = 416
    title = 'Request Range Not Satisfiable'
    explanation = ('The Range requested is not available.')

class HTTPExpectationFailed(HTTPClientError):
    code = 417
    title = 'Expectation Failed'
    explanation = ('Expectation failed.')

class HTTPTooManyRequests(HTTPClientError):
    code = 429
    title = 'Too Many Requests'
    explanation = ('The client has sent too many requests to the server.')
+
+#
+# 5xx Server Error
+#
+# Response status codes beginning with the digit "5" indicate cases in
+# which the server is aware that it has erred or is incapable of
+# performing the request. Except when responding to a HEAD request, the
+# server SHOULD include an entity containing an explanation of the error
+# situation, and whether it is a temporary or permanent condition. User
+# agents SHOULD display any included entity to the user. These response
+# codes are applicable to any request method.
+#
+
class HTTPServerError(HTTPError):
    """
    base class for the 500's, where the server is in-error

    This is an error condition in which the server is presumed to be
    in-error. This is usually unexpected, and thus requires a traceback;
    ideally, opening a support ticket for the customer. Unless specialized,
    this is a '500 Internal Server Error'
    """
    # Defaults used by any subclass that does not specialize them.
    code = 500
    title = 'Internal Server Error'
    explanation = (
        'The server has either erred or is incapable of performing\r\n'
        'the requested operation.\r\n')
+
# Concrete 5xx exceptions; each only overrides code/title/explanation
# (or ``template`` where environ values are interpolated).
class HTTPInternalServerError(HTTPServerError):
    pass

class HTTPNotImplemented(HTTPServerError):
    code = 501
    title = 'Not Implemented'
    # override template since we need an environment variable
    template = ('The request method %(REQUEST_METHOD)s is not implemented '
                'for this server.\r\n%(detail)s')

class HTTPBadGateway(HTTPServerError):
    code = 502
    title = 'Bad Gateway'
    explanation = ('Bad gateway.')

class HTTPServiceUnavailable(HTTPServerError):
    code = 503
    title = 'Service Unavailable'
    explanation = ('The server is currently unavailable. '
                   'Please try again at a later time.')

class HTTPGatewayTimeout(HTTPServerError):
    code = 504
    title = 'Gateway Timeout'
    explanation = ('The gateway has timed out.')

class HTTPVersionNotSupported(HTTPServerError):
    code = 505
    title = 'HTTP Version Not Supported'
    explanation = ('The HTTP version is not supported.')
+
# abstract HTTP related exceptions
__all__ = ['HTTPException', 'HTTPRedirection', 'HTTPError' ]

# Register every concrete exception class (anything carrying a status
# code) by code in ``_exceptions`` and export it by name.
_exceptions = {}
for name, value in six.iteritems(dict(globals())):
    if not isinstance(value, (type, six.class_types)):
        continue
    if not (issubclass(value, HTTPException) and value.code):
        continue
    _exceptions[value.code] = value
    __all__.append(name)
+
def get_exception(code):
    """Return the exception class registered for the given status code.

    Raises ``KeyError`` when no class carries that code.
    """
    return _exceptions[code]
+
+############################################################
+## Middleware implementation:
+############################################################
+
class HTTPExceptionHandler(object):
    """
    WSGI middleware that renders ``HTTPException`` errors as responses.

    Any ``HTTPException`` subclass raised by the wrapped application is
    caught and invoked as a WSGI application to produce the matching
    HTTP response. If the headers have already been sent, the stack
    trace propagates as usual, since that indicates a programming error.

    The exception must be raised before the app_iter is returned; this
    cannot catch exceptions raised lazily while a generator app_iter is
    being iterated over.
    """

    def __init__(self, application, warning_level=None):
        # ``warning_level`` is accepted only for backward compatibility.
        assert not warning_level or 99 < warning_level < 600
        if warning_level is not None:
            import warnings
            warnings.warn('The warning_level parameter is not used or supported',
                          DeprecationWarning, 2)
        self.warning_level = warning_level or 500
        self.application = application

    def __call__(self, environ, start_response):
        environ['paste.httpexceptions'] = self
        expected = environ.setdefault('paste.expected_exceptions', [])
        expected.append(HTTPException)
        try:
            return self.application(environ, start_response)
        except HTTPException as exc:
            # Render the exception itself as the WSGI response.
            return exc(environ, start_response)
+
def middleware(*args, **kw):
    """Deprecated entry point; see ``make_middleware``."""
    # deprecated 13 dec 2005
    import warnings
    warnings.warn('httpexceptions.middleware is deprecated; use '
                  'make_middleware or HTTPExceptionHandler instead',
                  DeprecationWarning, 2)
    return make_middleware(*args, **kw)
+
def make_middleware(app, global_conf=None, warning_level=None):
    """
    ``httpexceptions`` middleware; this catches any
    ``paste.httpexceptions.HTTPException`` exceptions (exceptions like
    ``HTTPNotFound``, ``HTTPMovedPermanently``, etc) and turns them
    into proper HTTP responses.

    ``warning_level`` may arrive as a string from the config file; it
    is coerced to an integer when given (the handler itself only emits
    a deprecation warning for it).
    """
    level = int(warning_level) if warning_level else warning_level
    return HTTPExceptionHandler(app, warning_level=level)
+
+__all__.extend(['HTTPExceptionHandler', 'get_exception'])
diff --git a/paste/httpheaders.py b/paste/httpheaders.py
new file mode 100644
index 0000000..5457138
--- /dev/null
+++ b/paste/httpheaders.py
@@ -0,0 +1,1116 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Ian Bicking, Clark C. Evans and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# Some of this code was funded by: http://prometheusresearch.com
+"""
+HTTP Message Header Fields (see RFC 4229)
+
+This contains general support for HTTP/1.1 message headers [1]_ in a
+manner that supports WSGI ``environ`` [2]_ and ``response_headers``
+[3]_. Specifically, this module defines a ``HTTPHeader`` class whose
+instances correspond to field-name items. The actual field-content for
+the message-header is stored in the appropriate WSGI collection (either
+the ``environ`` for requests, or ``response_headers`` for responses).
+
+Each ``HTTPHeader`` instance is a callable (defining ``__call__``)
+that takes one of the following:
+
+ - an ``environ`` dictionary, returning the corresponding header
    value according to the WSGI's ``HTTP_`` prefix mechanism, e.g.,
+ ``USER_AGENT(environ)`` returns ``environ.get('HTTP_USER_AGENT')``
+
+ - a ``response_headers`` list, giving a comma-delimited string for
+ each corresponding ``header_value`` tuple entries (see below).
+
+ - a sequence of string ``*args`` that are comma-delimited into
+ a single string value: ``CONTENT_TYPE("text/html","text/plain")``
+ returns ``"text/html, text/plain"``
+
+ - a set of ``**kwargs`` keyword arguments that are used to create
+ a header value, in a manner dependent upon the particular header in
+ question (to make value construction easier and error-free):
    ``CACHE_CONTROL(max_age=CACHE_CONTROL.ONE_WEEK)``
    returns ``"public, max-age=604800"``
+
+Each ``HTTPHeader`` instance also provides several methods to act on
+a WSGI collection, for removing and setting header values.
+
+ ``delete(collection)``
+
+ This method removes all entries of the corresponding header from
+ the given collection (``environ`` or ``response_headers``), e.g.,
+ ``USER_AGENT.delete(environ)`` deletes the 'HTTP_USER_AGENT' entry
+ from the ``environ``.
+
+ ``update(collection, *args, **kwargs)``
+
+ This method does an in-place replacement of the given header entry,
+ for example: ``CONTENT_LENGTH(response_headers,len(body))``
+
+ The first argument is a valid ``environ`` dictionary or
+ ``response_headers`` list; remaining arguments are passed on to
+ ``__call__(*args, **kwargs)`` for value construction.
+
+ ``apply(collection, **kwargs)``
+
+ This method is similar to update, only that it may affect other
+ headers. For example, according to recommendations in RFC 2616,
+ certain Cache-Control configurations should also set the
+ ``Expires`` header for HTTP/1.0 clients. By default, ``apply()``
+ is simply ``update()`` but limited to keyword arguments.
+
+This particular approach to managing headers within a WSGI collection
+has several advantages:
+
+ 1. Typos in the header name are easily detected since they become a
+ ``NameError`` when executed. The approach of using header strings
+ directly can be problematic; for example, the following should
+ return ``None`` : ``environ.get("HTTP_ACCEPT_LANGUAGES")``
+
+ 2. For specific headers with validation, using ``__call__`` will
+ result in an automatic header value check. For example, the
+ _ContentDisposition header will reject a value having ``maxage``
+ or ``max_age`` (the appropriate parameter is ``max-age`` ).
+
+ 3. When appending/replacing headers, the field-name has the suggested
+ RFC capitalization (e.g. ``Content-Type`` or ``ETag``) for
+ user-agents that incorrectly use case-sensitive matches.
+
  4. Some headers (such as ``Content-Type``) are singletons, that is,
+ only one entry of this type may occur in a given set of
+ ``response_headers``. This module knows about those cases and
+ enforces this cardinality constraint.
+
+ 5. The exact details of WSGI header management are abstracted so
+ the programmer need not worry about operational differences
+ between ``environ`` dictionary or ``response_headers`` list.
+
+ 6. Sorting of ``HTTPHeaders`` is done following the RFC suggestion
+ that general-headers come first, followed by request and response
+ headers, and finishing with entity-headers.
+
+ 7. Special care is given to exceptional cases such as Set-Cookie
+ which violates the RFC's recommendation about combining header
+ content into a single entry using comma separation.
+
+A particular difficulty with HTTP message headers is a categorization
+of sorts as described in section 4.2:
+
+ Multiple message-header fields with the same field-name MAY be
+ present in a message if and only if the entire field-value for
+ that header field is defined as a comma-separated list [i.e.,
+ #(values)]. It MUST be possible to combine the multiple header
+ fields into one "field-name: field-value" pair, without changing
+ the semantics of the message, by appending each subsequent
+ field-value to the first, each separated by a comma.
+
+This creates three fundamentally different kinds of headers:
+
+ - Those that do not have a #(values) production, and hence are
+ singular and may only occur once in a set of response fields;
+ this case is handled by the ``_SingleValueHeader`` subclass.
+
+ - Those which have the #(values) production and follow the
+ combining rule outlined above; our ``_MultiValueHeader`` case.
+
+ - Those which are multi-valued, but cannot be combined (such as the
+ ``Set-Cookie`` header due to its ``Expires`` parameter); or where
+ combining them into a single header entry would cause common
+ user-agents to fail (``WWW-Authenticate``, ``Warning``) since
+ they fail to handle dates even when properly quoted. This case
+ is handled by ``_MultiEntryHeader``.
+
+Since this project does not have time to provide rigorous support
+and validation for all headers, it does a basic construction of
+headers listed in RFC 2616 (plus a few others) so that they can
+be obtained by simply doing ``from paste.httpheaders import *``;
+the name of the header instance is the "common name" less any
+dashes to give CamelCase style names.
+
+.. [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
+.. [2] http://www.python.org/peps/pep-0333.html#environ-variables
+.. [3] http://www.python.org/peps/pep-0333.html#the-start-response-callable
+
+"""
+import mimetypes
+import six
+from time import time as now
+try:
+ # Python 3
+ from email.utils import formatdate, parsedate_tz, mktime_tz
+ from urllib.request import AbstractDigestAuthHandler, parse_keqv_list, parse_http_list
+except ImportError:
+ # Python 2
+ from rfc822 import formatdate, parsedate_tz, mktime_tz
+ from urllib2 import AbstractDigestAuthHandler, parse_keqv_list, parse_http_list
+
+from .httpexceptions import HTTPBadRequest
+
+__all__ = ['get_header', 'list_headers', 'normalize_headers',
+ 'HTTPHeader', 'EnvironVariable' ]
+
class EnvironVariable(str):
    """
    a CGI ``environ`` variable as described by WSGI

    This is a helper object so that standard WSGI ``environ`` variables
    can be extracted w/o syntax error possibility.
    """

    def __call__(self, environ):
        # A missing variable reads as the empty string.
        return environ.get(self, '')

    def __repr__(self):
        return '<EnvironVariable %s>' % self

    def update(self, environ, value):
        environ[self] = value
REMOTE_USER = EnvironVariable("REMOTE_USER")
REMOTE_SESSION = EnvironVariable("REMOTE_SESSION")
AUTH_TYPE = EnvironVariable("AUTH_TYPE")
REQUEST_METHOD = EnvironVariable("REQUEST_METHOD")
SCRIPT_NAME = EnvironVariable("SCRIPT_NAME")
PATH_INFO = EnvironVariable("PATH_INFO")

# Export every EnvironVariable defined above by name.
__all__.extend(
    _name for _name, _obj in dict(globals()).items()
    if isinstance(_obj, EnvironVariable))

# Registry mapping lower-cased field-names to HTTPHeader instances.
_headers = {}
+
class HTTPHeader(object):
    """
    an HTTP header

    HTTPHeader instances represent a particular ``field-name`` of an
    HTTP message header. They do not hold a field-value, but instead
    provide operations that work on its corresponding values. Storage
    of the actual field values is done with WSGI ``environ`` or
    ``response_headers`` as appropriate. Typically, sub-classes that
    represent a specific HTTP header, such as _ContentDisposition, are
    singletons. Once constructed the HTTPHeader instances themselves
    are immutable and stateless.

    For purposes of documentation a "container" refers to either a
    WSGI ``environ`` dictionary, or a ``response_headers`` list.

    Member variables (and correspondingly constructor arguments).

      ``name``

          the ``field-name`` of the header, in "common form"
          as presented in RFC 2616; e.g. 'Content-Type'

      ``category``

          one of 'general', 'request', 'response', or 'entity'

      ``version``

          version of HTTP (informational) with which the header should
          be recognized

      ``sort_order``

          sorting order to be applied before sorting on
          field-name when ordering headers in a response

    Special Methods:

      ``__call__``

          The primary method of the HTTPHeader instance is to make
          it a callable, it takes either a collection, a string value,
          or keyword arguments and attempts to find/construct a valid
          field-value

      ``__lt__``

          This method is used so that HTTPHeader objects can be
          sorted in a manner suggested by RFC 2616.

      ``__str__``

          The string-value for instances of this class is
          the ``field-name``.

    Primary Methods:

      ``delete()``

          remove the all occurrences (if any) of the given
          header in the collection provided

      ``update()``

          replaces (if they exist) all field-value items
          in the given collection with the value provided

      ``tuples()``

          returns a set of (field-name, field-value) tuples
          suitable for extending ``response_headers``

    Custom Methods (these may not be implemented):

      ``apply()``

          similar to ``update``, but with two differences; first,
          only keyword arguments can be used, and second, specific
          sub-classes may introduce side-effects

      ``parse()``

          converts a string value of the header into a more usable
          form, such as time in seconds for a date header, etc.

    The collected versions of initialized header instances are immediately
    registered and accessible through the ``get_header`` function. Do not
    inherit from this directly, use one of ``_SingleValueHeader``,
    ``_MultiValueHeader``, or ``_MultiEntryHeader`` as appropriate.
    """

    #
    # Things which can be customized
    #
    version = '1.1'
    category = 'general'
    reference = ''
    extensions = {}

    def compose(self, **kwargs):
        """
        build header value from keyword arguments

        This method is used to build the corresponding header value when
        keyword arguments (or no arguments) were provided. The result
        should be a sequence of values. For example, the ``Expires``
        header takes a keyword argument ``time`` (e.g. time.time()) from
        which it returns the corresponding date.
        """
        raise NotImplementedError()

    def parse(self, *args, **kwargs):
        """
        convert raw header value into more usable form

        This method invokes ``values()`` with the arguments provided,
        parses the header results, and then returns a header-specific
        data structure corresponding to the header. For example, the
        ``Expires`` header returns seconds (as returned by time.time())
        """
        raise NotImplementedError()

    def apply(self, collection, **kwargs):
        """
        update the collection /w header value (may have side effects)

        This method is similar to ``update`` only that usage may result
        in other headers being changed as recommended by the corresponding
        specification. The return value is defined by the particular
        sub-class. For example, the ``_CacheControl.apply()`` sets the
        ``Expires`` header in addition to its normal behavior.
        """
        self.update(collection, **kwargs)

    #
    # Things which are standardized (mostly)
    #
    def __new__(cls, name, category=None, reference=None, version=None):
        """
        construct a new ``HTTPHeader`` instance

        We use the ``__new__`` operator to ensure that only one
        ``HTTPHeader`` instance exists for each field-name, and to
        register the header so that it can be found/enumerated.
        """
        self = get_header(name, raiseError=False)
        if self:
            # Allow the registration to happen again, but assert
            # that everything is identical.
            assert self.name == name, \
                "duplicate registration with different capitalization"
            assert self.category == category, \
                "duplicate registration with different category"
            assert cls == self.__class__, \
                "duplicate registration with different class"
            return self

        self = object.__new__(cls)
        self.name = name
        assert isinstance(self.name, str)
        self.category = category or self.category
        self.version = version or self.version
        self.reference = reference or self.reference
        # Register under the lower-cased field-name for get_header() lookup.
        _headers[self.name.lower()] = self
        self.sort_order = {'general': 1, 'request': 2,
                           'response': 3, 'entity': 4 }[self.category]
        # Cached lookup keys: the CGI-style environ key and the
        # lower-cased response-headers key.
        self._environ_name = getattr(self, '_environ_name',
                                     'HTTP_'+ self.name.upper().replace("-","_"))
        self._headers_name = getattr(self, '_headers_name',
                                     self.name.lower())
        assert self.version in ('1.1', '1.0', '0.9')
        return self

    def __str__(self):
        return self.name

    def __lt__(self, other):
        """
        sort header instances as specified by RFC 2616

        Re-define sorting so that general headers are first, followed
        by request/response headers, and then entity headers. The
        list.sort() methods use the less-than operator for this purpose.
        """
        if isinstance(other, HTTPHeader):
            if self.sort_order != other.sort_order:
                return self.sort_order < other.sort_order
            return self.name < other.name
        return False

    def __repr__(self):
        ref = self.reference and (' (%s)' % self.reference) or ''
        return '<%s %s%s>' % (self.__class__.__name__, self.name, ref)

    def values(self, *args, **kwargs):
        """
        find/construct field-value(s) for the given header

        Resolution is done according to the following arguments:

        - If only keyword arguments are given, then this is equivalent
          to ``compose(**kwargs)``.

        - If the first (and only) argument is a dict, it is assumed
          to be a WSGI ``environ`` and the result of the corresponding
          ``HTTP_`` entry is returned.

        - If the first (and only) argument is a list, it is assumed
          to be a WSGI ``response_headers`` and the field-value(s)
          for this header are collected and returned.

        - In all other cases, the arguments are collected, checked that
          they are string values, possibly verified by the header's
          logic, and returned.

        At this time it is an error to provide keyword arguments if args
        is present (this might change). It is an error to provide both
        a WSGI object and also string arguments. If no arguments are
        provided, then ``compose()`` is called to provide a default
        value for the header; if there is no default it is an error.
        """
        if not args:
            return self.compose(**kwargs)
        if list == type(args[0]):
            # response_headers: collect every matching field-value.
            assert 1 == len(args)
            result = []
            name = self.name.lower()
            for value in [value for header, value in args[0]
                          if header.lower() == name]:
                result.append(value)
            return result
        if dict == type(args[0]):
            # environ: a single optional entry under the HTTP_* key.
            assert 1 == len(args) and 'wsgi.version' in args[0]
            value = args[0].get(self._environ_name)
            if not value:
                return ()
            return (value,)
        for item in args:
            assert not type(item) in (dict, list)
        return args

    def __call__(self, *args, **kwargs):
        """
        converts ``values()`` into a string value

        This method converts the results of ``values()`` into a string
        value for common usage. By default, it is asserted that only
        one value exists; if you need to access all values then either
        call ``values()`` directly, or inherit ``_MultiValueHeader``
        which overrides this method to return a comma separated list of
        values as described by section 4.2 of RFC 2616.
        """
        values = self.values(*args, **kwargs)
        assert isinstance(values, (tuple, list))
        if not values:
            return ''
        assert len(values) == 1, "more than one value: %s" % repr(values)
        return str(values[0]).strip()

    def delete(self, collection):
        """
        removes all occurances of the header from the collection provided
        """
        if type(collection) == dict:
            if self._environ_name in collection:
                del collection[self._environ_name]
            return self
        assert list == type(collection)
        # In-place removal by index; deleting shifts items left, so the
        # index only advances when the current entry is kept.
        i = 0
        while i < len(collection):
            if collection[i][0].lower() == self._headers_name:
                del collection[i]
                continue
            i += 1

    def update(self, collection, *args, **kwargs):
        """
        updates the collection with the provided header value

        This method replaces (in-place when possible) all occurrences of
        the given header with the provided value. If no value is
        provided, this is the same as ``remove`` (note that this case
        can only occur if the target is a collection w/o a corresponding
        header value). The return value is the new header value (which
        could be a list for ``_MultiEntryHeader`` instances).
        """
        value = self.__call__(*args, **kwargs)
        if not value:
            self.delete(collection)
            return
        if type(collection) == dict:
            collection[self._environ_name] = value
            return
        assert list == type(collection)
        # Replace the first matching entry in place; drop any duplicates.
        i = 0
        found = False
        while i < len(collection):
            if collection[i][0].lower() == self._headers_name:
                if found:
                    del collection[i]
                    continue
                collection[i] = (self.name, value)
                found = True
            i += 1
        if not found:
            collection.append((self.name, value))

    def tuples(self, *args, **kwargs):
        # Return () when there is no value, else a single-entry list
        # suitable for extending ``response_headers``.
        value = self.__call__(*args, **kwargs)
        if not value:
            return ()
        return [(self.name, value)]
+
class _SingleValueHeader(HTTPHeader):
    """
    a ``HTTPHeader`` with exactly a single value

    This is the default behavior of ``HTTPHeader``, where returning
    the string-value of headers via ``__call__`` assumes that only
    a single value exists.
    """
+
class _MultiValueHeader(HTTPHeader):
    """
    a ``HTTPHeader`` with one or more values

    The field-value for these header instances is allowed to be more
    than one value; the ``__call__`` method returns a comma separated
    list as described by section 4.2 of RFC 2616.
    """

    def __call__(self, *args, **kwargs):
        # Collapse all found values into a single comma-separated string.
        found = self.values(*args, **kwargs)
        if not found:
            return ''
        return ", ".join(str(item).strip() for item in found)

    def parse(self, *args, **kwargs):
        # Split the combined field-value back into its non-empty parts.
        combined = self.__call__(*args, **kwargs)
        return [part.strip() for part in combined.split(',') if part.strip()]
+
class _MultiEntryHeader(HTTPHeader):
    """
    a multi-value ``HTTPHeader`` where items cannot be combined with a comma

    This header is multi-valued, but its values must not be combined
    with a comma, either because the header is not in compliance with
    RFC 2616 (Set-Cookie due to its Expires parameter) or because
    common user-agents misbehave when the values are combined.
    """

    def update(self, collection, *args, **kwargs):
        # Only a response_headers list can hold multiple entries.
        assert list == type(collection), "``environ`` may not be updated"
        self.delete(collection)
        collection.extend(self.tuples(*args, **kwargs))

    def tuples(self, *args, **kwargs):
        entries = self.values(*args, **kwargs)
        if not entries:
            return ()
        return [(self.name, entry.strip()) for entry in entries]
+
def get_header(name, raiseError=True):
    """
    look up the ``HTTPHeader`` instance registered for ``name``

    Underscores in ``name`` are converted to dashes before the lookup
    so that python-style names can be used.  When no header is found,
    an ``AssertionError`` is raised unless ``raiseError`` is false, in
    which case ``None`` is returned.
    """
    lookup_key = str(name).strip().lower().replace("_", "-")
    found = _headers.get(lookup_key)
    if raiseError and not found:
        raise AssertionError("'%s' is an unknown header" % name)
    return found
+
def list_headers(general=None, request=None, response=None, entity=None):
    """
    list all registered headers, optionally filtered by category

    Each keyword selects one category ('general', 'request',
    'response', 'entity'); when none of them is truthy, headers from
    every category are returned.
    """
    if not (general or request or response or entity):
        general = request = response = entity = True
    # the original loop variable shadowed the builtin ``bool``;
    # use descriptive names instead
    search = [label for (wanted, label) in
              ((general, 'general'), (request, 'request'),
               (response, 'response'), (entity, 'entity'))
              if wanted]
    return [head for head in _headers.values() if head.category in search]
+
def normalize_headers(response_headers, strict=True):
    """
    sort headers as suggested by RFC 2616

    This alters the underlying response_headers in place to use the
    common name for each header; as well as sorting them with general
    headers first, followed by request/response headers, then entity
    headers, and unknown headers last.

    ``strict`` is passed through to ``get_header``: when true, an
    unknown header name raises ``AssertionError``; otherwise the name
    is title-cased and sorted to the end (sort key 4).
    """
    category = {}
    # enumerate instead of range(len(...)) while rewriting in place
    for idx, (key, val) in enumerate(response_headers):
        head = get_header(key, strict)
        if not head:
            # unknown header: canonicalize capitalization, sort last
            newhead = '-'.join(x.capitalize()
                               for x in key.replace("_", "-").split("-"))
            response_headers[idx] = (newhead, val)
            category[newhead] = 4
            continue
        response_headers[idx] = (str(head), val)
        category[str(head)] = head.sort_order
    # sort by category first, then alphabetically within a category
    response_headers.sort(key=lambda item: (category[item[0]], item[0]))
+
class _DateHeader(_SingleValueHeader):
    """
    handle date-based headers

    This extends ``_SingleValueHeader`` with specific treatment of
    time values:

    - ``compose`` takes a ``time`` (seconds since the epoch, defaulting
      to now) plus an optional integer ``delta`` offset in seconds.

    - ``parse`` converts the header's string value back into seconds
      since the epoch.
    """

    def compose(self, time=None, delta=None):
        when = time or now()
        if delta:
            assert type(delta) == int
            when += delta
        return (formatdate(when),)

    def parse(self, *args, **kwargs):
        """ return the time value (in seconds since 1970) """
        stamp = self.__call__(*args, **kwargs)
        if stamp:
            try:
                return mktime_tz(parsedate_tz(stamp))
            except (TypeError, OverflowError):
                # unparseable or out-of-range date string
                raise HTTPBadRequest((
                    "Received an ill-formed timestamp for %s: %s\r\n") %
                    (self.name, stamp))
+
+#
+# Following are specific HTTP headers. Since these classes are mostly
+# singletons, there is no point in keeping the class around once it has
+# been instantiated, so we use the same name.
+#
+
class _CacheControl(_MultiValueHeader):
    """
    Cache-Control, RFC 2616 14.9 (use ``CACHE_CONTROL``)

    This header can be constructed (using keyword arguments), by
    first specifying one of the following mechanisms:

    ``public``

        if True, this argument specifies that the
        response, as a whole, may be cached.

    ``private``

        if True, this argument specifies that the response, as a
        whole, may be cached; this implementation does not support
        the enumeration of private fields

    ``no_cache``

        if True, this argument specifies that the response, as a
        whole, may not be cached; this implementation does not
        support the enumeration of private fields

    In general, only one of the above three may be True, the other 2
    must then be False or None.  If all three are None, then the cache
    is assumed to be ``public``.  Following one of these mechanism
    specifiers are various modifiers:

    ``no_store``

        indicates if content may be stored on disk;
        otherwise cache is limited to memory (note:
        users can still save the data, this applies
        to intermediate caches)

    ``max_age``

        the maximum duration (in seconds) for which
        the content should be cached; if ``no-cache``
        is specified, this defaults to 0 seconds

    ``s_maxage``

        the maximum duration (in seconds) for which the
        content should be allowed in a shared cache.

    ``no_transform``

        specifies that an intermediate cache should
        not convert the content from one type to
        another (e.g. transform a BMP to a PNG).

    ``extensions``

        gives additional cache-control extensions,
        such as items like, community="UCI" (14.9.6)

    The usage of ``apply()`` on this header has side-effects.  As
    recommended by RFC 2616, if ``max_age`` is provided, then the
    ``Expires`` header is also calculated for HTTP/1.0 clients and
    proxies (this is done at the time ``apply()`` is called).  For
    ``no-cache`` and for ``private`` cases, we either do not want the
    response cached or do not want any response accidentally returned
    to other users; so to prevent this case, we set the ``Expires``
    header to the time of the request, signifying to HTTP/1.0
    transports that the content isn't to be cached.  If you are using
    SSL, your communication is already "private", so to work with
    HTTP/1.0 browsers over SSL, consider specifying your cache as
    ``public`` as the distinction between public and private is moot.
    """

    # common values for max-age; "good enough" approximates
    ONE_HOUR = 60*60
    ONE_DAY = ONE_HOUR * 24
    ONE_WEEK = ONE_DAY * 7
    ONE_MONTH = ONE_DAY * 30
    ONE_YEAR = ONE_WEEK * 52

    def _compose(self, public=None, private=None, no_cache=None,
                 no_store=False, max_age=None, s_maxage=None,
                 no_transform=False, **extensions):
        # Build the list of cache directives plus the Expires offset
        # (in seconds) that apply() uses; returns (directives, expires).
        assert isinstance(max_age, (type(None), int))
        assert isinstance(s_maxage, (type(None), int))
        expires = 0
        result = []
        if private is True:
            assert not public and not no_cache and not s_maxage
            result.append('private')
        elif no_cache is True:
            assert not public and not private and not max_age
            result.append('no-cache')
        else:
            # the default mechanism is 'public'
            assert public is None or public is True
            assert not private and not no_cache
            # for public responses, Expires mirrors max_age (if any)
            expires = max_age
            result.append('public')
        if no_store:
            result.append('no-store')
        if no_transform:
            result.append('no-transform')
        if max_age is not None:
            result.append('max-age=%d' % max_age)
        if s_maxage is not None:
            result.append('s-maxage=%d' % s_maxage)
        for (k, v) in six.iteritems(extensions):
            # only extensions declared on this instance are permitted
            if k not in self.extensions:
                raise AssertionError("unexpected extension used: '%s'" % k)
            result.append('%s="%s"' % (k.replace("_", "-"), v))
        return (result, expires)

    def compose(self, **kwargs):
        # public composition entry point; drops the Expires side-channel
        (result, expires) = self._compose(**kwargs)
        return result

    def apply(self, collection, **kwargs):
        """ returns the offset expiration in seconds """
        (result, expires) = self._compose(**kwargs)
        if expires is not None:
            # side-effect: also set Expires for HTTP/1.0 caches
            EXPIRES.update(collection, delta=expires)
        self.update(collection, *result)
        return expires

_CacheControl('Cache-Control', 'general', 'RFC 2616, 14.9')
+
class _ContentType(_SingleValueHeader):
    """
    Content-Type, RFC 2616 section 14.17

    Unlike other headers, use the CGI variable instead.
    """
    version = '1.0'
    _environ_name = 'CONTENT_TYPE'

    # common mimetype constants
    UNKNOWN = 'application/octet-stream'
    TEXT_PLAIN = 'text/plain'
    TEXT_HTML = 'text/html'
    TEXT_XML = 'text/xml'

    def compose(self, major=None, minor=None, charset=None):
        """Build a ``major/minor`` mimetype, optionally with charset."""
        if not major:
            if minor in ('plain', 'html', 'xml'):
                # a bare well-known minor type implies the text major
                major = 'text'
            else:
                assert not minor and not charset
                return (self.UNKNOWN,)
        subtype = minor or "*"
        mimetype = "%s/%s" % (major, subtype)
        if charset:
            mimetype += "; charset=%s" % charset
        return (mimetype,)

_ContentType('Content-Type', 'entity', 'RFC 2616, 14.17')
+
class _ContentLength(_SingleValueHeader):
    """
    Content-Length, RFC 2616 section 14.13

    Unlike other headers, use the CGI variable instead.
    """
    version = "1.0"
    # read from the CGI-style CONTENT_LENGTH key rather than HTTP_*
    _environ_name = 'CONTENT_LENGTH'

_ContentLength('Content-Length', 'entity', 'RFC 2616, 14.13')
+
class _ContentDisposition(_SingleValueHeader):
    """
    Content-Disposition, RFC 2183 (use ``CONTENT_DISPOSITION``)

    This header can be constructed (using keyword arguments),
    by first specifying one of the following mechanisms:

    ``attachment``

        if True, this specifies that the content should not be
        shown in the browser and should be handled externally,
        even if the browser could render the content

    ``inline``

        exclusive with attachment; indicates that the content
        should be rendered in the browser if possible, but
        otherwise it should be handled externally

    Only one of the above 2 may be True.  If both are None, then
    the disposition is assumed to be an ``attachment``.  These are
    distinct fields since support for field enumeration may be
    added in the future.

    ``filename``

        the filename parameter, if any, to be reported; if
        this is None, then the current object's filename
        attribute is used

    The usage of ``apply()`` on this header has side-effects.  If
    filename is provided, and Content-Type is not set or is
    'application/octet-stream', then the mimetypes.guess is used to
    upgrade the Content-Type setting.
    """

    def _compose(self, attachment=None, inline=None, filename=None):
        # Returns ((header-value,), filename) so that apply() can use
        # the bare filename for its Content-Type side-effect.
        result = []
        if inline is True:
            assert not attachment
            result.append('inline')
        else:
            # default disposition is 'attachment'
            assert not inline
            result.append('attachment')
        if filename:
            assert '"' not in filename
            # strip any directory components, '/' or '\' separated
            filename = filename.split("/")[-1]
            filename = filename.split("\\")[-1]
            result.append('filename="%s"' % filename)
        return (("; ".join(result),), filename)

    def compose(self, **kwargs):
        # public composition entry point; drops the filename side-channel
        (result, mimetype) = self._compose(**kwargs)
        return result

    def apply(self, collection, **kwargs):
        """ return the new Content-Type side-effect value """
        (result, filename) = self._compose(**kwargs)
        mimetype = CONTENT_TYPE(collection)
        if filename and (not mimetype or CONTENT_TYPE.UNKNOWN == mimetype):
            # upgrade a missing/generic Content-Type from the filename
            mimetype, _ = mimetypes.guess_type(filename)
            if mimetype and CONTENT_TYPE.UNKNOWN != mimetype:
                CONTENT_TYPE.update(collection, mimetype)
        self.update(collection, *result)
        return mimetype

_ContentDisposition('Content-Disposition', 'entity', 'RFC 2183')
+
class _IfModifiedSince(_DateHeader):
    """
    If-Modified-Since, RFC 2616 section 14.25
    """
    version = '1.0'

    def __call__(self, *args, **kwargs):
        """
        Split the value on ';' in case the header includes extra
        attributes.  E.g. IE 6 is known to send:
        If-Modified-Since: Sun, 25 Jun 2006 20:36:35 GMT; length=1506
        """
        raw = _DateHeader.__call__(self, *args, **kwargs)
        return raw.split(';', 1)[0]

    def parse(self, *args, **kwargs):
        """Parse the timestamp, rejecting values from the future."""
        stamp = _DateHeader.parse(self, *args, **kwargs)
        if stamp and stamp > now():
            raise HTTPBadRequest((
                "Please check your system clock.\r\n"
                "According to this server, the time provided in the\r\n"
                "%s header is in the future.\r\n") % self.name)
        return stamp

_IfModifiedSince('If-Modified-Since', 'request', 'RFC 2616, 14.25')
+
class _Range(_MultiValueHeader):
    """
    Range, RFC 2616 14.35 (use ``RANGE``)

    According to section 14.16, the response to this message should be a
    206 Partial Content and that if multiple non-overlapping byte ranges
    are requested (it is an error to request multiple overlapping
    ranges) the result should be sent as multipart/byteranges mimetype.

    The server should respond with '416 Requested Range Not Satisfiable'
    if the requested ranges are out-of-bounds.  The specification also
    indicates that a syntax error in the Range request should result in
    the header being ignored rather than a '400 Bad Request'.
    """

    def parse(self, *args, **kwargs):
        """
        Returns a tuple (units, list), where list is a sequence of
        (begin, end) tuples; and end is None if it was not provided.
        Returns None for a missing, malformed, or overlapping header.
        """
        value = self.__call__(*args, **kwargs)
        if not value:
            return None
        ranges = []
        last_end = -1
        try:
            # avoid shadowing the builtin ``range``
            (units, range_spec) = value.split("=", 1)
            units = units.strip().lower()
            for item in range_spec.split(","):
                (begin, end) = item.split("-")
                if not begin.strip():
                    begin = 0
                else:
                    begin = int(begin)
                # last_end is None after an open-ended range, in which
                # case any further range necessarily overlaps; the old
                # ``begin <= last_end`` comparison raised an uncaught
                # TypeError against None on Python 3
                if last_end is None or begin <= last_end:
                    raise ValueError()
                if not end.strip():
                    end = None
                else:
                    end = int(end)
                last_end = end
                ranges.append((begin, end))
        except ValueError:
            # In this case where the Range header is malformed,
            # section 14.16 says to treat the request as if the
            # Range header was not present.  How do I log this?
            return None
        return (units, ranges)

_Range('Range', 'request', 'RFC 2616, 14.35')
+
class _AcceptLanguage(_MultiValueHeader):
    """
    Accept-Language, RFC 2616 section 14.4
    """

    def parse(self, *args, **kwargs):
        """
        Return a list of language tags sorted by their "q" values.  For
        example, "en-us,en;q=0.5" should return ``["en-us", "en"]``.  If
        there is no ``Accept-Language`` header present, default to ``[]``.
        """
        header = self.__call__(*args, **kwargs)
        # __call__ returns '' (never None) when the header is absent,
        # so test truthiness rather than ``is None``
        if not header:
            return []
        langs = [v for v in header.split(",") if v]
        qs = []
        for lang in langs:
            pieces = lang.split(";")
            lang, params = pieces[0].strip().lower(), pieces[1:]
            q = 1
            for param in params:
                if '=' not in param:
                    # Malformed request; probably a bot, we'll ignore
                    continue
                # maxsplit=1 so an extra '=' cannot raise ValueError
                lvalue, rvalue = param.split("=", 1)
                lvalue = lvalue.strip().lower()
                rvalue = rvalue.strip()
                if lvalue == "q":
                    try:
                        q = float(rvalue)
                    except ValueError:
                        # malformed q value; keep the default weight
                        continue
            qs.append((lang, q))
        qs.sort(key=lambda query: query[1], reverse=True)
        return [lang for (lang, q) in qs]

_AcceptLanguage('Accept-Language', 'request', 'RFC 2616, 14.4')
+
class _AcceptRanges(_MultiValueHeader):
    """
    Accept-Ranges, RFC 2616 section 14.5
    """
    def compose(self, none=None, bytes=None):
        """Advertise 'bytes' when byte ranges are accepted, else 'none'."""
        return ('bytes',) if bytes else ('none',)

_AcceptRanges('Accept-Ranges', 'response', 'RFC 2616, 14.5')
+
class _ContentRange(_SingleValueHeader):
    """
    Content-Range, RFC 2616 section 14.6
    """
    def compose(self, first_byte=None, last_byte=None, total_length=None):
        """
        Build ``bytes first-last/total``; ``last_byte`` may be -1 to
        denote an unbounded end position.
        """
        # validate before formatting, so invalid input fails with the
        # intended AssertionError instead of a TypeError raised by the
        # '%' formatting (which previously ran first)
        assert last_byte == -1 or first_byte <= last_byte
        assert last_byte < total_length
        return ("bytes %d-%d/%d" % (first_byte, last_byte, total_length),)

_ContentRange('Content-Range', 'entity', 'RFC 2616, 14.6')
+
class _Authorization(_SingleValueHeader):
    """
    Authorization, RFC 2617 (RFC 2616, 14.8)
    """
    def compose(self, digest=None, basic=None, username=None, password=None,
                challenge=None, path=None, method=None):
        """
        Build a Basic or Digest authorization value.

        Basic is chosen when ``basic`` is set or no ``challenge`` is
        given; otherwise ``challenge`` (a WWW-Authenticate value) is
        parsed and a Digest response is computed via urllib's
        ``AbstractDigestAuthHandler``.
        """
        import base64
        assert username and password
        if basic or not challenge:
            assert not digest
            userpass = "%s:%s" % (username.strip(), password.strip())
            # str.encode('base64') existed only on Python 2; use the
            # base64 module so this works on Python 2 and 3 alike
            encoded = base64.b64encode(userpass.encode('utf-8')).decode('ascii')
            # return a tuple, consistent with every other compose()
            return ("Basic %s" % encoded.strip(),)
        assert challenge and not basic
        path = path or "/"
        (_, realm) = challenge.split('realm="')
        (realm, _) = realm.split('"', 1)
        auth = AbstractDigestAuthHandler()
        auth.add_password(realm, path, username, password)
        (token, challenge) = challenge.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        class FakeRequest(object):
            # minimal request object satisfying AbstractDigestAuthHandler
            if six.PY3:
                @property
                def full_url(self):
                    return path

                selector = full_url

                @property
                def data(self):
                    return None
            else:
                def get_full_url(self):
                    return path

                get_selector = get_full_url

                def has_data(self):
                    return False

            def get_method(self):
                return method or "GET"

        retval = "Digest %s" % auth.get_authorization(FakeRequest(), chal)
        return (retval,)

_Authorization('Authorization', 'request', 'RFC 2617')
+
+#
+# For now, construct a minimalistic version of the field-names; at a
+# later date more complicated headers may sprout content constructors.
+# The items commented out have concrete variants.
+#
# Instantiate the remaining simple headers from a declarative table of
# (name, category, HTTP version, style, RFC comment); the rows that are
# commented out have concrete class-based variants defined above.
for (name, category, version, style, comment) in \
(("Accept" ,'request' ,'1.1','multi-value','RFC 2616, 14.1' )
,("Accept-Charset" ,'request' ,'1.1','multi-value','RFC 2616, 14.2' )
,("Accept-Encoding" ,'request' ,'1.1','multi-value','RFC 2616, 14.3' )
#,("Accept-Language" ,'request' ,'1.1','multi-value','RFC 2616, 14.4' )
#,("Accept-Ranges" ,'response','1.1','multi-value','RFC 2616, 14.5' )
,("Age" ,'response','1.1','singular' ,'RFC 2616, 14.6' )
,("Allow" ,'entity' ,'1.0','multi-value','RFC 2616, 14.7' )
#,("Authorization" ,'request' ,'1.0','singular' ,'RFC 2616, 14.8' )
#,("Cache-Control" ,'general' ,'1.1','multi-value','RFC 2616, 14.9' )
,("Cookie" ,'request' ,'1.0','multi-value','RFC 2109/Netscape')
,("Connection" ,'general' ,'1.1','multi-value','RFC 2616, 14.10')
,("Content-Encoding" ,'entity' ,'1.0','multi-value','RFC 2616, 14.11')
#,("Content-Disposition",'entity' ,'1.1','multi-value','RFC 2616, 15.5' )
,("Content-Language" ,'entity' ,'1.1','multi-value','RFC 2616, 14.12')
#,("Content-Length" ,'entity' ,'1.0','singular' ,'RFC 2616, 14.13')
,("Content-Location" ,'entity' ,'1.1','singular' ,'RFC 2616, 14.14')
,("Content-MD5" ,'entity' ,'1.1','singular' ,'RFC 2616, 14.15')
#,("Content-Range" ,'entity' ,'1.1','singular' ,'RFC 2616, 14.16')
#,("Content-Type" ,'entity' ,'1.0','singular' ,'RFC 2616, 14.17')
,("Date" ,'general' ,'1.0','date-header','RFC 2616, 14.18')
,("ETag" ,'response','1.1','singular' ,'RFC 2616, 14.19')
,("Expect" ,'request' ,'1.1','multi-value','RFC 2616, 14.20')
,("Expires" ,'entity' ,'1.0','date-header','RFC 2616, 14.21')
,("From" ,'request' ,'1.0','singular' ,'RFC 2616, 14.22')
,("Host" ,'request' ,'1.1','singular' ,'RFC 2616, 14.23')
,("If-Match" ,'request' ,'1.1','multi-value','RFC 2616, 14.24')
#,("If-Modified-Since" ,'request' ,'1.0','date-header','RFC 2616, 14.25')
,("If-None-Match" ,'request' ,'1.1','multi-value','RFC 2616, 14.26')
,("If-Range" ,'request' ,'1.1','singular' ,'RFC 2616, 14.27')
,("If-Unmodified-Since",'request' ,'1.1','date-header' ,'RFC 2616, 14.28')
,("Last-Modified" ,'entity' ,'1.0','date-header','RFC 2616, 14.29')
,("Location" ,'response','1.0','singular' ,'RFC 2616, 14.30')
,("Max-Forwards" ,'request' ,'1.1','singular' ,'RFC 2616, 14.31')
,("Pragma" ,'general' ,'1.0','multi-value','RFC 2616, 14.32')
,("Proxy-Authenticate" ,'response','1.1','multi-value','RFC 2616, 14.33')
,("Proxy-Authorization",'request' ,'1.1','singular' ,'RFC 2616, 14.34')
#,("Range" ,'request' ,'1.1','multi-value','RFC 2616, 14.35')
,("Referer" ,'request' ,'1.0','singular' ,'RFC 2616, 14.36')
,("Retry-After" ,'response','1.1','singular' ,'RFC 2616, 14.37')
,("Server" ,'response','1.0','singular' ,'RFC 2616, 14.38')
,("Set-Cookie" ,'response','1.0','multi-entry','RFC 2109/Netscape')
,("TE" ,'request' ,'1.1','multi-value','RFC 2616, 14.39')
,("Trailer" ,'general' ,'1.1','multi-value','RFC 2616, 14.40')
,("Transfer-Encoding" ,'general' ,'1.1','multi-value','RFC 2616, 14.41')
,("Upgrade" ,'general' ,'1.1','multi-value','RFC 2616, 14.42')
,("User-Agent" ,'request' ,'1.0','singular' ,'RFC 2616, 14.43')
,("Vary" ,'response','1.1','multi-value','RFC 2616, 14.44')
,("Via" ,'general' ,'1.1','multi-value','RFC 2616, 14.45')
,("Warning" ,'general' ,'1.1','multi-entry','RFC 2616, 14.46')
,("WWW-Authenticate" ,'response','1.0','multi-entry','RFC 2616, 14.47')):
    # map each declared style onto the implementing header class, then
    # instantiate it; the HTTPHeader constructor registers the instance
    # (in ``_headers``), so the local class reference can be dropped
    klass = {'multi-value': _MultiValueHeader,
             'multi-entry': _MultiEntryHeader,
             'date-header': _DateHeader,
             'singular' : _SingleValueHeader}[style]
    klass(name, category, comment, version).__doc__ = comment
    del klass
+
# Expose every registered header instance as a module-level constant
# (e.g. CONTENT_TYPE for 'Content-Type') and export it via __all__.
for head in _headers.values():
    headname = head.name.replace("-","_").upper()
    locals()[headname] = head
    __all__.append(headname)
+
# For the pudge documentation tool: also publish the HTTPHeader
# subclasses themselves, in addition to the instances in __all__.
__pudge_all__ = __all__[:]
for _name, _obj in six.iteritems(dict(globals())):
    if isinstance(_obj, type) and issubclass(_obj, HTTPHeader):
        __pudge_all__.append(_name)
diff --git a/paste/httpserver.py b/paste/httpserver.py
new file mode 100755
index 0000000..035d818
--- /dev/null
+++ b/paste/httpserver.py
@@ -0,0 +1,1430 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+WSGI HTTP Server
+
+This is a minimalistic WSGI server using Python's built-in BaseHTTPServer;
+if pyOpenSSL is installed, it also provides SSL capabilities.
+"""
+
+# @@: add in protection against HTTP/1.0 clients who claim to
+# be 1.1 but do not send a Content-Length
+
+# @@: add support for chunked encoding, this is not a 1.1 server
+# till this is completed.
+
+from __future__ import print_function
+import atexit
+import traceback
+import socket, sys, threading
+import posixpath
+import six
+import time
+import os
+from itertools import count
+from six.moves import _thread
+from six.moves import queue
+from six.moves.BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
+from six.moves.socketserver import ThreadingMixIn
+from six.moves.urllib.parse import unquote, urlsplit
+from paste.util import converters
+import logging
+try:
+ from paste.util import killthread
+except ImportError:
+ # Not available, probably no ctypes
+ killthread = None
+
+__all__ = ['WSGIHandlerMixin', 'WSGIServer', 'WSGIHandler', 'serve']
+__version__ = "0.5"
+
+
+def _get_headers(headers, k):
+ """
+ Private function for abstracting differences in getting HTTP request
+ headers on Python 2 vs. Python 3
+ """
+
+ if hasattr(headers, 'get_all'):
+ return headers.get_all(k) # Python 3 - email.message.Message
+ else:
+ return headers.getheaders(k) # Python 2 - mimetools.Message
+
+
class ContinueHook(object):
    """
    rfile wrapper implementing 'Expect: 100-continue' support

    When a client request includes a 'Expect: 100-continue' header, it
    is the responsibility of the server to send 100 Continue once it is
    ready for the content body.  This allows authentication, access
    levels, and other exceptions to be detected *before* bandwidth is
    spent on the request body.

    The wrapper sends 100 Continue to the client the first time the
    content is requested via a read() operation on the rfile stream;
    after that response is sent it becomes a pass-through object.
    """

    def __init__(self, rfile, write):
        self._ContinueFile_rfile = rfile
        self._ContinueFile_write = write
        # mirror the wrapped file's bookkeeping attributes
        for name in ('close', 'closed', 'fileno', 'flush',
                     'mode', 'bufsize', 'softspace'):
            if hasattr(rfile, name):
                setattr(self, name, getattr(rfile, name))
        # route the read methods through the one-shot interceptors
        for name in ('read', 'readline', 'readlines'):
            if hasattr(rfile, name):
                setattr(self, name, getattr(self, '_ContinueFile_' + name))

    def _ContinueFile_send(self):
        # first body access: emit the interim response, then become a
        # transparent proxy by rebinding the real read methods
        self._ContinueFile_write("HTTP/1.1 100 Continue\r\n\r\n")
        wrapped = self._ContinueFile_rfile
        for name in ('read', 'readline', 'readlines'):
            if hasattr(wrapped, name):
                setattr(self, name, getattr(wrapped, name))

    def _ContinueFile_read(self, size=-1):
        self._ContinueFile_send()
        return self._ContinueFile_rfile.read(size)

    def _ContinueFile_readline(self, size=-1):
        self._ContinueFile_send()
        return self._ContinueFile_rfile.readline(size)

    def _ContinueFile_readlines(self, sizehint=0):
        self._ContinueFile_send()
        return self._ContinueFile_rfile.readlines(sizehint)
+
class WSGIHandlerMixin:
    """
    WSGI mix-in for HTTPRequestHandler

    This class is a mix-in to provide WSGI functionality to any
    HTTPRequestHandler derivative (as provided in Python's
    BaseHTTPServer).  This assumes a ``wsgi_application`` handler on
    ``self.server``.
    """
    # when true, REMOTE_HOST may be resolved from the client address;
    # currently a no-op (see wsgi_setup below)
    lookup_addresses = True

    def log_request(self, *args, **kwargs):
        """ disable success request logging

        Logging transactions should not be part of a WSGI server,
        if you want logging; look at paste.translogger
        """
        pass

    def log_message(self, *args, **kwargs):
        """ disable error message logging

        Logging transactions should not be part of a WSGI server,
        if you want logging; look at paste.translogger
        """
        pass

    def version_string(self):
        """ behavior that BaseHTTPServer should have had """
        if not self.sys_version:
            return self.server_version
        else:
            return self.server_version + ' ' + self.sys_version

    def wsgi_write_chunk(self, chunk):
        """
        Write a chunk of the output stream; send headers if they
        have not already been sent.

        NOTE(review): chunks are written to ``wfile`` as-is; callers
        appear to pass str in places (see wsgi_execute) — confirm
        bytes handling on Python 3.
        """
        if not self.wsgi_headers_sent and not self.wsgi_curr_headers:
            raise RuntimeError(
                "Content returned before start_response called")
        if not self.wsgi_headers_sent:
            # first write: flush the stored status line and headers
            self.wsgi_headers_sent = True
            (status, headers) = self.wsgi_curr_headers
            code, message = status.split(" ", 1)
            self.send_response(int(code), message)
            #
            # HTTP/1.1 compliance; either send Content-Length or
            # signal that the connection is being closed.
            #
            send_close = True
            for (k, v) in headers:
                lk = k.lower()
                if 'content-length' == lk:
                    send_close = False
                if 'connection' == lk:
                    if 'close' == v.lower():
                        self.close_connection = 1
                        send_close = False
                self.send_header(k, v)
            if send_close:
                # no Content-Length and no explicit Connection header:
                # close the connection so the client can detect EOF
                self.close_connection = 1
                self.send_header('Connection', 'close')

            self.end_headers()
        self.wfile.write(chunk)

    def wsgi_start_response(self, status, response_headers, exc_info=None):
        # PEP 333 start_response callable; stores status/headers for
        # lazy transmission by wsgi_write_chunk
        if exc_info:
            try:
                if self.wsgi_headers_sent:
                    # headers already on the wire: re-raise so the
                    # error aborts the in-progress response
                    six.reraise(exc_info[0], exc_info[1], exc_info[2])
                else:
                    # In this case, we're going to assume that the
                    # higher-level code is currently handling the
                    # issue and returning a reasonable response.
                    # self.log_error(repr(exc_info))
                    pass
            finally:
                # break the traceback reference cycle (PEP 333)
                exc_info = None
        elif self.wsgi_curr_headers:
            assert 0, "Attempt to set headers a second time w/o an exc_info"
        self.wsgi_curr_headers = (status, response_headers)
        return self.wsgi_write_chunk

    def wsgi_setup(self, environ=None):
        """
        Setup the member variables used by this WSGI mixin, including
        the ``environ`` and status member variables.

        After the basic environment is created; the optional ``environ``
        argument can be used to override any settings.
        """

        dummy_url = 'http://dummy%s' % (self.path,)
        (scheme, netloc, path, query, fragment) = urlsplit(dummy_url)
        path = unquote(path)
        endslash = path.endswith('/')
        path = posixpath.normpath(path)
        if endslash and path != '/':
            # Put the slash back...
            path += '/'
        (server_name, server_port) = self.server.server_address[:2]

        rfile = self.rfile
        # We can put in the protection to keep from over-reading the
        # file
        try:
            content_length = int(self.headers.get('Content-Length', '0'))
        except ValueError:
            content_length = 0
        if '100-continue' == self.headers.get('Expect','').lower():
            # defer the body until the application actually reads it
            rfile = LimitedLengthFile(ContinueHook(rfile, self.wfile.write), content_length)
        else:
            if not hasattr(self.connection, 'get_context'):
                # @@: LimitedLengthFile is currently broken in connection
                # with SSL (sporadic errors that are difficult to trace,
                # but ones that go away when you don't use
                # LimitedLengthFile)
                rfile = LimitedLengthFile(rfile, content_length)

        remote_address = self.client_address[0]
        self.wsgi_environ = {
            'wsgi.version': (1,0)
            ,'wsgi.url_scheme': 'http'
            ,'wsgi.input': rfile
            ,'wsgi.errors': sys.stderr
            ,'wsgi.multithread': True
            ,'wsgi.multiprocess': False
            ,'wsgi.run_once': False
            # CGI variables required by PEP-333
            ,'REQUEST_METHOD': self.command
            ,'SCRIPT_NAME': '' # application is root of server
            ,'PATH_INFO': path
            ,'QUERY_STRING': query
            ,'CONTENT_TYPE': self.headers.get('Content-Type', '')
            ,'CONTENT_LENGTH': self.headers.get('Content-Length', '0')
            ,'SERVER_NAME': server_name
            ,'SERVER_PORT': str(server_port)
            ,'SERVER_PROTOCOL': self.request_version
            # CGI not required by PEP-333
            ,'REMOTE_ADDR': remote_address
            }
        if scheme:
            self.wsgi_environ['paste.httpserver.proxy.scheme'] = scheme
        if netloc:
            self.wsgi_environ['paste.httpserver.proxy.host'] = netloc

        if self.lookup_addresses:
            # @@: make lookup_addresses actually work; at this point
            # address_string() is overridden down in the file and
            # hence is a noop
            if remote_address.startswith("192.168.") \
                or remote_address.startswith("10.") \
                or remote_address.startswith("172.16."):
                # private-network client: skip any reverse lookup
                pass
            else:
                address_string = None # self.address_string()
                if address_string:
                    self.wsgi_environ['REMOTE_HOST'] = address_string

        if hasattr(self.server, 'thread_pool'):
            # Now that we know what the request was for, we should
            # tell the thread pool what its worker is working on
            self.server.thread_pool.worker_tracker[_thread.get_ident()][1] = self.wsgi_environ
            self.wsgi_environ['paste.httpserver.thread_pool'] = self.server.thread_pool

        for k, v in self.headers.items():
            key = 'HTTP_' + k.replace("-","_").upper()
            if key in ('HTTP_CONTENT_TYPE','HTTP_CONTENT_LENGTH'):
                # these are provided via the CGI-style keys above
                continue
            self.wsgi_environ[key] = ','.join(_get_headers(self.headers, k))

        if hasattr(self.connection,'get_context'):
            # a pyOpenSSL connection: mark the request as https
            self.wsgi_environ['wsgi.url_scheme'] = 'https'
            # @@: extract other SSL parameters from pyOpenSSL at...
            # http://www.modssl.org/docs/2.8/ssl_reference.html#ToC25

        if environ:
            assert isinstance(environ, dict)
            self.wsgi_environ.update(environ)
            if 'on' == environ.get('HTTPS'):
                self.wsgi_environ['wsgi.url_scheme'] = 'https'

        self.wsgi_curr_headers = None
        self.wsgi_headers_sent = False

    def wsgi_connection_drop(self, exce, environ=None):
        """
        Override this if you're interested in socket exceptions, such
        as when the user clicks 'Cancel' during a file download.
        """
        pass

    def wsgi_execute(self, environ=None):
        """
        Invoke the server's ``wsgi_application``.
        """

        self.wsgi_setup(environ)

        try:
            result = self.server.wsgi_application(self.wsgi_environ,
                                                  self.wsgi_start_response)
            try:
                for chunk in result:
                    self.wsgi_write_chunk(chunk)
                if not self.wsgi_headers_sent:
                    # force the headers out for an empty response body
                    self.wsgi_write_chunk('')
            finally:
                if hasattr(result,'close'):
                    # give the application's iterable a chance to clean up
                    result.close()
                result = None
        except socket.error as exce:
            # the client went away; not an application error
            self.wsgi_connection_drop(exce, environ)
            return
        except:
            if not self.wsgi_headers_sent:
                # application blew up before any output: emit a 500
                error_msg = "Internal Server Error\n"
                self.wsgi_curr_headers = (
                    '500 Internal Server Error',
                    [('Content-type', 'text/plain'),
                     ('Content-length', str(len(error_msg)))])
                self.wsgi_write_chunk("Internal Server Error\n")
            raise
+
+#
+# SSL Functionality
+#
+# This implementation was motivated by Sebastien Martini's SSL example
+# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
+#
try:
    from OpenSSL import SSL, tsafe
    SocketErrors = (socket.error, SSL.ZeroReturnError, SSL.SysCallError)
except ImportError:
    # Do not require pyOpenSSL to be installed, but disable SSL
    # functionality in that case.
    SSL = None
    SocketErrors = (socket.error,)
    class SecureHTTPServer(HTTPServer):
        # stand-in with the same constructor signature; refuses any
        # ssl_context since pyOpenSSL is unavailable
        def __init__(self, server_address, RequestHandlerClass,
                     ssl_context=None, request_queue_size=None):
            assert not ssl_context, "pyOpenSSL not installed"
            HTTPServer.__init__(self, server_address, RequestHandlerClass)
            if request_queue_size:
                self.socket.listen(request_queue_size)
else:

    class _ConnFixer(object):
        """ wraps a socket connection so it implements makefile """
        # NOTE(review): socket._fileobject is Python 2 only — confirm
        # this branch is not reached on Python 3
        def __init__(self, conn):
            self.__conn = conn
        def makefile(self, mode, bufsize):
            return socket._fileobject(self.__conn, mode, bufsize)
        def __getattr__(self, attrib):
            return getattr(self.__conn, attrib)

    class SecureHTTPServer(HTTPServer):
        """
        Provides SSL server functionality on top of the BaseHTTPServer
        by overriding _private_ members of Python's standard
        distribution.  The interface for this instance only changes by
        adding an optional ssl_context attribute to the constructor:

              cntx = SSL.Context(SSL.SSLv23_METHOD)
              cntx.use_privatekey_file("host.pem")
              cntx.use_certificate_file("host.pem")

        """

        def __init__(self, server_address, RequestHandlerClass,
                     ssl_context=None, request_queue_size=None):
            # This overrides the implementation of __init__ in python's
            # SocketServer.TCPServer (which BaseHTTPServer.HTTPServer
            # does not override, thankfully).
            HTTPServer.__init__(self, server_address, RequestHandlerClass)
            self.socket = socket.socket(self.address_family,
                                        self.socket_type)
            self.ssl_context = ssl_context
            if ssl_context:
                # serialize the timeout calls under the same lock that
                # tsafe.Connection uses for its other operations
                class TSafeConnection(tsafe.Connection):
                    def settimeout(self, *args):
                        self._lock.acquire()
                        try:
                            return self._ssl_conn.settimeout(*args)
                        finally:
                            self._lock.release()
                    def gettimeout(self):
                        self._lock.acquire()
                        try:
                            return self._ssl_conn.gettimeout()
                        finally:
                            self._lock.release()
                self.socket = TSafeConnection(ssl_context, self.socket)
            self.server_bind()
            if request_queue_size:
                self.socket.listen(request_queue_size)
            self.server_activate()

        def get_request(self):
            # The default SSL request object does not seem to have a
            # ``makefile(mode, bufsize)`` method as expected by
            # Socketserver.StreamRequestHandler.
            (conn, info) = self.socket.accept()
            if self.ssl_context:
                conn = _ConnFixer(conn)
            return (conn, info)

    def _auto_ssl_context():
        """Generate a throw-away self-signed certificate and context."""
        import OpenSSL, random
        pkey = OpenSSL.crypto.PKey()
        pkey.generate_key(OpenSSL.crypto.TYPE_RSA, 768)

        cert = OpenSSL.crypto.X509()

        # ``sys.maxint`` does not exist on Python 3; ``sys.maxsize``
        # is the identical value on Python 2 builds, so use it instead
        cert.set_serial_number(random.randint(0, sys.maxsize))
        cert.gmtime_adj_notBefore(0)
        cert.gmtime_adj_notAfter(60 * 60 * 24 * 365)
        cert.get_subject().CN = '*'
        cert.get_subject().O = 'Dummy Certificate'
        cert.get_issuer().CN = 'Untrusted Authority'
        cert.get_issuer().O = 'Self-Signed'
        cert.set_pubkey(pkey)
        # NOTE(review): md5 signatures and 768-bit keys are weak; this
        # is acceptable only because the certificate is a throw-away
        # development convenience
        cert.sign(pkey, 'md5')

        ctx = SSL.Context(SSL.SSLv23_METHOD)
        ctx.use_privatekey(pkey)
        ctx.use_certificate(cert)

        return ctx
+
class WSGIHandler(WSGIHandlerMixin, BaseHTTPRequestHandler):
    """
    A WSGI handler that overrides POST, GET and HEAD to delegate
    requests to the server's ``wsgi_application``.
    """
    server_version = 'PasteWSGIServer/' + __version__

    def handle_one_request(self):
        """Handle a single HTTP request.

        You normally don't need to override this method; see the class
        __doc__ string for information on how to handle specific HTTP
        commands such as GET and POST.

        """
        self.raw_requestline = self.rfile.readline()
        if not self.raw_requestline:
            # Client went away before sending a request line.
            self.close_connection = 1
            return
        if self.parse_request():
            # Request parsed successfully; hand it to the WSGI app.
            # (On parse failure an error code has already been sent.)
            self.wsgi_execute()

    def handle(self):
        # don't bother logging disconnects while handling a request
        try:
            BaseHTTPRequestHandler.handle(self)
        except SocketErrors as err:
            self.wsgi_connection_drop(err)

    def address_string(self):
        """Return the client address formatted for logging.

        This is overridden so that no hostname lookup is done.
        """
        return ''
+
class LimitedLengthFile(object):
    """
    Wrap a file-like object so that at most ``length`` units (bytes or
    characters, matching the underlying file) can be consumed from it.
    Used to bound reads of a request body to its Content-Length.
    """

    def __init__(self, file, length):
        self.file = file
        self.length = length
        self._consumed = 0
        # Only expose ``seek`` when the wrapped file supports it.
        if hasattr(self.file, 'seek'):
            self.seek = self._seek

    def __repr__(self):
        base_repr = repr(self.file)
        return base_repr[:-1] + ' length=%s>' % self.length

    def read(self, length=None):
        """Read up to ``length`` units, never past the configured limit."""
        left = self.length - self._consumed
        if length is None:
            length = left
        else:
            length = min(length, left)
        # next two lines are necessary only if read(0) blocks
        if not left:
            return ''
        data = self.file.read(length)
        self._consumed += len(data)
        return data

    def readline(self, *args):
        """Read one line, capped both by the limit and any size argument."""
        max_read = self.length - self._consumed
        if len(args):
            max_read = min(args[0], max_read)
        data = self.file.readline(max_read)
        self._consumed += len(data)
        return data

    def readlines(self, hint=None):
        data = self.file.readlines(hint)
        for chunk in data:
            self._consumed += len(chunk)
        return data

    def __iter__(self):
        return self

    def next(self):
        if self.length - self._consumed <= 0:
            raise StopIteration
        return self.readline()

    # Python 3 iterator protocol; ``next`` above is the Python 2 spelling.
    # Without this alias, iterating the object raises TypeError on Python 3.
    __next__ = next

    ## Optional methods ##

    def _seek(self, place):
        self.file.seek(place)
        self._consumed = place

    def tell(self):
        if hasattr(self.file, 'tell'):
            return self.file.tell()
        else:
            return self._consumed
+
class ThreadPool(object):
    """
    Generic thread pool with a queue of callables to consume.

    Keeps a notion of the status of its worker threads:

    idle: worker thread with nothing to do

    busy: worker thread doing its job

    hung: worker thread that's been doing a job for too long

    dying: a hung thread that has been killed, but hasn't died quite
    yet.

    zombie: what was a worker thread that we've tried to kill but
    isn't dead yet.

    At any time you can call track_threads, to get a dictionary with
    these keys and lists of thread_ids that fall in that status.  All
    keys will be present, even if they point to empty lists.

    hung threads are threads that have been busy more than
    hung_thread_limit seconds.  Hung threads are killed when they live
    longer than kill_thread_limit seconds.  A thread is then
    considered dying for dying_limit seconds, if it is still alive
    after that it is considered a zombie.

    When there are no idle workers and a request comes in, another
    worker *may* be spawned.  If there are less than spawn_if_under
    threads in the busy state, another thread will be spawned.  So if
    the limit is 5, and there are 4 hung threads and 6 busy threads,
    no thread will be spawned.

    When there are more than max_zombie_threads_before_die zombie
    threads, a SystemExit exception will be raised, stopping the
    server.  Use 0 or None to never raise this exception.  Zombie
    threads *should* get cleaned up, but killing threads is not
    necessarily reliable.  This is turned off by default, since it is
    only a good idea if you've deployed the server with some process
    watching from above (something similar to daemontools or zdaemon).

    Each worker thread only processes ``max_requests`` tasks before it
    dies and replaces itself with a new worker thread.
    """


    # Sentinel task: a worker that pulls this off the queue exits its loop.
    SHUTDOWN = object()

    def __init__(
        self, nworkers, name="ThreadPool", daemon=False,
        max_requests=100, # threads are killed after this many requests
        hung_thread_limit=30, # when a thread is marked "hung"
        kill_thread_limit=1800, # when you kill that hung thread
        dying_limit=300, # seconds that a kill should take to go into effect (longer than this and the thread is a "zombie")
        spawn_if_under=5, # spawn if there's too many hung threads
        max_zombie_threads_before_die=0, # when to give up on the process
        hung_check_period=100, # every 100 requests check for hung workers
        logger=None, # Place to log messages to
        error_email=None, # Person(s) to notify if serious problem occurs
        ):
        """
        Create thread pool with `nworkers` worker threads.
        """
        self.nworkers = nworkers
        self.max_requests = max_requests
        self.name = name
        self.queue = queue.Queue()
        self.workers = []
        self.daemon = daemon
        if logger is None:
            logger = logging.getLogger('paste.httpserver.ThreadPool')
        if isinstance(logger, six.string_types):
            logger = logging.getLogger(logger)
        self.logger = logger
        self.error_email = error_email
        self._worker_count = count()

        assert (not kill_thread_limit
                or kill_thread_limit >= hung_thread_limit), (
            "kill_thread_limit (%s) should be higher than hung_thread_limit (%s)"
            % (kill_thread_limit, hung_thread_limit))
        if not killthread:
            kill_thread_limit = 0
            self.logger.info(
                "Cannot use kill_thread_limit as ctypes/killthread is not available")
        self.kill_thread_limit = kill_thread_limit
        self.dying_limit = dying_limit
        self.hung_thread_limit = hung_thread_limit
        assert spawn_if_under <= nworkers, (
            "spawn_if_under (%s) should be less than nworkers (%s)"
            % (spawn_if_under, nworkers))
        self.spawn_if_under = spawn_if_under
        self.max_zombie_threads_before_die = max_zombie_threads_before_die
        self.hung_check_period = hung_check_period
        self.requests_since_last_hung_check = 0
        # Used to keep track of what worker is doing what:
        self.worker_tracker = {}
        # Used to keep track of the workers not doing anything:
        self.idle_workers = []
        # Used to keep track of threads that have been killed, but maybe aren't dead yet:
        self.dying_threads = {}
        # This is used to track when we last had to add idle workers;
        # we shouldn't cull extra workers until some time has passed
        # (hung_thread_limit) since workers were added:
        self._last_added_new_idle_workers = 0
        if not daemon:
            atexit.register(self.shutdown)
        for i in range(self.nworkers):
            self.add_worker_thread(message='Initial worker pool')

    def add_task(self, task):
        """
        Add a task to the queue
        """
        self.logger.debug('Added task (%i tasks queued)', self.queue.qsize())
        if self.hung_check_period:
            self.requests_since_last_hung_check += 1
            if self.requests_since_last_hung_check > self.hung_check_period:
                self.requests_since_last_hung_check = 0
                self.kill_hung_threads()
        if not self.idle_workers and self.spawn_if_under:
            # spawn_if_under can come into effect...
            busy = 0
            now = time.time()
            self.logger.debug('No idle workers for task; checking if we need to make more workers')
            for worker in self.workers:
                if not hasattr(worker, 'thread_id'):
                    # Not initialized
                    continue
                time_started, info = self.worker_tracker.get(worker.thread_id,
                                                             (None, None))
                if time_started is not None:
                    if now - time_started < self.hung_thread_limit:
                        busy += 1
            if busy < self.spawn_if_under:
                self.logger.info(
                    'No idle tasks, and only %s busy tasks; adding %s more '
                    'workers', busy, self.spawn_if_under-busy)
                self._last_added_new_idle_workers = time.time()
                for i in range(self.spawn_if_under - busy):
                    self.add_worker_thread(message='Response to lack of idle workers')
            else:
                self.logger.debug(
                    'No extra workers needed (%s busy workers)',
                    busy)
        if (len(self.workers) > self.nworkers
            and len(self.idle_workers) > 3
            and time.time()-self._last_added_new_idle_workers > self.hung_thread_limit):
            # We've spawned workers in the past, but they aren't needed
            # anymore; kill off some
            self.logger.info(
                'Culling %s extra workers (%s idle workers present)',
                len(self.workers)-self.nworkers, len(self.idle_workers))
            self.logger.debug(
                'Idle workers: %s', self.idle_workers)
            for i in range(len(self.workers) - self.nworkers):
                self.queue.put(self.SHUTDOWN)
        self.queue.put(task)

    def track_threads(self):
        """
        Return a dict summarizing the threads in the pool (as
        described in the ThreadPool docstring).
        """
        result = dict(idle=[], busy=[], hung=[], dying=[], zombie=[])
        now = time.time()
        for worker in self.workers:
            if not hasattr(worker, 'thread_id'):
                # The worker hasn't fully started up, we should just
                # ignore it
                continue
            time_started, info = self.worker_tracker.get(worker.thread_id,
                                                         (None, None))
            if time_started is not None:
                if now - time_started > self.hung_thread_limit:
                    result['hung'].append(worker)
                else:
                    result['busy'].append(worker)
            else:
                result['idle'].append(worker)
        # Iterate over a snapshot: entries may be deleted inside the loop,
        # and mutating a dict during iteration raises RuntimeError on
        # Python 3.
        for thread_id, (time_killed, worker) in list(self.dying_threads.items()):
            if not self.thread_exists(thread_id):
                # Cull dying threads that are actually dead and gone
                self.logger.info('Killed thread %s no longer around',
                                 thread_id)
                try:
                    del self.dying_threads[thread_id]
                except KeyError:
                    pass
                continue
            if now - time_killed > self.dying_limit:
                result['zombie'].append(worker)
            else:
                result['dying'].append(worker)
        return result

    def kill_worker(self, thread_id):
        """
        Removes the worker with the given thread_id from the pool, and
        replaces it with a new worker thread.

        This should only be done for mis-behaving workers.
        """
        if killthread is None:
            raise RuntimeError(
                "Cannot kill worker; killthread/ctypes not available")
        # NOTE: threading._active is a private CPython structure; it is the
        # only way to map a thread id back to its Thread object here.
        thread_obj = threading._active.get(thread_id)
        killthread.async_raise(thread_id, SystemExit)
        try:
            del self.worker_tracker[thread_id]
        except KeyError:
            pass
        self.logger.info('Killing thread %s', thread_id)
        if thread_obj in self.workers:
            self.workers.remove(thread_obj)
        self.dying_threads[thread_id] = (time.time(), thread_obj)
        self.add_worker_thread(message='Replacement for killed thread %s' % thread_id)

    def thread_exists(self, thread_id):
        """
        Returns true if a thread with this id is still running
        """
        return thread_id in threading._active

    def add_worker_thread(self, *args, **kwargs):
        """Spawn one new worker thread running ``worker_thread_callback``."""
        index = six.next(self._worker_count)
        worker = threading.Thread(target=self.worker_thread_callback,
                                  args=args, kwargs=kwargs,
                                  name=("worker %d" % index))
        # ``Thread.setDaemon`` is deprecated on Python 3; assign the
        # attribute directly instead.
        worker.daemon = self.daemon
        worker.start()

    def kill_hung_threads(self):
        """
        Tries to kill any hung threads
        """
        if not self.kill_thread_limit:
            # No killing should occur
            return
        now = time.time()
        max_time = 0
        total_time = 0
        idle_workers = 0
        starting_workers = 0
        working_workers = 0
        killed_workers = 0
        for worker in self.workers:
            if not hasattr(worker, 'thread_id'):
                # Not setup yet
                starting_workers += 1
                continue
            time_started, info = self.worker_tracker.get(worker.thread_id,
                                                         (None, None))
            if time_started is None:
                # Must be idle
                idle_workers += 1
                continue
            working_workers += 1
            max_time = max(max_time, now-time_started)
            total_time += now-time_started
            if now - time_started > self.kill_thread_limit:
                self.logger.warning(
                    'Thread %s hung (working on task for %i seconds)',
                    worker.thread_id, now - time_started)
                try:
                    import pprint
                    info_desc = pprint.pformat(info)
                except:
                    out = six.StringIO()
                    traceback.print_exc(file=out)
                    info_desc = 'Error:\n%s' % out.getvalue()
                self.notify_problem(
                    "Killing worker thread (id=%(thread_id)s) because it has been \n"
                    "working on task for %(time)s seconds (limit is %(limit)s)\n"
                    "Info on task:\n"
                    "%(info)s"
                    % dict(thread_id=worker.thread_id,
                           time=now - time_started,
                           limit=self.kill_thread_limit,
                           info=info_desc))
                self.kill_worker(worker.thread_id)
                killed_workers += 1
        if working_workers:
            ave_time = float(total_time) / working_workers
            ave_time = '%.2fsec' % ave_time
        else:
            ave_time = 'N/A'
        self.logger.info(
            "kill_hung_threads status: %s threads (%s working, %s idle, %s starting) "
            "ave time %s, max time %.2fsec, killed %s workers"
            % (idle_workers + starting_workers + working_workers,
               working_workers, idle_workers, starting_workers,
               ave_time, max_time, killed_workers))
        self.check_max_zombies()

    def check_max_zombies(self):
        """
        Check if we've reached max_zombie_threads_before_die; if so
        then kill the entire process.
        """
        if not self.max_zombie_threads_before_die:
            return
        found = []
        now = time.time()
        # Snapshot the items: entries may be deleted inside the loop
        # (mutating during iteration raises RuntimeError on Python 3).
        for thread_id, (time_killed, worker) in list(self.dying_threads.items()):
            if not self.thread_exists(thread_id):
                # Cull dying threads that are actually dead and gone
                try:
                    del self.dying_threads[thread_id]
                except KeyError:
                    pass
                continue
            if now - time_killed > self.dying_limit:
                found.append(thread_id)
        if found:
            self.logger.info('Found %s zombie threads', found)
        if len(found) > self.max_zombie_threads_before_die:
            self.logger.fatal(
                'Exiting process because %s zombie threads is more than %s limit',
                len(found), self.max_zombie_threads_before_die)
            self.notify_problem(
                "Exiting process because %(found)s zombie threads "
                "(more than limit of %(limit)s)\n"
                "Bad threads (ids):\n"
                "  %(ids)s\n"
                % dict(found=len(found),
                       limit=self.max_zombie_threads_before_die,
                       ids="\n  ".join(map(str, found))),
                subject="Process restart (too many zombie threads)")
            self.shutdown(10)
            print('Shutting down', threading.current_thread())
            raise ServerExit(3)

    def worker_thread_callback(self, message=None):
        """
        Worker thread should call this method to get and process queued
        callables.
        """
        thread_obj = threading.current_thread()
        thread_id = thread_obj.thread_id = _thread.get_ident()
        self.workers.append(thread_obj)
        self.idle_workers.append(thread_id)
        requests_processed = 0
        add_replacement_worker = False
        self.logger.debug('Started new worker %s: %s', thread_id, message)
        try:
            while True:
                # NOTE(review): this check runs before pulling a task, so a
                # worker actually handles max_requests + 1 tasks before
                # replacing itself -- confirm whether that is intended.
                if self.max_requests and self.max_requests < requests_processed:
                    # Replace this thread then die
                    self.logger.debug('Thread %s processed %i requests (limit %s); stopping thread'
                                      % (thread_id, requests_processed, self.max_requests))
                    add_replacement_worker = True
                    break
                runnable = self.queue.get()
                if runnable is ThreadPool.SHUTDOWN:
                    self.logger.debug('Worker %s asked to SHUTDOWN', thread_id)
                    break
                try:
                    self.idle_workers.remove(thread_id)
                except ValueError:
                    pass
                self.worker_tracker[thread_id] = [time.time(), None]
                requests_processed += 1
                try:
                    try:
                        runnable()
                    except:
                        # We are later going to call sys.exc_clear(),
                        # removing all remnants of any exception, so
                        # we should log it now.  But ideally no
                        # exception should reach this level
                        print('Unexpected exception in worker %r' % runnable,
                              file=sys.stderr)
                        traceback.print_exc()
                    if thread_id in self.dying_threads:
                        # That last exception was intended to kill me
                        break
                finally:
                    try:
                        del self.worker_tracker[thread_id]
                    except KeyError:
                        pass
                    if six.PY2:
                        sys.exc_clear()
                self.idle_workers.append(thread_id)
        finally:
            try:
                del self.worker_tracker[thread_id]
            except KeyError:
                pass
            try:
                self.idle_workers.remove(thread_id)
            except ValueError:
                pass
            try:
                self.workers.remove(thread_obj)
            except ValueError:
                pass
            try:
                del self.dying_threads[thread_id]
            except KeyError:
                pass
            if add_replacement_worker:
                self.add_worker_thread(message='Voluntary replacement for thread %s' % thread_id)

    def shutdown(self, force_quit_timeout=0):
        """
        Shutdown the queue (after finishing any pending requests).
        """
        self.logger.info('Shutting down threadpool')
        # Add a shutdown request for every worker
        for i in range(len(self.workers)):
            self.queue.put(ThreadPool.SHUTDOWN)
        # Wait for each thread to terminate
        hung_workers = []
        for worker in self.workers:
            worker.join(0.5)
            if worker.is_alive():
                hung_workers.append(worker)
        zombies = []
        for thread_id in self.dying_threads:
            if self.thread_exists(thread_id):
                zombies.append(thread_id)
        if hung_workers or zombies:
            self.logger.info("%s workers didn't stop properly, and %s zombies",
                             len(hung_workers), len(zombies))
            if hung_workers:
                for worker in hung_workers:
                    self.kill_worker(worker.thread_id)
                self.logger.info('Workers killed forcefully')
            if force_quit_timeout:
                timed_out = False
                need_force_quit = bool(zombies)
                for worker in self.workers:
                    if not timed_out and worker.is_alive():
                        timed_out = True
                        worker.join(force_quit_timeout)
                    if worker.is_alive():
                        print("Worker %s won't die" % worker)
                        need_force_quit = True
                if need_force_quit:
                    import atexit
                    # Remove the threading atexit callback
                    # NOTE(review): atexit._exithandlers is a CPython 2
                    # internal; it does not exist on Python 3.
                    for callback in list(atexit._exithandlers):
                        func = getattr(callback[0], 'im_func', None)
                        if not func:
                            continue
                        globs = getattr(func, 'func_globals', {})
                        mod = globs.get('__name__')
                        if mod == 'threading':
                            atexit._exithandlers.remove(callback)
                    atexit._run_exitfuncs()
                    print('Forcefully exiting process')
                    os._exit(3)
                else:
                    self.logger.info('All workers eventually killed')
        else:
            self.logger.info('All workers stopped')

    def notify_problem(self, msg, subject=None, spawn_thread=True):
        """
        Called when there's a substantial problem.  msg contains the
        body of the notification, subject the summary.

        If spawn_thread is true, then the email will be sent in
        another thread (so this doesn't block).
        """
        if not self.error_email:
            return
        if spawn_thread:
            t = threading.Thread(
                target=self.notify_problem,
                args=(msg, subject, False))
            t.start()
            return
        from_address = 'errors@localhost'
        if not subject:
            subject = msg.strip().splitlines()[0]
            subject = subject[:50]
            subject = '[http threadpool] %s' % subject
        headers = [
            "To: %s" % self.error_email,
            "From: %s" % from_address,
            "Subject: %s" % subject,
            ]
        try:
            system = ' '.join(os.uname())
        except:
            system = '(unknown)'
        body = (
            "An error has occurred in the paste.httpserver.ThreadPool\n"
            "Error:\n"
            "  %(msg)s\n"
            "Occurred at: %(time)s\n"
            "PID: %(pid)s\n"
            "System: %(system)s\n"
            "Server .py file: %(file)s\n"
            % dict(msg=msg,
                   time=time.strftime("%c"),
                   pid=os.getpid(),
                   system=system,
                   file=os.path.abspath(__file__),
                   ))
        message = '\n'.join(headers) + "\n\n" + body
        import smtplib
        server = smtplib.SMTP('localhost')
        error_emails = [
            e.strip() for e in self.error_email.split(",")
            if e.strip()]
        server.sendmail(from_address, error_emails, message)
        server.quit()
        print('email sent to', error_emails, message)
+
class ThreadPoolMixIn(object):
    """
    Mix-in class that services requests from a shared thread pool
    instead of spawning a new thread per connection.
    """

    def __init__(self, nworkers, daemon=False, **threadpool_options):
        # Create and start the workers
        self.running = True
        assert nworkers > 0, "ThreadPoolMixIn servers must have at least one worker"
        self.thread_pool = ThreadPool(
            nworkers,
            "ThreadPoolMixIn HTTP server on %s:%d"
            % (self.server_name, self.server_port),
            daemon,
            **threadpool_options)

    def process_request(self, request, client_address):
        """
        Queue the request to be processed by one of the thread pool threads
        """
        # This sets the socket to blocking mode (and no timeout) since it
        # may take the thread pool a little while to get back to it. (This
        # is the default but since we set a timeout on the parent socket so
        # that we can trap interrupts we need to restore this.)
        request.setblocking(1)

        def task():
            self.process_request_in_thread(request, client_address)

        # Queue processing of the request
        self.thread_pool.add_task(task)

    def handle_error(self, request, client_address):
        exc_type = sys.exc_info()[0]
        if exc_type is ServerExit:
            # This is actually a request to stop the server
            raise
        return super(ThreadPoolMixIn, self).handle_error(request, client_address)

    def process_request_in_thread(self, request, client_address):
        """
        The worker thread should call back here to do the rest of the
        request processing.  Error handling normally done in 'handle_request'
        must be done here.
        """
        try:
            self.finish_request(request, client_address)
            self.close_request(request)
        except:
            self.handle_error(request, client_address)
            self.close_request(request)
            exc = sys.exc_info()[1]
            # Fatal conditions must still propagate out of the worker.
            if isinstance(exc, (MemoryError, KeyboardInterrupt)):
                raise

    def serve_forever(self):
        """
        Overrides `serve_forever` to shut the threadpool down cleanly.
        """
        try:
            while self.running:
                try:
                    self.handle_request()
                except socket.timeout:
                    # Timeouts are expected; they give interrupts a
                    # chance to propagate, so just keep handling.
                    continue
        finally:
            if hasattr(self, 'thread_pool'):
                self.thread_pool.shutdown()

    def server_activate(self):
        """
        Overrides server_activate to set timeout on our listener socket.
        """
        # We set the timeout here so that we can trap interrupts on windows
        self.socket.settimeout(1)

    def server_close(self):
        """
        Finish pending requests and shutdown the server.
        """
        self.running = False
        self.socket.close()
        if hasattr(self, 'thread_pool'):
            self.thread_pool.shutdown(60)
+
class WSGIServerBase(SecureHTTPServer):
    """Common base for the WSGI servers: stores the application and an
    optional per-connection socket timeout."""

    def __init__(self, wsgi_application, server_address,
                 RequestHandlerClass=None, ssl_context=None,
                 request_queue_size=None):
        SecureHTTPServer.__init__(self, server_address,
                                  RequestHandlerClass, ssl_context,
                                  request_queue_size=request_queue_size)
        self.wsgi_application = wsgi_application
        self.wsgi_socket_timeout = None

    def get_request(self):
        # If there is a socket_timeout, set it on the accepted connection.
        connection, address = SecureHTTPServer.get_request(self)
        if self.wsgi_socket_timeout:
            connection.settimeout(self.wsgi_socket_timeout)
        return (connection, address)
+
class WSGIServer(ThreadingMixIn, WSGIServerBase):
    # ThreadingMixIn option: non-daemon threads, so in-progress requests
    # are allowed to finish when the main thread exits.
    daemon_threads = False
+
class WSGIThreadPoolServer(ThreadPoolMixIn, WSGIServerBase):
    """WSGI server that services requests from a fixed pool of worker
    threads (see ``ThreadPool`` for the available options)."""

    def __init__(self, wsgi_application, server_address,
                 RequestHandlerClass=None, ssl_context=None,
                 nworkers=10, daemon_threads=False,
                 threadpool_options=None, request_queue_size=None):
        WSGIServerBase.__init__(self, wsgi_application, server_address,
                                RequestHandlerClass, ssl_context,
                                request_queue_size=request_queue_size)
        options = {} if threadpool_options is None else threadpool_options
        ThreadPoolMixIn.__init__(self, nworkers, daemon_threads, **options)
+
class ServerExit(SystemExit):
    """
    Raised to tell the server to really exit (a plain SystemExit is
    normally caught and treated as a request to stop only the current
    worker).
    """
+
def serve(application, host=None, port=None, handler=None, ssl_pem=None,
          ssl_context=None, server_version=None, protocol_version=None,
          start_loop=True, daemon_threads=None, socket_timeout=None,
          use_threadpool=None, threadpool_workers=10,
          threadpool_options=None, request_queue_size=5):
    """
    Serves your ``application`` over HTTP(S) via WSGI interface

    ``host``

        This is the IP address to bind to (or a hostname if your
        nameserver is properly configured).  This defaults to
        127.0.0.1, which is not a public interface.

    ``port``

        The port to run on, defaults to 8080 for HTTP, or 4443 for
        HTTPS. This can be a string or an integer value.

    ``handler``

        This is the HTTP request handler to use, it defaults to
        ``WSGIHandler`` in this module.

    ``ssl_pem``

        This is an optional SSL certificate file (via OpenSSL). You can
        supply ``*`` and a development-only certificate will be
        created for you, or you can generate a self-signed test PEM
        certificate file as follows::

            $ openssl genrsa 1024 > host.key
            $ chmod 400 host.key
            $ openssl req -new -x509 -nodes -sha1 -days 365  \\
                          -key host.key > host.cert
            $ cat host.cert host.key > host.pem
            $ chmod 400 host.pem

    ``ssl_context``

        This is an optional SSL context object for the server.  A SSL
        context will be automatically constructed for you if you supply
        ``ssl_pem``.  Supply this to use a context of your own
        construction.

    ``server_version``

        The version of the server as reported in HTTP response line. This
        defaults to something like "PasteWSGIServer/0.5".  Many servers
        hide their code-base identity with a name like 'Amnesiac/1.0'

    ``protocol_version``

        This sets the protocol used by the server, by default
        ``HTTP/1.0``. There is some support for ``HTTP/1.1``, which
        defaults to nicer keep-alive connections.  This server supports
        ``100 Continue``, but does not yet support HTTP/1.1 Chunked
        Encoding. Hence, if you use HTTP/1.1, you're somewhat in error
        since chunked coding is a mandatory requirement of a HTTP/1.1
        server.  If you specify HTTP/1.1, every response *must* have a
        ``Content-Length`` and you must be careful not to read past the
        end of the socket.

    ``start_loop``

        This specifies if the server loop (aka ``server.serve_forever()``)
        should be called; it defaults to ``True``.

    ``daemon_threads``

        This flag specifies if when your webserver terminates all
        in-progress client connections should be dropped.  It defaults
        to ``False``.   You might want to set this to ``True`` if you
        are using ``HTTP/1.1`` and don't set a ``socket_timeout``.

    ``socket_timeout``

        This specifies the maximum amount of time that a connection to a
        given client will be kept open.  At this time, it is a rude
        disconnect, but at a later time it might follow the RFC a bit
        more closely.

    ``use_threadpool``

        Serve requests from a pool of worker threads (``threadpool_workers``)
        rather than creating a new thread for each request.  This can
        substantially reduce latency since there is a high cost associated
        with thread creation.

    ``threadpool_workers``

        Number of worker threads to create when ``use_threadpool`` is true. This
        can be a string or an integer value.

    ``threadpool_options``

        A dictionary of options to be used when instantiating the
        threadpool.  See paste.httpserver.ThreadPool for specific
        options (``threadpool_workers`` is a specific option that can
        also go here).

    ``request_queue_size``

        The 'backlog' argument to socket.listen(); specifies the
        maximum number of queued connections.

    """
    is_ssl = False
    if ssl_pem or ssl_context:
        assert SSL, "pyOpenSSL is not installed"
        is_ssl = True
        # HTTPS gets its own default port.
        port = int(port or 4443)
        if not ssl_context:
            if ssl_pem == '*':
                # '*' requests a throwaway self-signed development cert.
                ssl_context = _auto_ssl_context()
            else:
                ssl_context = SSL.Context(SSL.SSLv23_METHOD)
                ssl_context.use_privatekey_file(ssl_pem)
                ssl_context.use_certificate_chain_file(ssl_pem)

    host = host or '127.0.0.1'
    if port is None:
        # Allow "host:port" in the host argument; otherwise use the
        # default HTTP port.
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            port = 8080
    server_address = (host, int(port))

    if not handler:
        handler = WSGIHandler
    if server_version:
        # Overriding the class attributes affects all responses from
        # this handler class.
        handler.server_version = server_version
        handler.sys_version = None
    if protocol_version:
        assert protocol_version in ('HTTP/0.9', 'HTTP/1.0', 'HTTP/1.1')
        handler.protocol_version = protocol_version

    if use_threadpool is None:
        use_threadpool = True

    if converters.asbool(use_threadpool):
        server = WSGIThreadPoolServer(application, server_address, handler,
                                      ssl_context, int(threadpool_workers),
                                      daemon_threads,
                                      threadpool_options=threadpool_options,
                                      request_queue_size=request_queue_size)
    else:
        server = WSGIServer(application, server_address, handler, ssl_context,
                            request_queue_size=request_queue_size)
        if daemon_threads:
            server.daemon_threads = daemon_threads

    if socket_timeout:
        server.wsgi_socket_timeout = int(socket_timeout)

    if converters.asbool(start_loop):
        protocol = is_ssl and 'https' or 'http'
        host, port = server.server_address[:2]
        if host == '0.0.0.0':
            print('serving on 0.0.0.0:%s view at %s://127.0.0.1:%s'
                  % (port, protocol, port))
        else:
            print("serving on %s://%s:%s" % (protocol, host, port))
        try:
            server.serve_forever()
        except KeyboardInterrupt:
            # allow CTRL+C to shutdown
            pass
    return server
+
+# For paste.deploy server instantiation (egg:Paste#http)
+# Note: this gets a separate function because it has to expect string
+# arguments (though that's not much of an issue yet, ever?)
+def server_runner(wsgi_app, global_conf, **kwargs):
+ from paste.deploy.converters import asbool
+ for name in ['port', 'socket_timeout', 'threadpool_workers',
+ 'threadpool_hung_thread_limit',
+ 'threadpool_kill_thread_limit',
+ 'threadpool_dying_limit', 'threadpool_spawn_if_under',
+ 'threadpool_max_zombie_threads_before_die',
+ 'threadpool_hung_check_period',
+ 'threadpool_max_requests', 'request_queue_size']:
+ if name in kwargs:
+ kwargs[name] = int(kwargs[name])
+ for name in ['use_threadpool', 'daemon_threads']:
+ if name in kwargs:
+ kwargs[name] = asbool(kwargs[name])
+ threadpool_options = {}
+ for name, value in kwargs.items():
+ if name.startswith('threadpool_') and name != 'threadpool_workers':
+ threadpool_options[name[len('threadpool_'):]] = value
+ del kwargs[name]
+ if ('error_email' not in threadpool_options
+ and 'error_email' in global_conf):
+ threadpool_options['error_email'] = global_conf['error_email']
+ kwargs['threadpool_options'] = threadpool_options
+ serve(wsgi_app, **kwargs)
+
# Extend server_runner's help text with the threadpool-specific options
# (``serve.__doc__`` may be None when running under ``python -OO``).
server_runner.__doc__ = (serve.__doc__ or '') + """

    You can also set these threadpool options:

    ``threadpool_max_requests``:

        The maximum number of requests a worker thread will process
        before dying (and replacing itself with a new worker thread).
        Default 100.

    ``threadpool_hung_thread_limit``:

        The number of seconds a thread can work on a task before it is
        considered hung (stuck).  Default 30 seconds.

    ``threadpool_kill_thread_limit``:

        The number of seconds a thread can work before you should kill it
        (assuming it will never finish).  Default 600 seconds (10 minutes).

    ``threadpool_dying_limit``:

        The length of time after killing a thread that it should actually
        disappear.  If it lives longer than this, it is considered a
        "zombie".  Note that even in easy situations killing a thread can
        be very slow.  Default 300 seconds (5 minutes).

    ``threadpool_spawn_if_under``:

        If there are no idle threads and a request comes in, and there are
        less than this number of *busy* threads, then add workers to the
        pool.  Busy threads are threads that have taken less than
        ``threadpool_hung_thread_limit`` seconds so far.  So if you get
        *lots* of requests but they complete in a reasonable amount of time,
        the requests will simply queue up (adding more threads probably
        wouldn't speed them up).  But if you have lots of hung threads and
        one more request comes in, this will add workers to handle it.
        Default 5.

    ``threadpool_max_zombie_threads_before_die``:

        If there are more zombies than this, just kill the process.  This is
        only good if you have a monitor that will automatically restart
        the server.  This can clean up the mess.  Default 0 (disabled).

    ``threadpool_hung_check_period``:

        Every X requests, check for hung threads that need to be killed,
        or for zombie threads that should cause a restart.  Default 100
        requests.

    ``threadpool_logger``:

        Logging messages will go the logger named here.

    ``threadpool_error_email`` (or global ``error_email`` setting):

        When threads are killed or the process restarted, this email
        address will be contacted (using an SMTP server on localhost).

"""
+
+
if __name__ == '__main__':
    # Manual smoke test: serve an app that dumps the WSGI environ.
    from paste.wsgilib import dump_environ
    #serve(dump_environ, ssl_pem="test.pem")
    serve(dump_environ, server_version="Wombles/1.0",
          protocol_version="HTTP/1.1", port="8888")
diff --git a/paste/lint.py b/paste/lint.py
new file mode 100644
index 0000000..d781686
--- /dev/null
+++ b/paste/lint.py
@@ -0,0 +1,438 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# Also licenced under the Apache License, 2.0: http://opensource.org/licenses/apache2.0.php
+# Licensed to PSF under a Contributor Agreement
+"""
+Middleware to check for obedience to the WSGI specification.
+
+Some of the things this checks:
+
+* Signature of the application and start_response (including that
+ keyword arguments are not used).
+
+* Environment checks:
+
+ - Environment is a dictionary (and not a subclass).
+
+ - That all the required keys are in the environment: REQUEST_METHOD,
+ SERVER_NAME, SERVER_PORT, wsgi.version, wsgi.input, wsgi.errors,
+ wsgi.multithread, wsgi.multiprocess, wsgi.run_once
+
+ - That HTTP_CONTENT_TYPE and HTTP_CONTENT_LENGTH are not in the
+ environment (these headers should appear as CONTENT_LENGTH and
+ CONTENT_TYPE).
+
+ - Warns if QUERY_STRING is missing, as the cgi module acts
+ unpredictably in that case.
+
+ - That CGI-style variables (that don't contain a .) have
+ (non-unicode) string values
+
+ - That wsgi.version is a tuple
+
+ - That wsgi.url_scheme is 'http' or 'https' (@@: is this too
+ restrictive?)
+
+ - Warns if the REQUEST_METHOD is not known (@@: probably too
+ restrictive).
+
+ - That SCRIPT_NAME and PATH_INFO are empty or start with /
+
+ - That at least one of SCRIPT_NAME or PATH_INFO are set.
+
+ - That CONTENT_LENGTH is a positive integer.
+
+ - That SCRIPT_NAME is not '/' (it should be '', and PATH_INFO should
+ be '/').
+
+ - That wsgi.input has the methods read, readline, readlines, and
+ __iter__
+
+ - That wsgi.errors has the methods flush, write, writelines
+
+* The status is a string, contains a space, starts with an integer,
+ and that integer is in range (> 100).
+
+* That the headers is a list (not a subclass, not another kind of
+ sequence).
+
+* That the items of the headers are tuples of strings.
+
+* That there is no 'status' header (that is used in CGI, but not in
+ WSGI).
+
+* That the headers don't contain newlines or colons, end in _ or -, or
+ contain characters codes below 037.
+
+* That Content-Type is given if there is content (CGI often has a
+ default content type, but WSGI does not).
+
+* That no Content-Type is given when there is no content (@@: is this
+ too restrictive?)
+
+* That the exc_info argument to start_response is a tuple or None.
+
+* That all calls to the writer are with strings, and no other methods
+ on the writer are accessed.
+
+* That wsgi.input is used properly:
+
+ - .read() is called with zero or one argument
+
+ - That it returns a string
+
+ - That readline, readlines, and __iter__ return strings
+
+ - That .close() is not called
+
+ - No other methods are provided
+
+* That wsgi.errors is used properly:
+
+ - .write() and .writelines() is called with a string
+
+ - That .close() is not called, and no other methods are provided.
+
+* The response iterator:
+
+ - That it is not a string (it should be a list of a single string; a
+ string will work, but perform horribly).
+
+ - That .next() returns a string
+
+ - That the iterator is not iterated over until start_response has
+ been called (that can signal either a server or application
+ error).
+
+ - That .close() is called (doesn't raise exception, only prints to
+ sys.stderr, because we only know it isn't called when the object
+ is garbage collected).
+"""
+
+import re
+import six
+import sys
+import warnings
+
+header_re = re.compile(r'^[a-zA-Z][a-zA-Z0-9\-_]*$')
+bad_header_value_re = re.compile(r'[\000-\037]')
+
+class WSGIWarning(Warning):
+    """
+    Warning category issued for WSGI-spec-related problems that are
+    not severe enough to raise an AssertionError.
+    """
+
+def middleware(application, global_conf=None):
+
+    """
+    When applied between a WSGI server and a WSGI application, this
+    middleware will check for WSGI compliancy on a number of levels.
+    This middleware does not modify the request or response in any
+    way, but will throw an AssertionError if anything seems off
+    (except for a failure to close the application iterator, which
+    will be printed to stderr -- there's no way to throw an exception
+    at that point).
+    """
+
+    def lint_app(*args, **kw):
+        # The *args/**kw signature lets us verify the server calls the
+        # application with exactly two positional arguments.
+        assert len(args) == 2, "Two arguments required"
+        assert not kw, "No keyword arguments allowed"
+        environ, start_response = args
+
+        check_environ(environ)
+
+        # We use this to check if the application returns without
+        # calling start_response:
+        start_response_started = []
+
+        def start_response_wrapper(*args, **kw):
+            # start_response takes (status, headers) plus an optional
+            # exc_info; keyword arguments are forbidden by the spec.
+            assert len(args) == 2 or len(args) == 3, (
+                "Invalid number of arguments: %s" % args)
+            assert not kw, "No keyword arguments allowed"
+            status = args[0]
+            headers = args[1]
+            if len(args) == 3:
+                exc_info = args[2]
+            else:
+                exc_info = None
+
+            check_status(status)
+            check_headers(headers)
+            check_content_type(status, headers)
+            check_exc_info(exc_info)
+
+            # Record that start_response was called so IteratorWrapper
+            # can verify ordering when the body is iterated.
+            start_response_started.append(None)
+            return WriteWrapper(start_response(*args))
+
+        # Wrap the streams so misuse of wsgi.input / wsgi.errors is caught.
+        environ['wsgi.input'] = InputWrapper(environ['wsgi.input'])
+        environ['wsgi.errors'] = ErrorWrapper(environ['wsgi.errors'])
+
+        iterator = application(environ, start_response_wrapper)
+        assert iterator is not None and iterator != False, (
+            "The application must return an iterator, if only an empty list")
+
+        check_iterator(iterator)
+
+        return IteratorWrapper(iterator, start_response_started)
+
+    return lint_app
+
+class InputWrapper(object):
+    """
+    Wraps ``wsgi.input``, asserting that it is read according to the
+    WSGI spec: bounded argument counts, byte-string results, and no
+    ``close()`` calls by the application.
+    """
+
+    def __init__(self, wsgi_input):
+        self.input = wsgi_input
+
+    def read(self, *args):
+        # At most one argument (the size) is allowed.
+        assert len(args) <= 1
+        v = self.input.read(*args)
+        assert isinstance(v, six.binary_type)
+        return v
+
+    def readline(self, *args):
+        v = self.input.readline(*args)
+        assert isinstance(v, six.binary_type)
+        return v
+
+    def readlines(self, *args):
+        # At most one argument (the hint) is allowed.
+        assert len(args) <= 1
+        lines = self.input.readlines(*args)
+        assert isinstance(lines, list)
+        for line in lines:
+            assert isinstance(line, six.binary_type)
+        return lines
+
+    def __iter__(self):
+        # Iterate via our own readline() so each line is type-checked.
+        while 1:
+            line = self.readline()
+            if not line:
+                return
+            yield line
+
+    def close(self):
+        # The WSGI spec forbids the application from closing the input.
+        assert 0, "input.close() must not be called"
+
+class ErrorWrapper(object):
+    """
+    Wraps ``wsgi.errors``, asserting that only ``write``, ``flush``
+    and ``writelines`` are used and that ``close()`` is never called.
+    """
+
+    def __init__(self, wsgi_errors):
+        self.errors = wsgi_errors
+
+    def write(self, s):
+        # NOTE(review): this asserts bytes, while on Python 3 error
+        # streams conventionally take text -- confirm intended type.
+        assert isinstance(s, bytes)
+        self.errors.write(s)
+
+    def flush(self):
+        self.errors.flush()
+
+    def writelines(self, seq):
+        # Delegate to write() so every line gets the same type check.
+        for line in seq:
+            self.write(line)
+
+    def close(self):
+        # The WSGI spec forbids the application from closing wsgi.errors.
+        assert 0, "errors.close() must not be called"
+
+class WriteWrapper(object):
+
+ def __init__(self, wsgi_writer):
+ self.writer = wsgi_writer
+
+ def __call__(self, s):
+ assert isinstance(s, six.binary_type)
+ self.writer(s)
+
+class PartialIteratorWrapper(object):
+
+ def __init__(self, wsgi_iterator):
+ self.iterator = wsgi_iterator
+
+ def __iter__(self):
+ # We want to make sure __iter__ is called
+ return IteratorWrapper(self.iterator)
+
+class IteratorWrapper(object):
+    """
+    Wraps the application's response iterator, asserting that it is
+    not read after ``close()``, that ``start_response`` was called
+    before the first item is produced (when ``check_start_response``
+    is supplied), and that the iterator is closed before garbage
+    collection.
+    """
+
+    def __init__(self, wsgi_iterator, check_start_response):
+        # Keep the original object so close() can be forwarded to it.
+        self.original_iterator = wsgi_iterator
+        self.iterator = iter(wsgi_iterator)
+        self.closed = False
+        self.check_start_response = check_start_response
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        assert not self.closed, (
+            "Iterator read after closed")
+        v = six.next(self.iterator)
+        if self.check_start_response is not None:
+            # A truthy (non-empty) list means start_response was called.
+            assert self.check_start_response, (
+                "The application returns and we started iterating over its body, but start_response has not yet been called")
+            # Only check on the first item produced.
+            self.check_start_response = None
+        return v
+
+    # Python 3 spelling of the iterator protocol.
+    __next__ = next
+
+    def close(self):
+        self.closed = True
+        if hasattr(self.original_iterator, 'close'):
+            self.original_iterator.close()
+
+    def __del__(self):
+        # An exception raised here would be swallowed by the GC, so
+        # report to stderr first, then assert for good measure.
+        if not self.closed:
+            sys.stderr.write(
+                "Iterator garbage collected without being closed")
+        assert self.closed, (
+            "Iterator garbage collected without being closed")
+
+def check_environ(environ):
+    """
+    Assert that the WSGI ``environ`` is a plain dict containing the
+    required keys with valid values, warning (``WSGIWarning``) about
+    missing QUERY_STRING and unknown request methods.
+    """
+    # Must be exactly dict, not a subclass, per the WSGI spec.
+    assert isinstance(environ,dict), (
+        "Environment is not of the right type: %r (environment: %r)"
+        % (type(environ), environ))
+
+    for key in ['REQUEST_METHOD', 'SERVER_NAME', 'SERVER_PORT',
+                'wsgi.version', 'wsgi.input', 'wsgi.errors',
+                'wsgi.multithread', 'wsgi.multiprocess',
+                'wsgi.run_once']:
+        assert key in environ, (
+            "Environment missing required key: %r" % key)
+
+    # These two headers are spelled without the HTTP_ prefix in CGI.
+    for key in ['HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH']:
+        assert key not in environ, (
+            "Environment should not have the key: %s "
+            "(use %s instead)" % (key, key[5:]))
+
+    if 'QUERY_STRING' not in environ:
+        warnings.warn(
+            'QUERY_STRING is not in the WSGI environment; the cgi '
+            'module will use sys.argv when this variable is missing, '
+            'so application errors are more likely',
+            WSGIWarning)
+
+    for key in environ.keys():
+        if '.' in key:
+            # Extension, we don't care about its type
+            continue
+        # Native str required for CGI-style variables (bytes on py2,
+        # text on py3).
+        assert isinstance(environ[key], str), (
+            "Environmental variable %s is not a string: %r (value: %r)"
+            % (key, type(environ[key]), environ[key]))
+
+    assert isinstance(environ['wsgi.version'], tuple), (
+        "wsgi.version should be a tuple (%r)" % environ['wsgi.version'])
+    assert environ['wsgi.url_scheme'] in ('http', 'https'), (
+        "wsgi.url_scheme unknown: %r" % environ['wsgi.url_scheme'])
+
+    check_input(environ['wsgi.input'])
+    check_errors(environ['wsgi.errors'])
+
+    # @@: these need filling out:
+    if environ['REQUEST_METHOD'] not in (
+        'GET', 'HEAD', 'POST', 'OPTIONS','PUT','DELETE','TRACE'):
+        warnings.warn(
+            "Unknown REQUEST_METHOD: %r" % environ['REQUEST_METHOD'],
+            WSGIWarning)
+
+    assert (not environ.get('SCRIPT_NAME')
+            or environ['SCRIPT_NAME'].startswith('/')), (
+        "SCRIPT_NAME doesn't start with /: %r" % environ['SCRIPT_NAME'])
+    assert (not environ.get('PATH_INFO')
+            or environ['PATH_INFO'].startswith('/')), (
+        "PATH_INFO doesn't start with /: %r" % environ['PATH_INFO'])
+    if environ.get('CONTENT_LENGTH'):
+        # int() also (implicitly) asserts the value is numeric.
+        assert int(environ['CONTENT_LENGTH']) >= 0, (
+            "Invalid CONTENT_LENGTH: %r" % environ['CONTENT_LENGTH'])
+
+    if not environ.get('SCRIPT_NAME'):
+        assert 'PATH_INFO' in environ, (
+            "One of SCRIPT_NAME or PATH_INFO are required (PATH_INFO "
+            "should at least be '/' if SCRIPT_NAME is empty)")
+    assert environ.get('SCRIPT_NAME') != '/', (
+        "SCRIPT_NAME cannot be '/'; it should instead be '', and "
+        "PATH_INFO should be '/'")
+
+def check_input(wsgi_input):
+ for attr in ['read', 'readline', 'readlines', '__iter__']:
+ assert hasattr(wsgi_input, attr), (
+ "wsgi.input (%r) doesn't have the attribute %s"
+ % (wsgi_input, attr))
+
+def check_errors(wsgi_errors):
+ for attr in ['flush', 'write', 'writelines']:
+ assert hasattr(wsgi_errors, attr), (
+ "wsgi.errors (%r) doesn't have the attribute %s"
+ % (wsgi_errors, attr))
+
+def check_status(status):
+ assert isinstance(status, str), (
+ "Status must be a string (not %r)" % status)
+ # Implicitly check that we can turn it into an integer:
+ status_code = status.split(None, 1)[0]
+ assert len(status_code) == 3, (
+ "Status codes must be three characters: %r" % status_code)
+ status_int = int(status_code)
+ assert status_int >= 100, "Status code is invalid: %r" % status_int
+ if len(status) < 4 or status[3] != ' ':
+ warnings.warn(
+ "The status string (%r) should be a three-digit integer "
+ "followed by a single space and a status explanation"
+ % status, WSGIWarning)
+
+def check_headers(headers):
+    """
+    Assert that the response headers are a plain list of 2-tuples of
+    valid header names and values, with no 'Status' header.
+    """
+    # Must be exactly list, not a subclass or other sequence.
+    assert isinstance(headers,list), (
+        "Headers (%r) must be of type list: %r"
+        % (headers, type(headers)))
+    header_names = {}
+    for item in headers:
+        assert isinstance(item, tuple), (
+            "Individual headers (%r) must be of type tuple: %r"
+            % (item, type(item)))
+        assert len(item) == 2
+        name, value = item
+        assert name.lower() != 'status', (
+            "The Status header cannot be used; it conflicts with CGI "
+            "script, and HTTP status is not given through headers "
+            "(value: %r)." % value)
+        # Recorded case-insensitively; currently only the keys are used.
+        header_names[name.lower()] = None
+        assert '\n' not in name and ':' not in name, (
+            "Header names may not contain ':' or '\\n': %r" % name)
+        assert header_re.search(name), "Bad header name: %r" % name
+        assert not name.endswith('-') and not name.endswith('_'), (
+            "Names may not end in '-' or '_': %r" % name)
+        # Control characters (codes below 040) are forbidden in values.
+        assert not bad_header_value_re.search(value), (
+            "Bad header value: %r (bad char: %r)"
+            % (value, bad_header_value_re.search(value).group(0)))
+
+def check_content_type(status, headers):
+ code = int(status.split(None, 1)[0])
+ # @@: need one more person to verify this interpretation of RFC 2616
+ # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
+ NO_MESSAGE_BODY = (204, 304)
+ NO_MESSAGE_TYPE = (204, 304)
+ for name, value in headers:
+ if name.lower() == 'content-type':
+ if code not in NO_MESSAGE_TYPE:
+ return
+ assert 0, (("Content-Type header found in a %s response, "
+ "which must not return content.") % code)
+ if code not in NO_MESSAGE_BODY:
+ assert 0, "No Content-Type header found in headers (%s)" % headers
+
+def check_exc_info(exc_info):
+ assert exc_info is None or type(exc_info) is type(()), (
+ "exc_info (%r) is not a tuple: %r" % (exc_info, type(exc_info)))
+ # More exc_info checks?
+
+def check_iterator(iterator):
+ # Technically a string is legal, which is why it's a really bad
+ # idea, because it may cause the response to be returned
+ # character-by-character
+ assert not isinstance(iterator, str), (
+ "You should not return a string as your application iterator, "
+ "instead return a single-item list containing that string.")
+
+def make_middleware(application, global_conf):
+    # Paste Deploy entry point; global_conf is accepted but unused.
+    # @@: global_conf should be taken out of the middleware function,
+    # and isolated here
+    return middleware(application)
+
+make_middleware.__doc__ = __doc__
+
+__all__ = ['middleware', 'make_middleware']
diff --git a/paste/modpython.py b/paste/modpython.py
new file mode 100644
index 0000000..d20a588
--- /dev/null
+++ b/paste/modpython.py
@@ -0,0 +1,253 @@
+"""WSGI Paste wrapper for mod_python. Requires Python 2.2 or greater.
+
+
+Example httpd.conf section for a Paste app with an ini file::
+
+ <Location />
+ SetHandler python-program
+ PythonHandler paste.modpython
+ PythonOption paste.ini /some/location/your/pasteconfig.ini
+ </Location>
+
+Or if you want to load a WSGI application under /your/homedir in the module
+``startup`` and the WSGI app is ``app``::
+
+ <Location />
+ SetHandler python-program
+ PythonHandler paste.modpython
+ PythonPath "['/virtual/project/directory'] + sys.path"
+ PythonOption wsgi.application startup::app
+ </Location>
+
+
+If you'd like to use a virtual installation, make sure to add it in the path
+like so::
+
+ <Location />
+ SetHandler python-program
+ PythonHandler paste.modpython
+ PythonPath "['/virtual/project/directory', '/virtual/lib/python2.4/'] + sys.path"
+ PythonOption paste.ini /virtual/project/directory/pasteconfig.ini
+ </Location>
+
+Some WSGI implementations assume that the SCRIPT_NAME environ variable will
+always be equal to "the root URL of the app"; Apache probably won't act as
+you expect in that case. You can add another PythonOption directive to tell
+modpython_gateway to force that behavior:
+
+ PythonOption SCRIPT_NAME /mcontrol
+
+Some WSGI applications need to be cleaned up when Apache exits. You can
+register a cleanup handler with yet another PythonOption directive:
+
+ PythonOption wsgi.cleanup module::function
+
+The module.function will be called with no arguments on server shutdown,
+once for each child process or thread.
+
+This module is largely based on Robert Brewer's modpython_gateway, here:
+http://projects.amor.org/misc/svn/modpython_gateway.py
+"""
+
+import six
+import traceback
+
+try:
+ from mod_python import apache
+except:
+ pass
+from paste.deploy import loadapp
+
+class InputWrapper(object):
+
+ def __init__(self, req):
+ self.req = req
+
+ def close(self):
+ pass
+
+ def read(self, size=-1):
+ return self.req.read(size)
+
+ def readline(self, size=-1):
+ return self.req.readline(size)
+
+ def readlines(self, hint=-1):
+ return self.req.readlines(hint)
+
+ def __iter__(self):
+ line = self.readline()
+ while line:
+ yield line
+ # Notice this won't prefetch the next line; it only
+ # gets called if the generator is resumed.
+ line = self.readline()
+
+
+class ErrorWrapper(object):
+
+ def __init__(self, req):
+ self.req = req
+
+ def flush(self):
+ pass
+
+ def write(self, msg):
+ self.req.log_error(msg)
+
+ def writelines(self, seq):
+ self.write(''.join(seq))
+
+
+bad_value = ("You must provide a PythonOption '%s', either 'on' or 'off', "
+ "when running a version of mod_python < 3.1")
+
+
+class Handler(object):
+    """
+    Builds a WSGI environ from a mod_python request and runs a WSGI
+    application against it, translating start_response/write calls
+    back onto the mod_python request object.
+    """
+
+    def __init__(self, req):
+        # True once the first body byte has been written (headers sent).
+        self.started = False
+
+        options = req.get_options()
+
+        # Threading and forking
+        try:
+            # mod_python >= 3.1 can report the MPM's concurrency model.
+            q = apache.mpm_query
+            threaded = q(apache.AP_MPMQ_IS_THREADED)
+            forked = q(apache.AP_MPMQ_IS_FORKED)
+        except AttributeError:
+            # Older mod_python: the deployer must declare the model via
+            # PythonOption multithread/multiprocess.
+            threaded = options.get('multithread', '').lower()
+            if threaded == 'on':
+                threaded = True
+            elif threaded == 'off':
+                threaded = False
+            else:
+                raise ValueError(bad_value % "multithread")
+
+            forked = options.get('multiprocess', '').lower()
+            if forked == 'on':
+                forked = True
+            elif forked == 'off':
+                forked = False
+            else:
+                raise ValueError(bad_value % "multiprocess")
+
+        env = self.environ = dict(apache.build_cgi_env(req))
+
+        if 'SCRIPT_NAME' in options:
+            # Override SCRIPT_NAME and PATH_INFO if requested.
+            env['SCRIPT_NAME'] = options['SCRIPT_NAME']
+            env['PATH_INFO'] = req.uri[len(options['SCRIPT_NAME']):]
+        else:
+            env['SCRIPT_NAME'] = ''
+            env['PATH_INFO'] = req.uri
+
+        env['wsgi.input'] = InputWrapper(req)
+        env['wsgi.errors'] = ErrorWrapper(req)
+        env['wsgi.version'] = (1, 0)
+        env['wsgi.run_once'] = False
+        if env.get("HTTPS") in ('yes', 'on', '1'):
+            env['wsgi.url_scheme'] = 'https'
+        else:
+            env['wsgi.url_scheme'] = 'http'
+        env['wsgi.multithread'] = threaded
+        env['wsgi.multiprocess'] = forked
+
+        self.request = req
+
+    def run(self, application):
+        """Invoke the application, streaming its body to the client;
+        on any error, log the traceback and emit a plain 500 if no
+        output has been sent yet."""
+        try:
+            result = application(self.environ, self.start_response)
+            for data in result:
+                self.write(data)
+            if not self.started:
+                # Empty body and write() never called: send headers now.
+                self.request.set_content_length(0)
+            if hasattr(result, 'close'):
+                result.close()
+        except:
+            traceback.print_exc(None, self.environ['wsgi.errors'])
+            if not self.started:
+                self.request.status = 500
+                self.request.content_type = 'text/plain'
+                data = "A server error occurred. Please contact the administrator."
+                self.request.set_content_length(len(data))
+                self.request.write(data)
+
+    def start_response(self, status, headers, exc_info=None):
+        """WSGI start_response: map status/headers onto the mod_python
+        request and return the write callable."""
+        if exc_info:
+            try:
+                if self.started:
+                    # Headers already sent; per WSGI, re-raise the error.
+                    six.reraise(exc_info[0], exc_info[1], exc_info[2])
+            finally:
+                # Break the reference cycle with the traceback.
+                exc_info = None
+
+        self.request.status = int(status[:3])
+
+        for key, val in headers:
+            # Content-Length and Content-Type are set via dedicated
+            # mod_python APIs; everything else goes to headers_out.
+            if key.lower() == 'content-length':
+                self.request.set_content_length(int(val))
+            elif key.lower() == 'content-type':
+                self.request.content_type = val
+            else:
+                self.request.headers_out.add(key, val)
+
+        return self.write
+
+    def write(self, data):
+        if not self.started:
+            self.started = True
+        self.request.write(data)
+
+
+startup = None
+cleanup = None
+wsgiapps = {}
+
+def handler(req):
+    """
+    mod_python entry point: resolve startup/cleanup hooks and the WSGI
+    application from PythonOption directives, then run the request.
+    """
+    options = req.get_options()
+    # Run a startup function if requested.
+    global startup
+    if 'wsgi.startup' in options and not startup:
+        func = options['wsgi.startup']
+        if func:
+            # 'module::callable' syntax, resolved via mod_python.
+            module_name, object_str = func.split('::', 1)
+            module = __import__(module_name, globals(), locals(), [''])
+            startup = apache.resolve_object(module, object_str)
+            startup(req)
+
+    # Register a cleanup function if requested.
+    global cleanup
+    if 'wsgi.cleanup' in options and not cleanup:
+        func = options['wsgi.cleanup']
+        if func:
+            module_name, object_str = func.split('::', 1)
+            module = __import__(module_name, globals(), locals(), [''])
+            cleanup = apache.resolve_object(module, object_str)
+            def cleaner(data):
+                cleanup()
+            try:
+                # apache.register_cleanup wasn't available until 3.1.4.
+                apache.register_cleanup(cleaner)
+            except AttributeError:
+                req.server.register_cleanup(req, cleaner)
+
+    # Load the app from a Paste ini file, caching per ini path.
+    global wsgiapps
+    appini = options.get('paste.ini')
+    app = None
+    if appini:
+        if appini not in wsgiapps:
+            wsgiapps[appini] = loadapp("config:%s" % appini)
+        app = wsgiapps[appini]
+
+    # Import the wsgi 'application' callable and pass it to Handler.run
+    # (only when no paste.ini was given; paste.ini takes precedence).
+    appwsgi = options.get('wsgi.application')
+    if appwsgi and not appini:
+        modname, objname = appwsgi.split('::', 1)
+        module = __import__(modname, globals(), locals(), [''])
+        app = getattr(module, objname)
+
+    Handler(req).run(app)
+
+    # status was set in Handler; always return apache.OK
+    return apache.OK
diff --git a/paste/pony.py b/paste/pony.py
new file mode 100644
index 0000000..fce6aa8
--- /dev/null
+++ b/paste/pony.py
@@ -0,0 +1,57 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+We have a pony and/or a unicorn.
+"""
+from paste.request import construct_url
+
+PONY = """
+eJyFkkFuxCAMRfdzCisbJxK2D5D2JpbMrlI3XXQZDt9PCG0ySgcWIMT79rcN0XClUJlZRB9jVmci
+FmV19khjgRFl0RzrKmqzvY8lRUWFlXvCrD7UbAQR/17NUvGhypAF9og16vWtkC8DzUayS6pN3/dR
+ki0OnpzKjUBFpmlC7zVFRNL1rwoq6PWXXQSnIm9WoTzlM2//ke21o5g/l1ckRhiPbkDZXsKIR7l1
+36hF9uMhnRiVjI8UgYjlsIKCrXXpcA9iX5y7zMmtG0fUpW61Ssttipf6cp3WARfkMVoYFryi2a+w
+o/2dhW0OXfcMTnmh53oR9egzPs+qkpY9IKxdUVRP5wHO7UDAuI6moA2N+/z4vtc2k8B+AIBimVU=
+"""
+
+UNICORN = """
+eJyVVD1vhDAM3e9XeAtIxB5P6qlDx0OMXVBzSpZOHdsxP762E0JAnMgZ8Zn37OePAPC60eV1Dl5b
+SS7fB6DmQNGhtegpNlPIQS8HmkYGdSqNqDF9wcMYus4TuBYGsZwIPqXfEoNir5K+R3mbzhlR4JMW
+eGpikPpn9wHl2sDgEH1270guZwzKDRf3nTztMvfI5r3fJqEmNxdCyISBcWjNgjPG8Egg2hgT3mJi
+KBwNvmPB1hbWJ3TwBfMlqdTzxNyDE2H8zOD5HA4KkqJGPVY/TwnxmPA82kdSJNj7zs+R0d1pB+JO
+xn2DKgsdxAfFS2pfTSD0Fb6Uzv7dCQSvE5JmZQEQ90vNjBU1GPuGQpCPS8cGo+dQgjIKqxnJTXbw
+ucFzPFVIJXtzk6BXKGPnYsKzvFmGx7A0j6Zqvlvk5rETXbMWTGWj0RFc8QNPYVfhJfMMniCPazWJ
+lGtPZecIGJWW6oL2hpbWRZEkChe8eg5Wb7xx/MBZBFjxeZPEss+mRQ3Uhc8WQv684seSRO7i3nb4
+7HlKUg8sraz47LmXyh8S0somADvoUpoHjGWl+rUkF0H+EIf/gbyyMg58BBk6L634/fkHUCodMw==
+"""
+
+
+class PonyMiddleware(object):
+
+ def __init__(self, application):
+ self.application = application
+
+ def __call__(self, environ, start_response):
+ path_info = environ.get('PATH_INFO', '')
+ if path_info == '/pony':
+ url = construct_url(environ, with_query_string=False)
+ if 'horn' in environ.get('QUERY_STRING', ''):
+ data = UNICORN
+ link = 'remove horn!'
+ else:
+ data = PONY
+ url += '?horn'
+ link = 'add horn!'
+ msg = data.decode('base64').decode('zlib')
+ msg = '<pre>%s\n<a href="%s">%s</a></pre>' % (
+ msg, url, link)
+ start_response('200 OK', [('content-type', 'text/html')])
+ return [msg]
+ else:
+ return self.application(environ, start_response)
+
+def make_pony(app, global_conf):
+ """
+ Adds pony power to any application, at /pony
+ """
+ return PonyMiddleware(app)
+
diff --git a/paste/progress.py b/paste/progress.py
new file mode 100755
index 0000000..57bf0bd
--- /dev/null
+++ b/paste/progress.py
@@ -0,0 +1,222 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# This code was written with funding by http://prometheusresearch.com
+"""
+Upload Progress Monitor
+
+This is a WSGI middleware component which monitors the status of files
+being uploaded. It includes a small query application which will return
+a list of all files being uploaded by particular session/user.
+
+>>> from paste.httpserver import serve
+>>> from paste.urlmap import URLMap
+>>> from paste.auth.basic import AuthBasicHandler
+>>> from paste.debug.debugapp import SlowConsumer, SimpleApplication
+>>> # from paste.progress import *
+>>> realm = 'Test Realm'
+>>> def authfunc(username, password):
+... return username == password
+>>> map = URLMap({})
+>>> ups = UploadProgressMonitor(map, threshold=1024)
+>>> map['/upload'] = SlowConsumer()
+>>> map['/simple'] = SimpleApplication()
+>>> map['/report'] = UploadProgressReporter(ups)
+>>> serve(AuthBasicHandler(ups, realm, authfunc))
+serving on...
+
+.. note::
+
+ This is experimental, and will change in the future.
+"""
+import time
+from paste.wsgilib import catch_errors
+
+DEFAULT_THRESHOLD = 1024 * 1024 # one megabyte
+DEFAULT_TIMEOUT = 60*5 # five minutes
+ENVIRON_RECEIVED = 'paste.bytes_received'
+REQUEST_STARTED = 'paste.request_started'
+REQUEST_FINISHED = 'paste.request_finished'
+
+class _ProgressFile(object):
+ """
+ This is the input-file wrapper used to record the number of
+ ``paste.bytes_received`` for the given request.
+ """
+
+ def __init__(self, environ, rfile):
+ self._ProgressFile_environ = environ
+ self._ProgressFile_rfile = rfile
+ self.flush = rfile.flush
+ self.write = rfile.write
+ self.writelines = rfile.writelines
+
+ def __iter__(self):
+ environ = self._ProgressFile_environ
+ riter = iter(self._ProgressFile_rfile)
+ def iterwrap():
+ for chunk in riter:
+ environ[ENVIRON_RECEIVED] += len(chunk)
+ yield chunk
+ return iter(iterwrap)
+
+ def read(self, size=-1):
+ chunk = self._ProgressFile_rfile.read(size)
+ self._ProgressFile_environ[ENVIRON_RECEIVED] += len(chunk)
+ return chunk
+
+ def readline(self):
+ chunk = self._ProgressFile_rfile.readline()
+ self._ProgressFile_environ[ENVIRON_RECEIVED] += len(chunk)
+ return chunk
+
+ def readlines(self, hint=None):
+ chunk = self._ProgressFile_rfile.readlines(hint)
+ self._ProgressFile_environ[ENVIRON_RECEIVED] += len(chunk)
+ return chunk
+
+class UploadProgressMonitor(object):
+    """
+    monitors and reports on the status of uploads in progress
+
+    Parameters:
+
+        ``application``
+
+            This is the next application in the WSGI stack.
+
+        ``threshold``
+
+            This is the size in bytes that is needed for the
+            upload to be included in the monitor.
+
+        ``timeout``
+
+            This is the amount of time (in seconds) that a upload
+            remains in the monitor after it has finished.
+
+    Methods:
+
+        ``uploads()``
+
+            This returns a list of ``environ`` dict objects for each
+            upload being currently monitored, or finished but whose time
+            has not yet expired.
+
+    For each request ``environ`` that is monitored, there are several
+    variables that are stored:
+
+        ``paste.bytes_received``
+
+            This is the total number of bytes received for the given
+            request; it can be compared with ``CONTENT_LENGTH`` to
+            build a percentage complete.  This is an integer value.
+
+        ``paste.request_started``
+
+            This is the time (in seconds) when the request was started
+            as obtained from ``time.time()``.  One would want to format
+            this for presentation to the user, if necessary.
+
+        ``paste.request_finished``
+
+            This is the time (in seconds) when the request was finished,
+            canceled, or otherwise disconnected.  This is None while
+            the given upload is still in-progress.
+
+    TODO: turn monitor into a queue and purge queue of finished
+    requests that have passed the timeout period.
+    """
+    def __init__(self, application, threshold=None, timeout=None):
+        self.application = application
+        self.threshold = threshold or DEFAULT_THRESHOLD
+        self.timeout = timeout or DEFAULT_TIMEOUT
+        # NOTE(review): monitored environs are appended but never
+        # purged (see TODO above), so this list grows per upload.
+        self.monitor = []
+
+    def __call__(self, environ, start_response):
+        length = environ.get('CONTENT_LENGTH', 0)
+        # Only track bodies larger than the threshold.
+        if length and int(length) > self.threshold:
+            # replace input file object
+            self.monitor.append(environ)
+            environ[ENVIRON_RECEIVED] = 0
+            environ[REQUEST_STARTED] = time.time()
+            environ[REQUEST_FINISHED] = None
+            environ['wsgi.input'] = \
+                _ProgressFile(environ, environ['wsgi.input'])
+            def finalizer(exc_info=None):
+                # Stamp the finish time whether the request succeeded
+                # or errored out.
+                environ[REQUEST_FINISHED] = time.time()
+            return catch_errors(self.application, environ,
+                                start_response, finalizer, finalizer)
+        return self.application(environ, start_response)
+
+    def uploads(self):
+        # Returns the live list (not a copy) of monitored environs.
+        return self.monitor
+
+class UploadProgressReporter(object):
+    """
+    reports on the progress of uploads for a given user
+
+    This reporter returns a JSON file (for use in AJAX) listing the
+    uploads in progress for the given user. By default, this reporter
+    uses the ``REMOTE_USER`` environment to compare between the current
+    request and uploads in-progress. If they match, then a response
+    record is formed.
+
+        ``match()``
+
+            This member function can be overridden to provide alternative
+            matching criteria. It takes two environments, the first
+            is the current request, the second is a current upload.
+
+        ``report()``
+
+            This member function takes an environment and builds a
+            ``dict`` that will be used to create a JSON mapping for
+            the given upload. By default, this just includes the
+            percent complete and the request url.
+
+    """
+    def __init__(self, monitor):
+        # ``monitor`` is expected to expose ``uploads()``, as
+        # UploadProgressMonitor does.
+        self.monitor = monitor
+
+    def match(self, search_environ, upload_environ):
+        # Distinct defaults (None vs 0) ensure two environs that BOTH
+        # lack REMOTE_USER do not match each other.
+        if search_environ.get('REMOTE_USER', None) == \
+           upload_environ.get('REMOTE_USER', 0):
+            return True
+        return False
+
+    def report(self, environ):
+        # Summarize one monitored upload as a flat dict of strings/ints.
+        retval = { 'started': time.strftime("%Y-%m-%d %H:%M:%S",
+                                            time.gmtime(environ[REQUEST_STARTED])),
+                   'finished': '',
+                   'content_length': environ.get('CONTENT_LENGTH'),
+                   'bytes_received': environ[ENVIRON_RECEIVED],
+                   'path_info': environ.get('PATH_INFO',''),
+                   'query_string': environ.get('QUERY_STRING','')}
+        finished = environ[REQUEST_FINISHED]
+        if finished:
+            retval['finished'] = time.strftime("%Y:%m:%d %H:%M:%S",
+                                               time.gmtime(finished))
+        return retval
+
+    def __call__(self, environ, start_response):
+        # NOTE(review): the hand-built output is JSON-like but the keys
+        # are not quoted, so strict JSON parsers will reject it.
+        body = []
+        for map in [self.report(env) for env in self.monitor.uploads()
+                    if self.match(environ, env)]:
+            parts = []
+            for k, v in map.items():
+                v = str(v).replace("\\", "\\\\").replace('"', '\\"')
+                parts.append('%s: "%s"' % (k, v))
+            body.append("{ %s }" % ", ".join(parts))
+        body = "[ %s ]" % ", ".join(body)
+        start_response("200 OK", [('Content-Type', 'text/plain'),
+                                  ('Content-Length', len(body))])
+        return [body]
+
+__all__ = ['UploadProgressMonitor', 'UploadProgressReporter']
+
+if "__main__" == __name__:
+ import doctest
+ doctest.testmod(optionflags=doctest.ELLIPSIS)
diff --git a/paste/proxy.py b/paste/proxy.py
new file mode 100644
index 0000000..67d4b1b
--- /dev/null
+++ b/paste/proxy.py
@@ -0,0 +1,289 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+An application that proxies WSGI requests to a remote server.
+
+TODO:
+
+* Send ``Via`` header? It's not clear to me this is a Via in the
+ style of a typical proxy.
+
+* Other headers or metadata? I put in X-Forwarded-For, but that's it.
+
+* Signed data of non-HTTP keys? This would be for things like
+ REMOTE_USER.
+
+* Something to indicate what the original URL was? The original host,
+ scheme, and base path.
+
+* Rewriting ``Location`` headers? mod_proxy does this.
+
+* Rewriting body? (Probably not on this one -- that can be done with
+ a different middleware that wraps this middleware)
+
+* Example::
+
+ use = egg:Paste#proxy
+ address = http://server3:8680/exist/rest/db/orgs/sch/config/
+ allowed_request_methods = GET
+
+"""
+
+from six.moves import http_client as httplib
+from six.moves.urllib import parse as urlparse
+from six.moves.urllib.parse import quote
+import six
+
+from paste import httpexceptions
+from paste.util.converters import aslist
+
# Remove these headers from response (specify lower case header
# names):
# These are the RFC 2616 "hop-by-hop" headers: they describe the
# connection between this proxy and the upstream server and must not be
# forwarded to the downstream client.
filtered_headers = (
    'transfer-encoding',
    'connection',
    'keep-alive',
    'proxy-authenticate',
    'proxy-authorization',
    'te',
    'trailers',
    'upgrade',
)
+
class Proxy(object):
    """WSGI application that forwards every request to a fixed remote
    ``address`` over HTTP or HTTPS.

    ``allowed_request_methods`` optionally restricts which methods are
    proxied (others get a 400); ``suppress_http_headers`` lists
    lower-case header names (without the ``http_`` prefix) that are
    stripped from the outgoing request.
    """

    def __init__(self, address, allowed_request_methods=(),
                 suppress_http_headers=()):
        self.address = address
        self.parsed = urlparse.urlsplit(address)
        self.scheme = self.parsed[0].lower()
        self.host = self.parsed[1]
        self.path = self.parsed[2]
        # Normalize to lower case once; requests are compared case-
        # insensitively.  Empty entries are dropped.
        self.allowed_request_methods = [
            x.lower() for x in allowed_request_methods if x]

        self.suppress_http_headers = [
            x.lower() for x in suppress_http_headers if x]

    def __call__(self, environ, start_response):
        # Reject disallowed methods before opening any connection.
        if (self.allowed_request_methods and
            environ['REQUEST_METHOD'].lower() not in self.allowed_request_methods):
            return httpexceptions.HTTPBadRequest("Disallowed")(environ, start_response)

        if self.scheme == 'http':
            ConnClass = httplib.HTTPConnection
        elif self.scheme == 'https':
            ConnClass = httplib.HTTPSConnection
        else:
            raise ValueError(
                "Unknown scheme for %r: %r" % (self.address, self.scheme))
        conn = ConnClass(self.host)
        headers = {}
        # Rebuild request headers from the HTTP_* environ keys, skipping
        # the Host header (replaced below) and any suppressed names.
        for key, value in environ.items():
            if key.startswith('HTTP_'):
                key = key[5:].lower().replace('_', '-')
                if key == 'host' or key in self.suppress_http_headers:
                    continue
                headers[key] = value
        headers['host'] = self.host
        if 'REMOTE_ADDR' in environ:
            headers['x-forwarded-for'] = environ['REMOTE_ADDR']
        if environ.get('CONTENT_TYPE'):
            headers['content-type'] = environ['CONTENT_TYPE']
        if environ.get('CONTENT_LENGTH'):
            if environ['CONTENT_LENGTH'] == '-1':
                # This is a special case, where the content length is basically undetermined
                body = environ['wsgi.input'].read(-1)
                headers['content-length'] = str(len(body))
            else:
                headers['content-length'] = environ['CONTENT_LENGTH']
                length = int(environ['CONTENT_LENGTH'])
                body = environ['wsgi.input'].read(length)
        else:
            body = ''

        # Join the configured base path with the (quoted) request path.
        path_info = quote(environ['PATH_INFO'])
        if self.path:
            request_path = path_info
            if request_path and request_path[0] == '/':
                request_path = request_path[1:]

            path = urlparse.urljoin(self.path, request_path)
        else:
            path = path_info
        if environ.get('QUERY_STRING'):
            path += '?' + environ['QUERY_STRING']

        conn.request(environ['REQUEST_METHOD'],
                     path,
                     body, headers)
        res = conn.getresponse()
        # Drop hop-by-hop headers before relaying the response.
        headers_out = parse_headers(res.msg)

        status = '%s %s' % (res.status, res.reason)
        start_response(status, headers_out)
        # @@: Default?
        length = res.getheader('content-length')
        if length is not None:
            body = res.read(int(length))
        else:
            body = res.read()
        conn.close()
        return [body]
+
def make_proxy(global_conf, address, allowed_request_methods="",
               suppress_http_headers=""):
    """
    Paste-deploy factory: build a ``Proxy`` application.

    ``address``
        the full URL ending with a trailing ``/``

    ``allowed_request_methods``:
        a space separated list of request methods (e.g., ``GET POST``)

    ``suppress_http_headers``
        a space separated list of http headers (lower case, without
        the leading ``http_``) that should not be passed on to target
        host
    """
    methods = aslist(allowed_request_methods)
    suppressed = aslist(suppress_http_headers)
    return Proxy(address,
                 allowed_request_methods=methods,
                 suppress_http_headers=suppressed)
+
+
class TransparentProxy(object):

    """
    A proxy that sends the request just as it was given, including
    respecting HTTP_HOST, wsgi.url_scheme, etc.

    This is a way of translating WSGI requests directly to real HTTP
    requests.  All information goes in the environment; modify it to
    modify the way the request is made.

    If you specify ``force_host`` (and optionally ``force_scheme``)
    then HTTP_HOST won't be used to determine where to connect to;
    instead a specific host will be connected to, but the ``Host``
    header in the request will remain intact.
    """

    def __init__(self, force_host=None,
                 force_scheme='http'):
        self.force_host = force_host
        self.force_scheme = force_scheme

    def __repr__(self):
        return '<%s %s force_host=%r force_scheme=%r>' % (
            self.__class__.__name__,
            hex(id(self)),
            self.force_host, self.force_scheme)

    def __call__(self, environ, start_response):
        # Pick the connection scheme: the request's own scheme unless a
        # host is being forced, in which case force_scheme wins.
        scheme = environ['wsgi.url_scheme']
        if self.force_host is None:
            conn_scheme = scheme
        else:
            conn_scheme = self.force_scheme
        if conn_scheme == 'http':
            ConnClass = httplib.HTTPConnection
        elif conn_scheme == 'https':
            ConnClass = httplib.HTTPSConnection
        else:
            raise ValueError(
                "Unknown scheme %r" % scheme)
        if 'HTTP_HOST' not in environ:
            raise ValueError(
                "WSGI environ must contain an HTTP_HOST key")
        # Connect to force_host when set, but keep the original Host
        # header in the forwarded request either way.
        host = environ['HTTP_HOST']
        if self.force_host is None:
            conn_host = host
        else:
            conn_host = self.force_host
        conn = ConnClass(conn_host)
        headers = {}
        # Rebuild request headers from the HTTP_* environ keys.
        for key, value in environ.items():
            if key.startswith('HTTP_'):
                key = key[5:].lower().replace('_', '-')
                headers[key] = value
        headers['host'] = host
        # Only add x-forwarded-for when an upstream proxy hasn't already.
        if 'REMOTE_ADDR' in environ and 'HTTP_X_FORWARDED_FOR' not in environ:
            headers['x-forwarded-for'] = environ['REMOTE_ADDR']
        if environ.get('CONTENT_TYPE'):
            headers['content-type'] = environ['CONTENT_TYPE']
        if environ.get('CONTENT_LENGTH'):
            length = int(environ['CONTENT_LENGTH'])
            body = environ['wsgi.input'].read(length)
            if length == -1:
                # Undetermined length: read(-1) consumed everything, so
                # record the real size for the outgoing request.
                environ['CONTENT_LENGTH'] = str(len(body))
        elif 'CONTENT_LENGTH' not in environ:
            body = ''
            length = 0
        else:
            body = ''
            length = 0

        path = (environ.get('SCRIPT_NAME', '')
                + environ.get('PATH_INFO', ''))
        path = quote(path)
        if 'QUERY_STRING' in environ:
            path += '?' + environ['QUERY_STRING']
        conn.request(environ['REQUEST_METHOD'],
                     path, body, headers)
        res = conn.getresponse()
        # Drop hop-by-hop headers before relaying the response.
        headers_out = parse_headers(res.msg)

        status = '%s %s' % (res.status, res.reason)
        start_response(status, headers_out)
        # @@: Default?
        length = res.getheader('content-length')
        if length is not None:
            body = res.read(int(length))
        else:
            body = res.read()
        conn.close()
        return [body]
+
def parse_headers(message):
    """
    Turn a Message object into a list of WSGI-style headers.

    Hop-by-hop headers (``filtered_headers``) are dropped.  On Python 2
    the raw header lines are parsed by hand so that continuation lines
    are folded into the preceding header's value.
    """
    headers_out = []
    if six.PY3:
        for header, value in message.items():
            if header.lower() not in filtered_headers:
                headers_out.append((header, value))
    else:
        for full_header in message.headers:
            if not full_header:
                # Shouldn't happen, but we'll just ignore
                continue
            if full_header[0].isspace():
                # Continuation line, add to the last header
                if not headers_out:
                    raise ValueError(
                        "First header starts with a space (%r)" % full_header)
                last_header, last_value = headers_out.pop()
                value = last_value + ' ' + full_header.strip()
                headers_out.append((last_header, value))
                continue
            try:
                header, value = full_header.split(':', 1)
            except ValueError:
                # Only the tuple unpacking can fail here (no ':' in the
                # line); the previous bare ``except:`` also swallowed
                # KeyboardInterrupt/SystemExit.
                raise ValueError("Invalid header: %r" % full_header)
            value = value.strip()
            if header.lower() not in filtered_headers:
                headers_out.append((header, value))
    return headers_out
+
def make_transparent_proxy(
    global_conf, force_host=None, force_scheme='http'):
    """Paste-deploy factory for ``TransparentProxy``.

    The resulting proxy forwards each request exactly as received; when
    ``force_host`` is given, every request is sent to that host (over
    ``force_scheme``) while the ``Host`` header is left untouched.
    """
    return TransparentProxy(force_host=force_host, force_scheme=force_scheme)
diff --git a/paste/recursive.py b/paste/recursive.py
new file mode 100644
index 0000000..0bef920
--- /dev/null
+++ b/paste/recursive.py
@@ -0,0 +1,406 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware to make internal requests and forward requests internally.
+
+When applied, several keys are added to the environment that will allow
+you to trigger recursive redirects and forwards.
+
+ paste.recursive.include:
+ When you call
+ ``environ['paste.recursive.include'](new_path_info)`` a response
+ will be returned. The response has a ``body`` attribute, a
+ ``status`` attribute, and a ``headers`` attribute.
+
+ paste.recursive.script_name:
+ The ``SCRIPT_NAME`` at the point that recursive lives. Only
+ paths underneath this path can be redirected to.
+
+ paste.recursive.old_path_info:
+ A list of previous ``PATH_INFO`` values from previous redirects.
+
+Raise ``ForwardRequestException(new_path_info)`` to do a forward
+(aborting the current request).
+"""
+
+import six
+import warnings
+from six.moves import cStringIO as StringIO
+
+__all__ = ['RecursiveMiddleware']
+__pudge_all__ = ['RecursiveMiddleware', 'ForwardRequestException']
+
class RecursionLoop(AssertionError):
    # Subclasses AssertionError for legacy reasons
    # Raised by CheckForRecursionMiddleware when an internal forward
    # revisits a PATH_INFO already seen in the current forward chain.
    """Raised when a recursion enters into a loop"""
+
class CheckForRecursionMiddleware(object):
    """Guard against forwarding loops.

    Wraps the application that a ``ForwardRequestException`` forwards
    to, and raises ``RecursionLoop`` when the incoming ``PATH_INFO``
    was already visited during this chain of internal forwards (the
    history is kept in ``env['paste.recursive.old_path_info']``).
    """
    def __init__(self, app, env):
        # ``env`` is the environment of the request that triggered the
        # forward; its history list is shared across the whole chain.
        self.app = app
        self.env = env

    def __call__(self, environ, start_response):
        path_info = environ.get('PATH_INFO','')
        if path_info in self.env.get(
            'paste.recursive.old_path_info', []):
            raise RecursionLoop(
                "Forwarding loop detected; %r visited twice (internal "
                "redirect path: %s)"
                % (path_info, self.env['paste.recursive.old_path_info']))
        old_path_info = self.env.setdefault('paste.recursive.old_path_info', [])
        # NOTE(review): the recorded path is self.env's PATH_INFO (the
        # request being forwarded *from*), not the new ``environ``'s --
        # presumably intentional so the history holds already-served
        # paths; confirm before changing.
        old_path_info.append(self.env.get('PATH_INFO', ''))
        return self.app(environ, start_response)
+
class RecursiveMiddleware(object):

    """
    WSGI middleware enabling internal forwards and includes.

    Every request gains ``paste.recursive.forward``,
    ``paste.recursive.include`` and ``paste.recursive.include_app_iter``
    callables in its environment, all targeting the same wrapped
    application (which presumably behaves differently for different
    URLs).  Forwarded URLs must be relative to this container.  A
    ``ForwardRequestException`` raised anywhere below is caught here
    and replayed through the exception's factory, with loop detection.
    """

    def __init__(self, application, global_conf=None):
        self.application = application

    def __call__(self, environ, start_response):
        # Install the three recursion helpers, each bound to the same
        # wrapped application and this request's environ/start_response.
        helpers = (
            ('paste.recursive.forward', Forwarder),
            ('paste.recursive.include', Includer),
            ('paste.recursive.include_app_iter', IncluderAppIter),
        )
        for key, helper_class in helpers:
            environ[key] = helper_class(
                self.application, environ, start_response)
        environ['paste.recursive.script_name'] = environ.get('SCRIPT_NAME', '')
        try:
            return self.application(environ, start_response)
        except ForwardRequestException as exc:
            # Replay the request through the exception's middleware
            # factory, guarding against forwarding loops.
            wrapped = CheckForRecursionMiddleware(exc.factory(self), environ)
            return wrapped(environ, start_response)
+
class ForwardRequestException(Exception):
    """
    Used to signal that a request should be forwarded to a different location.

    ``url``
        The URL to forward to starting with a ``/`` and relative to
        ``RecursiveMiddleware``. URL fragments can also contain query strings
        so ``/error?code=404`` would be a valid URL fragment.

    ``environ``
        An alternative WSGI environment dictionary to use for the forwarded
        request. If specified is used *instead* of the ``url_fragment``

    ``factory``
        If specified ``factory`` is used instead of ``url`` or ``environ``.
        ``factory`` is a callable that takes a WSGI application object
        as the first argument and returns an initialised WSGI middleware
        which can alter the forwarded response.

    Basic usage (must have ``RecursiveMiddleware`` present) :

    .. code-block:: python

        from paste.recursive import ForwardRequestException
        def app(environ, start_response):
            if environ['PATH_INFO'] == '/hello':
                start_response("200 OK", [('Content-type', 'text/plain')])
                return [b'Hello World!']
            elif environ['PATH_INFO'] == '/error':
                start_response("404 Not Found", [('Content-type', 'text/plain')])
                return [b'Page not found']
            else:
                raise ForwardRequestException('/error')

        from paste.recursive import RecursiveMiddleware
        app = RecursiveMiddleware(app)

    If you ran this application and visited ``/hello`` you would get a
    ``Hello World!`` message. If you ran the application and visited
    ``/not_found`` a ``ForwardRequestException`` would be raised and be caught
    by the ``RecursiveMiddleware``. The ``RecursiveMiddleware`` would then
    return the headers and response from the ``/error`` URL but would display
    a ``404 Not found`` status message.

    You could also specify an ``environ`` dictionary instead of a url. Using
    the same example as before:

    .. code-block:: python

        def app(environ, start_response):
            ... same as previous example ...
            else:
                new_environ = environ.copy()
                new_environ['PATH_INFO'] = '/error'
                raise ForwardRequestException(environ=new_environ)

    Finally, if you want complete control over every aspect of the forward you
    can specify a middleware factory. For example to keep the old status code
    but use the headers and response body from the forwarded response you might
    do this:

    .. code-block:: python

        from paste.recursive import ForwardRequestException
        from paste.recursive import RecursiveMiddleware
        from paste.errordocument import StatusKeeper

        def app(environ, start_response):
            if environ['PATH_INFO'] == '/hello':
                start_response("200 OK", [('Content-type', 'text/plain')])
                return [b'Hello World!']
            elif environ['PATH_INFO'] == '/error':
                start_response("404 Not Found", [('Content-type', 'text/plain')])
                return [b'Page not found']
            else:
                def factory(app):
                    return StatusKeeper(app, status='404 Not Found', url='/error')
                raise ForwardRequestException(factory=factory)

        app = RecursiveMiddleware(app)
    """

    def __init__(
        self,
        url=None,
        environ=None,
        factory=None,
        path_info=None):
        # The old signature used ``environ={}``, a shared mutable
        # default.  ``None`` is translated to a fresh empty dict, which
        # is falsy just like the old default, so the option checks
        # below behave identically.
        if environ is None:
            environ = {}
        # Check no incompatible options have been chosen
        if factory and url:
            raise TypeError(
                'You cannot specify factory and a url in '
                'ForwardRequestException')
        elif factory and environ:
            raise TypeError(
                'You cannot specify factory and environ in '
                'ForwardRequestException')
        if url and environ:
            raise TypeError(
                'You cannot specify environ and url in '
                'ForwardRequestException')

        # set the path_info or warn about its use.
        if path_info:
            if not url:
                warnings.warn(
                    "ForwardRequestException(path_info=...) has been deprecated; please "
                    "use ForwardRequestException(url=...)",
                    DeprecationWarning, 2)
            else:
                raise TypeError('You cannot use url and path_info in ForwardRequestException')
            self.path_info = path_info

        # If the url can be treated as a path_info do that
        if url and '?' not in str(url):
            self.path_info = url

        # Base middleware
        class ForwardRequestExceptionMiddleware(object):
            def __init__(self, app):
                self.app = app

        # Otherwise construct the appropriate middleware factory
        if hasattr(self, 'path_info'):
            # Plain path forward: rewrite PATH_INFO only.
            p = self.path_info
            def factory_(app):
                class PathInfoForward(ForwardRequestExceptionMiddleware):
                    def __call__(self, environ, start_response):
                        environ['PATH_INFO'] = p
                        return self.app(environ, start_response)
                return PathInfoForward(app)
            self.factory = factory_
        elif url:
            # URL with a query string: rewrite PATH_INFO and QUERY_STRING.
            def factory_(app):
                class URLForward(ForwardRequestExceptionMiddleware):
                    def __call__(self, environ, start_response):
                        environ['PATH_INFO'] = url.split('?')[0]
                        environ['QUERY_STRING'] = url.split('?')[1]
                        return self.app(environ, start_response)
                return URLForward(app)
            self.factory = factory_
        elif environ:
            # Full environ replacement: ignore the incoming environ.
            def factory_(app):
                class EnvironForward(ForwardRequestExceptionMiddleware):
                    def __call__(self, environ_, start_response):
                        return self.app(environ, start_response)
                return EnvironForward(app)
            self.factory = factory_
        else:
            self.factory = factory
+
class Recursive(object):
    """Base class for the recursion helpers (Forwarder, Includer,
    IncluderAppIter).

    Captures a snapshot of the current request's environment plus the
    application and start_response, and turns a call with a new path
    into a fresh GET request against the same application.  Subclasses
    define ``activate`` to decide what to do with the rebuilt environ.
    """

    def __init__(self, application, environ, start_response):
        self.application = application
        # Snapshot so later mutations of the live environ don't leak
        # into recursive requests.
        self.original_environ = environ.copy()
        self.previous_environ = environ
        self.start_response = start_response

    def __call__(self, path, extra_environ=None):
        """
        `extra_environ` is an optional dictionary that is also added
        to the forwarded request.  E.g., ``{'HTTP_HOST': 'new.host'}``
        could be used to forward to a different virtual host.
        """
        environ = self.original_environ.copy()
        if extra_environ:
            environ.update(extra_environ)
        environ['paste.recursive.previous_environ'] = self.previous_environ
        base_path = self.original_environ.get('SCRIPT_NAME')
        if path.startswith('/'):
            # Absolute paths must live under this middleware's mount
            # point; strip the base so PATH_INFO is relative to it.
            assert path.startswith(base_path), (
                "You can only forward requests to resources under the "
                "path %r (not %r)" % (base_path, path))
            path = path[len(base_path)+1:]
            assert not path.startswith('/')
        path_info = '/' + path
        environ['PATH_INFO'] = path_info
        # Recursive requests are always body-less GETs.
        environ['REQUEST_METHOD'] = 'GET'
        environ['CONTENT_LENGTH'] = '0'
        environ['CONTENT_TYPE'] = ''
        environ['wsgi.input'] = StringIO('')
        return self.activate(environ)

    def activate(self, environ):
        # Subclass hook: perform the recursive request.
        raise NotImplementedError

    def __repr__(self):
        return '<%s.%s from %s>' % (
            self.__class__.__module__,
            self.__class__.__name__,
            self.original_environ.get('SCRIPT_NAME') or '/')
+
class Forwarder(Recursive):

    """
    The forwarder will try to restart the request, except with
    the new `path` (replacing ``PATH_INFO`` in the request).

    It must not be called after and headers have been returned.
    It returns an iterator that must be returned back up the call
    stack, so it must be used like:

    .. code-block:: python

        return environ['paste.recursive.forward'](path)

    Meaningful transformations cannot be done, since headers are
    sent directly to the server and cannot be inspected or
    rewritten.
    """

    def activate(self, environ):
        # Deprecated: forwarding is now done by raising
        # ForwardRequestException; this simply restarts the request
        # with the rebuilt environ and the *original* start_response.
        warnings.warn(
            "recursive.Forwarder has been deprecated; please use "
            "ForwardRequestException",
            DeprecationWarning, 2)
        return self.application(environ, self.start_response)
+
+
class Includer(Recursive):

    """
    Starts another request with the given path and adding or
    overwriting any values in the `extra_environ` dictionary.
    Returns an IncludeResponse object.
    """

    def activate(self, environ):
        # Capture the sub-request's status/headers/body into an
        # IncludedResponse instead of sending them to the real server.
        response = IncludedResponse()
        def start_response(status, headers, exc_info=None):
            if exc_info:
                six.reraise(exc_info[0], exc_info[1], exc_info[2])
            response.status = status
            response.headers = headers
            return response.write
        app_iter = self.application(environ, start_response)
        try:
            for s in app_iter:
                response.write(s)
        finally:
            # Always close the app's iterator, as the WSGI spec requires.
            if hasattr(app_iter, 'close'):
                app_iter.close()
        response.close()
        return response
+
class IncludedResponse(object):
    """Buffered response produced by ``Includer``.

    Accumulates the status, headers and body writes of an internal
    sub-request.  While open, ``body`` reflects everything written so
    far; after ``close()`` the buffer is frozen into ``str`` and
    further writes are rejected.
    """

    def __init__(self):
        self.headers = None
        self.status = None
        self.output = StringIO()
        self.str = None

    def close(self):
        # Freeze the accumulated body and release the buffer.
        self.str = self.output.getvalue()
        self.output.close()
        self.output = None

    def write(self, s):
        assert self.output is not None, (
            "This response has already been closed and no further data "
            "can be written.")
        self.output.write(s)

    def __str__(self):
        return self.body

    def body__get(self):
        # Serve from the live buffer until closed, then from the frozen
        # copy.
        return self.output.getvalue() if self.str is None else self.str
    body = property(body__get)
+
+
class IncluderAppIter(Recursive):
    """
    Like Includer, but just stores the app_iter response
    (be sure to call close on the response!)
    """

    def activate(self, environ):
        # Unlike Includer, the body iterator is handed back unconsumed;
        # the caller iterates and closes it.
        response = IncludedAppIterResponse()
        def start_response(status, headers, exc_info=None):
            if exc_info:
                six.reraise(exc_info[0], exc_info[1], exc_info[2])
            response.status = status
            response.headers = headers
            return response.write
        app_iter = self.application(environ, start_response)
        response.app_iter = app_iter
        return response
+
class IncludedAppIterResponse(object):
    """Response object returned by ``IncluderAppIter``.

    Holds the status, headers and raw ``app_iter`` of an internal
    sub-request; the caller is responsible for iterating ``app_iter``
    and for calling ``close()`` exactly once when done.  Data written
    through the ``write`` callable is collected in ``accumulated``.
    """

    def __init__(self):
        self.status = None
        self.headers = None
        self.accumulated = []
        self.app_iter = None
        self._closed = False

    def close(self):
        assert not self._closed, (
            "Tried to close twice")
        # Mark closed so a double close trips the assertion above
        # (previously the flag was never set, so it could not fire).
        self._closed = True
        if hasattr(self.app_iter, 'close'):
            self.app_iter.close()

    def write(self, s):
        # Bug fix: this used to read ``self.accumulated.append`` -- the
        # method was referenced but never called, silently discarding
        # every write.
        self.accumulated.append(s)
+
# Paste-deploy factory entry point for ``RecursiveMiddleware``.
def make_recursive_middleware(app, global_conf):
    return RecursiveMiddleware(app)

# Reuse the module docstring as the factory's documentation (shown by
# paste-deploy for the ``egg:Paste#recursive`` filter).
make_recursive_middleware.__doc__ = __doc__
diff --git a/paste/registry.py b/paste/registry.py
new file mode 100644
index 0000000..908bc0d
--- /dev/null
+++ b/paste/registry.py
@@ -0,0 +1,581 @@
+# (c) 2005 Ben Bangert
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""Registry for handling request-local module globals sanely
+
+Dealing with module globals in a thread-safe way is good if your
+application is the sole responder in a thread, however that approach fails
+to properly account for various scenarios that occur with WSGI applications
+and middleware.
+
+What is actually needed in the case where a module global is desired that
+is always set properly depending on the current request, is a stacked
+thread-local object. Such an object is popped or pushed during the request
+cycle so that it properly represents the object that should be active for
+the current request.
+
+To make it easy to deal with such variables, this module provides a special
+StackedObjectProxy class which you can instantiate and attach to your
+module where you'd like others to access it. The object you'd like this to
+actually "be" during the request is then registered with the
+RegistryManager middleware, which ensures that for the scope of the current
+WSGI application everything will work properly.
+
+Example:
+
+.. code-block:: python
+
+ #yourpackage/__init__.py
+
+ from paste.registry import RegistryManager, StackedObjectProxy
+ myglobal = StackedObjectProxy()
+
+ #wsgi app stack
+ app = RegistryManager(yourapp)
+
+ #inside your wsgi app
+ class yourapp(object):
+ def __call__(self, environ, start_response):
+ obj = someobject # The request-local object you want to access
+ # via yourpackage.myglobal
        if 'paste.registry' in environ:
+ environ['paste.registry'].register(myglobal, obj)
+
+You will then be able to import yourpackage anywhere in your WSGI app or in
+the calling stack below it and be assured that it is using the object you
+registered with Registry.
+
+RegistryManager can be in the WSGI stack multiple times, each time it
+appears it registers a new request context.
+
+
+Performance
+===========
+
+The overhead of the proxy object is very minimal, however if you are using
+proxy objects extensively (Thousands of accesses per request or more), there
+are some ways to avoid them. A proxy object runs approximately 3-20x slower
+than direct access to the object, this is rarely your performance bottleneck
+when developing web applications.
+
+Should you be developing a system which may be accessing the proxy object
+thousands of times per request, the performance of the proxy will start to
+become more noticeable. In that circumstance, the problem can be avoided by
+getting at the actual object via the proxy with the ``_current_obj`` function:
+
+.. code-block:: python
+
+ #sessions.py
+ Session = StackedObjectProxy()
+ # ... initialization code, etc.
+
+ # somemodule.py
+ import sessions
+
+ def somefunc():
+ session = sessions.Session._current_obj()
+ # ... tons of session access
+
+This way the proxy is used only once to retrieve the object for the current
+context and the overhead is minimized while still making it easy to access
+the underlying object. The ``_current_obj`` function is preceded by an
+underscore to more likely avoid clashing with the contained object's
+attributes.
+
+**NOTE:** This is *highly* unlikely to be an issue in the vast majority of
+cases, and requires incredibly large amounts of proxy object access before
+one should consider the proxy object to be causing slow-downs. This section
+is provided solely in the extremely rare case that it is an issue so that a
+quick way to work around it is documented.
+
+"""
+import six
+import paste.util.threadinglocal as threadinglocal
+
+__all__ = ['StackedObjectProxy', 'RegistryManager', 'StackedObjectRestorer',
+ 'restorer']
+
# Sentinel: distinguishes "no default supplied" from a default of None.
class NoDefault(object): pass

class StackedObjectProxy(object):
    """Track an object instance internally using a stack

    The StackedObjectProxy proxies access to an object internally using a
    stacked thread-local. This makes it safe for complex WSGI environments
    where access to the object may be desired in multiple places without
    having to pass the actual object around.

    New objects are added to the top of the stack with _push_object while
    objects can be removed with _pop_object.

    """
    def __init__(self, default=NoDefault, name="Default"):
        """Create a new StackedObjectProxy

        If a default is given, it's used in every thread if no other object
        has been pushed on.

        """
        # Quadruple-underscore keys keep the proxy's own state out of
        # the way of the proxied object's attribute namespace (and of
        # class-private name mangling).
        self.__dict__['____name__'] = name
        self.__dict__['____local__'] = threadinglocal.local()
        if default is not NoDefault:
            self.__dict__['____default_object__'] = default

    def __dir__(self):
        """Return a list of the StackedObjectProxy's and proxied
        object's (if one exists) names.
        """
        # list() is required on Python 3, where dict.keys() is a view
        # that cannot be concatenated to a list (the old code raised
        # TypeError there).
        dir_list = dir(self.__class__) + list(self.__dict__.keys())
        try:
            dir_list.extend(dir(self._current_obj()))
        except TypeError:
            # No object registered and no default: just report our own
            # names.
            pass
        dir_list.sort()
        return dir_list

    def __getattr__(self, attr):
        return getattr(self._current_obj(), attr)

    def __setattr__(self, attr, value):
        setattr(self._current_obj(), attr, value)

    def __delattr__(self, name):
        delattr(self._current_obj(), name)

    def __getitem__(self, key):
        return self._current_obj()[key]

    def __setitem__(self, key, value):
        self._current_obj()[key] = value

    def __delitem__(self, key):
        del self._current_obj()[key]

    def __call__(self, *args, **kw):
        return self._current_obj()(*args, **kw)

    def __repr__(self):
        try:
            return repr(self._current_obj())
        except (TypeError, AttributeError):
            return '<%s.%s object at 0x%x>' % (self.__class__.__module__,
                                               self.__class__.__name__,
                                               id(self))

    def __iter__(self):
        return iter(self._current_obj())

    def __len__(self):
        return len(self._current_obj())

    def __contains__(self, key):
        return key in self._current_obj()

    def __nonzero__(self):
        return bool(self._current_obj())
    # Python 3 looks for __bool__ rather than __nonzero__; without this
    # alias truth-testing would fall back to __len__ and fail for
    # proxied objects that define neither.
    __bool__ = __nonzero__

    def _current_obj(self):
        """Returns the current active object being proxied to

        In the event that no object was pushed, the default object if
        provided will be used. Otherwise, a TypeError will be raised.

        """
        try:
            objects = self.____local__.objects
        except AttributeError:
            objects = None
        if objects:
            return objects[-1]
        else:
            obj = self.__dict__.get('____default_object__', NoDefault)
            if obj is not NoDefault:
                return obj
            else:
                raise TypeError(
                    'No object (name: %s) has been registered for this '
                    'thread' % self.____name__)

    def _push_object(self, obj):
        """Make ``obj`` the active object for this thread-local.

        This should be used like:

        .. code-block:: python

            obj = yourobject()
            module.glob = StackedObjectProxy()
            module.glob._push_object(obj)
            try:
                ... do stuff ...
            finally:
                module.glob._pop_object(conf)

        """
        try:
            self.____local__.objects.append(obj)
        except AttributeError:
            # First push in this thread: create the per-thread stack.
            self.____local__.objects = []
            self.____local__.objects.append(obj)

    def _pop_object(self, obj=None):
        """Remove a thread-local object.

        If ``obj`` is given, it is checked against the popped object and an
        error is emitted if they don't match.

        """
        try:
            popped = self.____local__.objects.pop()
            # Identity check: the same object must be popped that was
            # pushed, not merely an equal one.
            if obj and popped is not obj:
                raise AssertionError(
                    'The object popped (%s) is not the same as the object '
                    'expected (%s)' % (popped, obj))
        except AttributeError:
            raise AssertionError('No object has been registered for this thread')

    def _object_stack(self):
        """Returns all of the objects stacked in this container

        (Might return [] if there are none)
        """
        try:
            try:
                objs = self.____local__.objects
            except AttributeError:
                return []
            # Return a copy so callers cannot mutate the live stack.
            return objs[:]
        except AssertionError:
            return []

    # The following methods will be swapped for their original versions by
    # StackedObjectRestorer when restoration is enabled. The original
    # functions (e.g. _current_obj) will be available at _current_obj_orig

    def _current_obj_restoration(self):
        request_id = restorer.in_restoration()
        if request_id:
            return restorer.get_saved_proxied_obj(self, request_id)
        return self._current_obj_orig()
    _current_obj_restoration.__doc__ = \
        ('%s\n(StackedObjectRestorer restoration enabled)' % \
         _current_obj.__doc__)

    def _push_object_restoration(self, obj):
        if not restorer.in_restoration():
            self._push_object_orig(obj)
    _push_object_restoration.__doc__ = \
        ('%s\n(StackedObjectRestorer restoration enabled)' % \
         _push_object.__doc__)

    def _pop_object_restoration(self, obj=None):
        if not restorer.in_restoration():
            self._pop_object_orig(obj)
    _pop_object_restoration.__doc__ = \
        ('%s\n(StackedObjectRestorer restoration enabled)' % \
         _pop_object.__doc__)
+
class Registry(object):
    """Track objects and stacked object proxies for removal

    A single Registry instance serves the whole request, no matter how
    many RegistryManagers sit in the WSGI stack; each manager calls
    ``prepare`` to open a fresh registration context before passing the
    request on.

    Contexts are dicts stacked in a list, the last entry being the
    active one.  Each context maps ``id(proxy)`` to the
    ``(proxy, tracked_object)`` pair registered in that context.

    """
    def __init__(self):
        """Create a new, empty Registry.

        ``prepare`` must still be called before any objects can be
        registered.

        """
        self.reglist = []

    def prepare(self):
        """Open a new registration context.

        Called once per RegistryManager on the way in, so registrations
        made below it can be cleaned up independently.

        """
        self.reglist.append({})

    def register(self, stacked, obj):
        """Register an object with a StackedObjectProxy"""
        context = self.reglist[-1]
        key = id(stacked)
        if key in context:
            # Same proxy registered twice in this context: drop the
            # earlier object before pushing the replacement.
            stacked._pop_object(context[key][1])
            del context[key]
        stacked._push_object(obj)
        context[key] = (stacked, obj)

    def multiregister(self, stacklist):
        """Register a list of tuples

        Same semantics as ``register``, applied to each
        ``(proxy, object)`` pair in turn.

        Example::

            registry.multiregister([(sop, obj), (anothersop, anotherobj)])

        """
        for stacked, obj in stacklist:
            self.register(stacked, obj)

    # Replace now does the same thing as register
    replace = register

    def cleanup(self):
        """Pop every object registered in the current context off its
        proxy, then discard the context."""
        for stacked, obj in six.itervalues(self.reglist[-1]):
            stacked._pop_object(obj)
        self.reglist.pop()
+
class RegistryManager(object):
    """Creates and maintains a Registry context

    RegistryManager creates a new registry context for the registration of
    StackedObjectProxy instances. Multiple RegistryManager's can be in a
    WSGI stack and will manage the context so that the StackedObjectProxies
    always proxy to the proper object.

    The object being registered can be any object sub-class, list, or dict.

    Registering objects is done inside a WSGI application under the
    RegistryManager instance, using the ``environ['paste.registry']``
    object which is a Registry instance.

    """
    def __init__(self, application, streaming=False):
        # application: the WSGI app to wrap.
        # streaming: when True, registry cleanup is deferred until the
        # response iterator is exhausted (see streaming_iter); when False,
        # cleanup happens as soon as the wrapped application returns.
        self.application = application
        self.streaming = streaming

    def __call__(self, environ, start_response):
        # WSGI entry point: share one Registry per request (created on
        # first use) and push a fresh registration context for this
        # manager's invocation.
        app_iter = None
        reg = environ.setdefault('paste.registry', Registry())
        reg.prepare()
        if self.streaming:
            # Cleanup must wait until the generator is consumed, so hand
            # the whole request off to the generator version.
            return self.streaming_iter(reg, environ, start_response)

        try:
            app_iter = self.application(environ, start_response)
        except Exception as e:
            # Regardless of if the content is an iterable, generator, list
            # or tuple, we clean-up right now. If its an iterable/generator
            # care should be used to ensure the generator has its own ref
            # to the actual object
            if environ.get('paste.evalexception'):
                # EvalException is present in the WSGI stack
                expected = False
                for expect in environ.get('paste.expected_exceptions', []):
                    if isinstance(e, expect):
                        expected = True
                if not expected:
                    # An unexpected exception: save state for EvalException
                    restorer.save_registry_state(environ)
            reg.cleanup()
            raise
        except:
            # Bare except: catches things not derived from Exception (e.g.
            # string exceptions / old-style classes on Python 2). These are
            # never "expected", so state is saved unconditionally.
            # Save state for EvalException if it's present
            if environ.get('paste.evalexception'):
                restorer.save_registry_state(environ)
            reg.cleanup()
            raise
        else:
            reg.cleanup()

        return app_iter

    def streaming_iter(self, reg, environ, start_response):
        # Generator variant of __call__: yields the application's output
        # and performs registry cleanup only after the iterator is fully
        # consumed (or an exception propagates).
        try:
            for item in self.application(environ, start_response):
                yield item
        except Exception as e:
            # Regardless of if the content is an iterable, generator, list
            # or tuple, we clean-up right now. If its an iterable/generator
            # care should be used to ensure the generator has its own ref
            # to the actual object
            if environ.get('paste.evalexception'):
                # EvalException is present in the WSGI stack
                expected = False
                for expect in environ.get('paste.expected_exceptions', []):
                    if isinstance(e, expect):
                        expected = True
                if not expected:
                    # An unexpected exception: save state for EvalException
                    restorer.save_registry_state(environ)
            reg.cleanup()
            raise
        except:
            # See the bare-except note in __call__ above.
            # Save state for EvalException if it's present
            if environ.get('paste.evalexception'):
                restorer.save_registry_state(environ)
            reg.cleanup()
            raise
        else:
            reg.cleanup()
+
+
class StackedObjectRestorer(object):
    """Track StackedObjectProxies and their proxied objects for automatic
    restoration within EvalException's interactive debugger.

    An instance of this class tracks all StackedObjectProxy state in existence
    when unexpected exceptions are raised by WSGI applications housed by
    EvalException and RegistryManager. Like EvalException, this information is
    stored for the life of the process.

    When an unexpected exception occurs and EvalException is present in the
    WSGI stack, save_registry_state is intended to be called to store the
    Registry state and enable automatic restoration on all currently registered
    StackedObjectProxies.

    With restoration enabled, those StackedObjectProxies' _current_obj
    (overwritten by _current_obj_restoration) method's strategy is modified:
    it will return its appropriate proxied object from the restorer when
    a restoration context is active in the current thread.

    The StackedObjectProxies' _push/pop_object methods strategies are also
    changed: they no-op when a restoration context is active in the current
    thread (because the pushing/popping work is all handled by the
    Registry/restorer).

    The request's Registry objects' reglists are restored from the restorer
    when a restoration context begins, enabling the Registry methods to work
    while their changes are tracked by the restorer.

    The overhead of enabling restoration is negligible (another threadlocal
    access for the changed StackedObjectProxy methods) for normal use outside
    of a restoration context, but worth mentioning when combined with
    StackedObjectProxies normal overhead. Once enabled it does not turn off,
    however:

    o Enabling restoration only occurs after an unexpected exception is
    detected. The server is likely to be restarted shortly after the exception
    is raised to fix the cause

    o StackedObjectRestorer is only enabled when EvalException is enabled (not
    on a production server) and RegistryManager exists in the middleware
    stack"""
    def __init__(self):
        # Registries and their saved reglists by request_id
        self.saved_registry_states = {}
        # Thread-local slot holding the request_id of the restoration
        # context active in the current thread (attribute absent when no
        # context is active).
        self.restoration_context_id = threadinglocal.local()

    def save_registry_state(self, environ):
        """Save the state of this request's Registry (if it hasn't already been
        saved) to the saved_registry_states dict, keyed by the request's unique
        identifier"""
        registry = environ.get('paste.registry')
        if not registry or not len(registry.reglist) or \
           self.get_request_id(environ) in self.saved_registry_states:
            # No Registry, no state to save, or this request's state has
            # already been saved
            return

        # A shallow copy of the reglist is enough: the context dicts
        # themselves are kept alive by this reference.
        self.saved_registry_states[self.get_request_id(environ)] = \
            (registry, registry.reglist[:])

        # Tweak the StackedObjectProxies we want to save state for -- change
        # their methods to act differently when a restoration context is active
        # in the current thread
        for reglist in registry.reglist:
            for stacked, obj in six.itervalues(reglist):
                self.enable_restoration(stacked)

    def get_saved_proxied_obj(self, stacked, request_id):
        """Retrieve the saved object proxied by the specified
        StackedObjectProxy for the request identified by request_id"""
        # All state for the request identified by request_id
        reglist = self.saved_registry_states[request_id][1]

        # The top of the stack was current when the exception occurred
        stack_level = len(reglist) - 1
        stacked_id = id(stacked)
        while True:
            if stack_level < 0:
                # Nothing registered: Call _current_obj_orig to raise a
                # TypeError
                return stacked._current_obj_orig()
            context = reglist[stack_level]
            if stacked_id in context:
                break
            # This StackedObjectProxy may not have been registered by the
            # RegistryManager that was active when the exception was raised --
            # continue searching down the stack until it's found
            stack_level -= 1
        return context[stacked_id][1]

    def enable_restoration(self, stacked):
        """Replace the specified StackedObjectProxy's methods with their
        respective restoration versions.

        _current_obj_restoration forces recovery of the saved proxied object
        when a restoration context is active in the current thread.

        _push/pop_object_restoration avoid pushing/popping data
        (pushing/popping is only done at the Registry level) when a restoration
        context is active in the current thread"""
        if '_current_obj_orig' in stacked.__dict__:
            # Restoration already enabled
            return

        # Swap methods on the instance __dict__ so only this proxy is
        # affected; the originals stay reachable under *_orig.
        for func_name in ('_current_obj', '_push_object', '_pop_object'):
            orig_func = getattr(stacked, func_name)
            restoration_func = getattr(stacked, func_name + '_restoration')
            stacked.__dict__[func_name + '_orig'] = orig_func
            stacked.__dict__[func_name] = restoration_func

    def get_request_id(self, environ):
        """Return a unique identifier for the current request"""
        # Import here to avoid a circular import with the evalexception
        # middleware (which imports this module).
        from paste.evalexception.middleware import get_debug_count
        return get_debug_count(environ)

    def restoration_begin(self, request_id):
        """Enable a restoration context in the current thread for the specified
        request_id"""
        if request_id in self.saved_registry_states:
            # Restore the old Registry object's state
            registry, reglist = self.saved_registry_states[request_id]
            registry.reglist = reglist

        self.restoration_context_id.request_id = request_id

    def restoration_end(self):
        """Register a restoration context as finished, if one exists"""
        try:
            del self.restoration_context_id.request_id
        except AttributeError:
            # No context was active in this thread; nothing to do.
            pass

    def in_restoration(self):
        """Determine if a restoration context is active for the current thread.
        Returns the request_id it's active for if so, otherwise False"""
        return getattr(self.restoration_context_id, 'request_id', False)
+
# Process-wide singleton shared by RegistryManager and EvalException.
restorer = StackedObjectRestorer()


# Paste Deploy entry point
def make_registry_manager(app, global_conf):
    # global_conf is accepted (and ignored) to satisfy the Paste Deploy
    # filter_app_factory signature.
    return RegistryManager(app)

make_registry_manager.__doc__ = RegistryManager.__doc__
diff --git a/paste/reloader.py b/paste/reloader.py
new file mode 100644
index 0000000..c9d7c14
--- /dev/null
+++ b/paste/reloader.py
@@ -0,0 +1,179 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+A file monitor and server restarter.
+
+Use this like:
+
.. code-block:: python
+
+ import reloader
+ reloader.install()
+
+Then make sure your server is installed with a shell script like::
+
+ err=3
+ while test "$err" -eq 3 ; do
+ python server.py
+ err="$?"
+ done
+
+or is run from this .bat file (if you use Windows)::
+
+ @echo off
+ :repeat
+ python server.py
+ if %errorlevel% == 3 goto repeat
+
+or run a monitoring process in Python (``paster serve --reload`` does
+this).
+
+Use the ``watch_file(filename)`` function to cause a reload/restart for
other non-Python files (e.g., configuration files). If you have
+a dynamic set of files that grows over time you can use something like::
+
+ def watch_config_files():
+ return CONFIG_FILE_CACHE.keys()
+ paste.reloader.add_file_callback(watch_config_files)
+
+Then every time the reloader polls files it will call
+``watch_config_files`` and check all the filenames it returns.
+"""
+
+from __future__ import print_function
+import os
+import sys
+import time
+import threading
+import traceback
+from paste.util.classinstance import classinstancemethod
+
def install(poll_interval=1):
    """
    Install the reloading monitor.

    Spawns a daemon thread that polls the mtimes of all imported modules
    (plus any explicitly watched files) every ``poll_interval`` seconds
    and exits the process with status code 3 when a change is detected,
    so a wrapping shell script or monitor process can restart the server.
    """
    # NOTE: the old docstring mentioned a ``raise_keyboard_interrupt``
    # option that this function does not accept; it has been removed.
    mon = Monitor(poll_interval=poll_interval)
    t = threading.Thread(target=mon.periodic_reload)
    # Daemon thread so the monitor never keeps the interpreter alive.
    # (t.daemon is the modern spelling of the deprecated t.setDaemon().)
    t.daemon = True
    t.start()
+
class Monitor(object):
    """Poll imported modules and watched files for changes.

    ``periodic_reload`` loops forever; when ``check_reload`` reports a
    change the process exits with status code 3 so an external wrapper
    (shell loop, ``paster serve --reload``) can restart it.
    """

    # Class-level state shared across all monitors; each instance copies
    # the global lists at construction time.
    instances = []
    global_extra_files = []
    global_file_callbacks = []

    def __init__(self, poll_interval):
        # filename -> last observed mtime
        self.module_mtimes = {}
        # Set but never consulted in this module -- presumably intended
        # for external control of the loop; TODO confirm.
        self.keep_running = True
        self.poll_interval = poll_interval
        self.extra_files = list(self.global_extra_files)
        self.instances.append(self)
        self.file_callbacks = list(self.global_file_callbacks)

    def periodic_reload(self):
        # Poll loop; never returns normally.
        while True:
            if not self.check_reload():
                # use os._exit() here and not sys.exit() since within a
                # thread sys.exit() just closes the given thread and
                # won't kill the process; note os._exit does not call
                # any atexit callbacks, nor does it do finally blocks,
                # flush open files, etc. In otherwords, it is rude.
                os._exit(3)
                # Unreachable: os._exit never returns.
                break
            time.sleep(self.poll_interval)

    def check_reload(self):
        """Return False if any watched file changed, True otherwise."""
        filenames = list(self.extra_files)
        # Callbacks supply dynamic file lists; a failing callback is
        # reported but must not kill the monitor thread.
        for file_callback in self.file_callbacks:
            try:
                filenames.extend(file_callback())
            except:
                print("Error calling paste.reloader callback %r:" % file_callback,
                      file=sys.stderr)
                traceback.print_exc()
        for module in sys.modules.values():
            try:
                filename = module.__file__
            except (AttributeError, ImportError):
                # Some modules (e.g. builtins) have no __file__.
                continue
            if filename is not None:
                filenames.append(filename)
        for filename in filenames:
            try:
                stat = os.stat(filename)
                if stat:
                    mtime = stat.st_mtime
                else:
                    mtime = 0
            except (OSError, IOError):
                # File vanished or is unreadable; skip it this round.
                continue
            # For compiled files, also consider the source's mtime
            # (.pyc -> .py on CPython, $py.class -> .py on Jython).
            if filename.endswith('.pyc') and os.path.exists(filename[:-1]):
                mtime = max(os.stat(filename[:-1]).st_mtime, mtime)
            elif filename.endswith('$py.class') and \
                    os.path.exists(filename[:-9] + '.py'):
                mtime = max(os.stat(filename[:-9] + '.py').st_mtime, mtime)
            if filename not in self.module_mtimes:
                # First sighting: record the baseline, don't report.
                self.module_mtimes[filename] = mtime
            elif self.module_mtimes[filename] < mtime:
                print("%s changed; reloading..." % filename, file=sys.stderr)
                return False
        return True

    def watch_file(self, cls, filename):
        """Watch the named file for changes"""
        filename = os.path.abspath(filename)
        # Wrapped by classinstancemethod below: when called on the class,
        # self is None and the change is fanned out to all instances and
        # remembered for future ones; on an instance it is local.
        if self is None:
            for instance in cls.instances:
                instance.watch_file(filename)
            cls.global_extra_files.append(filename)
        else:
            self.extra_files.append(filename)

    watch_file = classinstancemethod(watch_file)

    def add_file_callback(self, cls, callback):
        """Add a callback -- a function that takes no parameters -- that will
        return a list of filenames to watch for changes."""
        # Same class/instance dual dispatch as watch_file above.
        if self is None:
            for instance in cls.instances:
                instance.add_file_callback(callback)
            cls.global_file_callbacks.append(callback)
        else:
            self.file_callbacks.append(callback)

    add_file_callback = classinstancemethod(add_file_callback)
+
# Only define JythonMonitor when running on Jython AND the experimental
# _systemrestart module is available; otherwise fall back silently to the
# plain Monitor.
if sys.platform.startswith('java'):
    try:
        from _systemrestart import SystemRestart
    except ImportError:
        pass
    else:
        class JythonMonitor(Monitor):

            """
            Monitor that utilizes Jython's special
            ``_systemrestart.SystemRestart`` exception.

            When raised from the main thread it causes Jython to reload
            the interpreter in the existing Java process (avoiding
            startup time).

            Note that this functionality of Jython is experimental and
            may change in the future.
            """

            def periodic_reload(self):
                while True:
                    if not self.check_reload():
                        # Reload in-process instead of exiting with
                        # status 3 like the base Monitor.
                        raise SystemRestart()
                    time.sleep(self.poll_interval)

# Module-level conveniences; via classinstancemethod these affect all
# current and future Monitor instances.
watch_file = Monitor.watch_file
add_file_callback = Monitor.add_file_callback
diff --git a/paste/request.py b/paste/request.py
new file mode 100644
index 0000000..8d5e5c3
--- /dev/null
+++ b/paste/request.py
@@ -0,0 +1,428 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Ian Bicking and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
This module provides helper routines that work directly on a WSGI
+environment to solve common requirements.
+
+ * get_cookies(environ)
+ * parse_querystring(environ)
+ * parse_formvars(environ, include_get_vars=True)
+ * construct_url(environ, with_query_string=True, with_path_info=True,
+ script_name=None, path_info=None, querystring=None)
+ * path_info_split(path_info)
+ * path_info_pop(environ)
+ * resolve_relative_url(url, environ)
+
+"""
+import cgi
+from six.moves.urllib import parse as urlparse
+from six.moves.urllib.parse import quote, parse_qsl
+try:
+ # Python 3
+ from http.cookies import SimpleCookie, CookieError
+except ImportError:
+ # Python 2
+ from Cookie import SimpleCookie, CookieError
+
+try:
+ from UserDict import DictMixin
+except ImportError:
+ from collections import MutableMapping as DictMixin
+import six
+
+from paste.util.multidict import MultiDict
+
+__all__ = ['get_cookies', 'get_cookie_dict', 'parse_querystring',
+ 'parse_formvars', 'construct_url', 'path_info_split',
+ 'path_info_pop', 'resolve_relative_url', 'EnvironHeaders']
+
def get_cookies(environ):
    """
    Return a dictionary-like ``SimpleCookie`` object parsed from the
    request's ``HTTP_COOKIE`` header.

    The parsed result is cached in ``environ['paste.cookies']`` together
    with the raw header, so repeated calls for the same request are cheap.
    """
    header = environ.get('HTTP_COOKIE', '')
    cached = environ.get('paste.cookies')
    if cached is not None and cached[1] == header:
        return cached[0]
    cookies = SimpleCookie()
    try:
        cookies.load(header)
    except CookieError:
        # Malformed cookie data: keep whatever parsed successfully.
        pass
    environ['paste.cookies'] = (cookies, header)
    return cookies
+
def get_cookie_dict(environ):
    """Return a *plain* dictionary of cookies as found in the request.

    Unlike ``get_cookies`` this returns a dictionary, not a
    ``SimpleCookie`` object; for incoming cookies a plain dict fully
    represents the information. The result is cached in
    ``environ['paste.cookies.dict']`` keyed on the raw header.
    """
    header = environ.get('HTTP_COOKIE')
    if not header:
        return {}
    cached = environ.get('paste.cookies.dict')
    if cached is not None and cached[1] == header:
        return cached[0]
    cookies = SimpleCookie()
    try:
        cookies.load(header)
    except CookieError:
        # Malformed cookie data: keep whatever parsed successfully.
        pass
    result = dict((name, morsel.value) for name, morsel in cookies.items())
    environ['paste.cookies.dict'] = (result, header)
    return result
+
def parse_querystring(environ):
    """
    Parse ``QUERY_STRING`` into a list like ``[(name, value)]``, caching
    the result in ``environ['paste.parsed_querystring']`` so repeated
    calls for the same request are cheap.

    You can pass the result to ``dict()``, but be aware that keys that
    appear multiple times will be lost (only the last value will be
    preserved).
    """
    source = environ.get('QUERY_STRING', '')
    if not source:
        return []
    cached = environ.get('paste.parsed_querystring')
    if cached is not None and cached[1] == source:
        return cached[0]
    parsed = parse_qsl(source, keep_blank_values=True, strict_parsing=False)
    environ['paste.parsed_querystring'] = (parsed, source)
    return parsed
+
def parse_dict_querystring(environ):
    """Parses a query string like parse_querystring, but returns a MultiDict

    Caches this value in case parse_dict_querystring is called again
    for the same request.

    Example::

        >>> environ = {'QUERY_STRING': 'day=Monday&user=fred&user=jane'}
        >>> parsed = parse_dict_querystring(environ)

        >>> parsed['day']
        'Monday'
        >>> parsed['user']
        'fred'
        >>> parsed.getall('user')
        ['fred', 'jane']

    """
    source = environ.get('QUERY_STRING', '')
    if not source:
        return MultiDict()
    cached = environ.get('paste.parsed_dict_querystring')
    if cached is not None and cached[1] == source:
        return cached[0]
    pairs = parse_qsl(source, keep_blank_values=True, strict_parsing=False)
    multi = MultiDict(pairs)
    environ['paste.parsed_dict_querystring'] = (multi, source)
    return multi
+
def parse_formvars(environ, include_get_vars=True, encoding=None, errors=None):
    """Parses the request, returning a MultiDict of form variables.

    If ``include_get_vars`` is true then GET (query string) variables
    will also be folded into the MultiDict.

    All values should be strings, except for file uploads which are
    left as ``FieldStorage`` instances.

    If the request was not a normal form request (e.g., a POST with an
    XML body) then ``environ['wsgi.input']`` won't be read.
    """
    source = environ['wsgi.input']
    if 'paste.parsed_formvars' in environ:
        parsed, check_source = environ['paste.parsed_formvars']
        if check_source == source:
            # Cache hit: same wsgi.input object as when we first parsed.
            if include_get_vars:
                parsed.update(parse_querystring(environ))
            return parsed
    # @@: Shouldn't bother FieldStorage parsing during GET/HEAD and
    # fake_out_cgi requests
    type = environ.get('CONTENT_TYPE', '').lower()
    if ';' in type:
        # Strip parameters such as "; charset=utf-8" from the media type.
        type = type.split(';', 1)[0]
    fake_out_cgi = type not in ('', 'application/x-www-form-urlencoded',
                                'multipart/form-data')
    # FieldStorage assumes a default CONTENT_LENGTH of -1, but a
    # default of 0 is better:
    if not environ.get('CONTENT_LENGTH'):
        environ['CONTENT_LENGTH'] = '0'
    # Prevent FieldStorage from parsing QUERY_STRING during GET/HEAD
    # requests
    old_query_string = environ.get('QUERY_STRING','')
    environ['QUERY_STRING'] = ''
    if fake_out_cgi:
        # Non-form content type: feed FieldStorage an empty body so the
        # real wsgi.input is left unread for the application.
        input = six.BytesIO(b'')
        old_content_type = environ.get('CONTENT_TYPE')
        old_content_length = environ.get('CONTENT_LENGTH')
        environ['CONTENT_LENGTH'] = '0'
        environ['CONTENT_TYPE'] = ''
    else:
        input = environ['wsgi.input']
    kwparms = {}
    if six.PY3:
        # FieldStorage only accepts encoding/errors on Python 3.
        if encoding:
            kwparms['encoding'] = encoding
        if errors:
            kwparms['errors'] = errors
    fs = cgi.FieldStorage(fp=input,
                          environ=environ,
                          keep_blank_values=1,
                          **kwparms)
    # Restore the environ keys temporarily clobbered above.
    environ['QUERY_STRING'] = old_query_string
    if fake_out_cgi:
        environ['CONTENT_TYPE'] = old_content_type
        environ['CONTENT_LENGTH'] = old_content_length
    formvars = MultiDict()
    if isinstance(fs.value, list):
        # fs.value is a list only when fields were actually parsed;
        # otherwise (e.g. empty or faked-out body) there is nothing to copy.
        for name in fs.keys():
            values = fs[name]
            if not isinstance(values, list):
                values = [values]
            for value in values:
                if not value.filename:
                    # Plain field: store the string, not the FieldStorage.
                    value = value.value
                formvars.add(name, value)
    environ['paste.parsed_formvars'] = (formvars, source)
    if include_get_vars:
        formvars.update(parse_querystring(environ))
    return formvars
+
def construct_url(environ, with_query_string=True, with_path_info=True,
                  script_name=None, path_info=None, querystring=None):
    """Reconstructs the URL from the WSGI environment.

    You may override SCRIPT_NAME, PATH_INFO, and QUERYSTRING with
    the keyword arguments.
    """
    scheme = environ['wsgi.url_scheme']
    parts = [scheme, '://']

    http_host = environ.get('HTTP_HOST')
    if http_host:
        host, sep, port = http_host.partition(':')
        if sep:
            # Drop an explicit port when it is the scheme's default.
            if (scheme == 'https' and port == '443') or \
                    (scheme == 'http' and port == '80'):
                port = ''
        parts.append(host)
        if port:
            parts.append(':%s' % port)
    else:
        parts.append(environ['SERVER_NAME'])
        server_port = environ['SERVER_PORT']
        default_port = '443' if scheme == 'https' else '80'
        if server_port != default_port:
            parts.append(':' + server_port)

    if script_name is None:
        script_name = environ.get('SCRIPT_NAME', '')
    parts.append(quote(script_name))
    if with_path_info:
        if path_info is None:
            path_info = environ.get('PATH_INFO', '')
        parts.append(quote(path_info))
    if with_query_string:
        if querystring is None:
            if environ.get('QUERY_STRING'):
                parts.append('?' + environ['QUERY_STRING'])
        elif querystring:
            parts.append('?' + querystring)
    return ''.join(parts)
+
def resolve_relative_url(url, environ):
    """
    Resolve ``url`` relative to the location represented by ``environ``;
    useful for redirecting to a relative path. Note: if ``url`` is
    already absolute, this function will (intentionally) have no effect
    on it.
    """
    # The current location excludes the query string on purpose: a
    # relative redirect is resolved against the path, not the query.
    base = construct_url(environ, with_query_string=False)
    return urlparse.urljoin(base, url)
+
def path_info_split(path_info):
    """
    Split off the first segment of the path, returning ``(first_part,
    rest_of_path)``. ``first_part`` can be None (if PATH_INFO is empty),
    '' (if PATH_INFO is '/'), or a name without any /'s; ``rest_of_path``
    can be '' or a string starting with /.
    """
    if not path_info:
        return None, ''
    assert path_info.startswith('/'), (
        "PATH_INFO should start with /: %r" % path_info)
    stripped = path_info.lstrip('/')
    first, sep, rest = stripped.partition('/')
    if sep:
        return first, '/' + rest
    return stripped, ''
+
def path_info_pop(environ):
    """
    'Pops' off the next segment of PATH_INFO, pushing it onto
    SCRIPT_NAME, and returning that segment.

    For instance::

        >>> def call_it(script_name, path_info):
        ...     env = {'SCRIPT_NAME': script_name, 'PATH_INFO': path_info}
        ...     result = path_info_pop(env)
        ...     print('SCRIPT_NAME=%r; PATH_INFO=%r; returns=%r' % (
        ...         env['SCRIPT_NAME'], env['PATH_INFO'], result))
        >>> call_it('/foo', '/bar')
        SCRIPT_NAME='/foo/bar'; PATH_INFO=''; returns='bar'
        >>> call_it('/foo/bar', '')
        SCRIPT_NAME='/foo/bar'; PATH_INFO=''; returns=None
        >>> call_it('/foo/bar', '/')
        SCRIPT_NAME='/foo/bar/'; PATH_INFO=''; returns=''
        >>> call_it('', '/1/2/3')
        SCRIPT_NAME='/1'; PATH_INFO='/2/3'; returns='1'
        >>> call_it('', '//1/2')
        SCRIPT_NAME='//1'; PATH_INFO='/2'; returns='1'

    """
    path = environ.get('PATH_INFO', '')
    if not path:
        return None
    # Move every leading slash from PATH_INFO onto SCRIPT_NAME.
    remainder = path.lstrip('/')
    environ['SCRIPT_NAME'] += '/' * (len(path) - len(remainder))
    segment, sep, rest = remainder.partition('/')
    environ['SCRIPT_NAME'] += segment
    if sep:
        environ['PATH_INFO'] = '/' + rest
    else:
        environ['PATH_INFO'] = ''
    return segment
+
_parse_headers_special = {
    # This is a Zope convention, but we'll allow it here:
    'HTTP_CGI_AUTHORIZATION': 'Authorization',
    'CONTENT_LENGTH': 'Content-Length',
    'CONTENT_TYPE': 'Content-Type',
    }

def parse_headers(environ):
    """
    Parse the headers in the environment (like ``HTTP_HOST``) and
    yield a sequence of those (header_name, value) tuples.
    """
    # @@: Maybe should parse out comma-separated headers?
    # dict.iteritems() only exists on Python 2 and raised AttributeError
    # on Python 3; items() iterates identically on both versions.
    for cgi_var, value in environ.items():
        if cgi_var in _parse_headers_special:
            yield _parse_headers_special[cgi_var], value
        elif cgi_var.startswith('HTTP_'):
            # HTTP_SOME_HEADER -> Some-Header
            yield cgi_var[5:].title().replace('_', '-'), value
+
class EnvironHeaders(DictMixin):
    """An object that represents the headers as present in a
    WSGI environment.

    A stateless wrapper around a WSGI environ dict, exposing the
    CGI-style ``HTTP_*`` keys (plus ``CONTENT_TYPE``/``CONTENT_LENGTH``)
    under their HTTP header names. Because a CGI environment can only
    hold one value for each key, this dictionary is single-valued
    (unlike outgoing headers).
    """

    def __init__(self, environ):
        self.environ = environ

    def _trans_name(self, name):
        # Header-Name -> HTTP_HEADER_NAME; the two special CGI variables
        # lose the HTTP_ prefix.
        key = 'HTTP_' + name.replace('-', '_').upper()
        if key in ('HTTP_CONTENT_LENGTH', 'HTTP_CONTENT_TYPE'):
            return key[5:]
        return key

    def _trans_key(self, key):
        # HTTP_HEADER_NAME -> Header-Name; None for non-header keys.
        if key in ('CONTENT_TYPE', 'CONTENT_LENGTH'):
            return key.replace('_', '-').title()
        if key.startswith('HTTP_'):
            return key[5:].replace('_', '-').title()
        return None

    def __len__(self):
        # Note: counts every environ key, not just the header-like ones.
        return len(self.environ)

    def __getitem__(self, item):
        return self.environ[self._trans_name(item)]

    def __setitem__(self, item, value):
        # @@: Should this dictionary be writable at all?
        self.environ[self._trans_name(item)] = value

    def __delitem__(self, item):
        del self.environ[self._trans_name(item)]

    def __iter__(self):
        for key in self.environ:
            name = self._trans_key(key)
            if name is not None:
                yield name

    def keys(self):
        return [name for name in self]

    def __contains__(self, item):
        return self._trans_name(item) in self.environ
+
+def _cgi_FieldStorage__repr__patch(self):
+ """ monkey patch for FieldStorage.__repr__
+
+ Unbelievely, the default __repr__ on FieldStorage reads
+ the entire file content instead of being sane about it.
+ This is a simple replacement that doesn't do that
+ """
+ if self.file:
+ return "FieldStorage(%r, %r)" % (
+ self.name, self.filename)
+ return "FieldStorage(%r, %r, %r)" % (
+ self.name, self.filename, self.value)
+
# Install the safe __repr__ module-wide; every user of cgi.FieldStorage
# in this process benefits.
cgi.FieldStorage.__repr__ = _cgi_FieldStorage__repr__patch

if __name__ == '__main__':
    # Run the doctests embedded in this module (e.g. path_info_pop).
    import doctest
    doctest.testmod()
diff --git a/paste/response.py b/paste/response.py
new file mode 100644
index 0000000..5ce0320
--- /dev/null
+++ b/paste/response.py
@@ -0,0 +1,240 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""Routines to generate WSGI responses"""
+
+############################################################
+## Headers
+############################################################
+import warnings
+
class HeaderDict(dict):

    """
    This represents response headers. It handles the headers as a
    dictionary, with case-insensitive keys.

    Also there is an ``.add(key, value)`` method, which sets the key,
    or adds the value to the current value (turning it into a list if
    necessary).

    For passing to WSGI there is a ``.headeritems()`` method which is
    like ``.items()`` but unpacks values that are lists. It also
    handles encoding -- all headers are encoded in ASCII (if they are
    unicode).

    @@: Should that encoding be ISO-8859-1 or UTF-8? I'm not sure
    what the spec says.
    """

    def normalize(self, key):
        """Return the canonical (lowercased, stripped) form of a header name."""
        return str(key).lower().strip()

    def __getitem__(self, key):
        return dict.__getitem__(self, self.normalize(key))

    def __setitem__(self, key, value):
        dict.__setitem__(self, self.normalize(key), value)

    def __delitem__(self, key):
        dict.__delitem__(self, self.normalize(key))

    def __contains__(self, key):
        return dict.__contains__(self, self.normalize(key))

    # Legacy alias kept for Python-2-style callers.
    has_key = __contains__

    def get(self, key, failobj=None):
        return dict.get(self, self.normalize(key), failobj)

    def setdefault(self, key, failobj=None):
        return dict.setdefault(self, self.normalize(key), failobj)

    def pop(self, key, *args):
        return dict.pop(self, self.normalize(key), *args)

    def update(self, other):
        # __setitem__ normalizes, so no explicit normalize needed here.
        for key in other:
            self[key] = other[key]

    def add(self, key, value):
        """Set ``key``, or append ``value`` to its existing value(s)."""
        key = self.normalize(key)
        if key not in self:
            self[key] = value
        elif isinstance(self[key], list):
            self[key].append(value)
        else:
            self[key] = [self[key], value]

    def headeritems(self):
        """Like ``.items()``, but with list values unpacked into
        separate ``(key, str(value))`` pairs."""
        result = []
        for key, value in self.items():
            values = value if isinstance(value, list) else [value]
            result.extend((key, str(v)) for v in values)
        return result

    @classmethod
    def fromlist(cls, seq):
        """Build a HeaderDict from ``(name, value)`` pairs via ``add``."""
        self = cls()
        for name, value in seq:
            self.add(name, value)
        return self
+
def has_header(headers, name):
    """
    Is a header named ``name`` present in ``headers`` (a list of
    ``(name, value)`` pairs)? Comparison is case-insensitive.
    """
    wanted = name.lower()
    return any(header.lower() == wanted for header, _value in headers)
+
def header_value(headers, name):
    """
    Returns the header's value, or None if no such header. If a
    header appears more than once, all the values of the headers
    are joined with ','. Note that this is consistent /w RFC 2616
    section 4.2 which states:

        It MUST be possible to combine the multiple header fields
        into one "field-name: field-value" pair, without changing
        the semantics of the message, by appending each subsequent
        field-value to the first, each separated by a comma.

    However, note that the original netscape usage of 'Set-Cookie',
    especially in MSIE which contains an 'expires' date, is not
    compatible with this particular concatenation method.
    """
    wanted = name.lower()
    matches = [value for header, value in headers
               if header.lower() == wanted]
    return ','.join(matches) if matches else None
+
def remove_header(headers, name):
    """
    Removes the named header from the list of headers. Returns the
    value of that header, or None if no header found. If multiple
    headers are found, only the last one is returned.
    """
    wanted = name.lower()
    result = None
    kept = []
    for header in headers:
        if header[0].lower() == wanted:
            # Keep overwriting so the LAST match's value is returned.
            result = header[1]
        else:
            kept.append(header)
    # Mutate the caller's list in place, as the original API promises.
    headers[:] = kept
    return result
+
def replace_header(headers, name, value):
    """
    Updates the headers, replacing the first occurrence of the given name
    with the value provided; asserting that no further occurrences
    happen. Note that this is _not_ the same as remove_header and then
    append, as two distinct operations (del followed by an append) are
    not atomic in a threaded environment. Returns the previous header
    value for the provided name, if any. Clearly one should not use
    this function with ``set-cookie`` or other names that may have more
    than one occurrence in the headers.
    """
    name = name.lower()
    result = None
    for i, item in enumerate(headers):
        if item[0].lower() == name:
            assert not result, "two values for the header '%s' found" % name
            result = item[1]
            headers[i] = (name, value)
    if not result:
        headers.append((name, value))
    return result
+
+
+############################################################
+## Deprecated methods
+############################################################
+
def error_body_response(error_code, message, __warn=True):
    """
    Returns a standard HTML response page for an HTTP error.
    **Note:** Deprecated
    """
    if __warn:
        warnings.warn(
            'wsgilib.error_body_response is deprecated; use the '
            'wsgi_application method on an HTTPException object '
            'instead', DeprecationWarning, 2)
    template = '''\
<html>
  <head>
    <title>%(error_code)s</title>
  </head>
  <body>
    <h1>%(error_code)s</h1>
    %(message)s
  </body>
</html>'''
    return template % {
        'error_code': error_code,
        'message': message,
        }
+
+
def error_response(environ, error_code, message,
                   debug_message=None, __warn=True):
    """
    Returns the status, headers, and body of an error response.

    Use like:

    .. code-block:: python

        status, headers, body = wsgilib.error_response(
            '301 Moved Permanently', 'Moved to <a href="%s">%s</a>'
            % (url, url))
        start_response(status, headers)
        return [body]

    **Note:** Deprecated
    """
    if __warn:
        warnings.warn(
            'wsgilib.error_response is deprecated; use the '
            'wsgi_application method on an HTTPException object '
            'instead', DeprecationWarning, 2)
    debugging = environ.get('paste.config', {}).get('debug')
    if debug_message and debugging:
        # Surface the debug detail as an HTML comment, debug mode only.
        message = '%s\n\n<!-- %s -->' % (message, debug_message)
    body = error_body_response(error_code, message, __warn=False)
    headers = [
        ('content-type', 'text/html'),
        ('content-length', str(len(body))),
    ]
    return error_code, headers, body
+
def error_response_app(error_code, message, debug_message=None,
                       __warn=True):
    """
    An application that emits the given error response.

    **Note:** Deprecated
    """
    if __warn:
        warnings.warn(
            'wsgilib.error_response_app is deprecated; use the '
            'wsgi_application method on an HTTPException object '
            'instead', DeprecationWarning, 2)

    def application(environ, start_response):
        # Delegate to error_response for the status/headers/body triple.
        status, headers, body = error_response(
            environ, error_code, message,
            debug_message=debug_message, __warn=False)
        start_response(status, headers)
        return [body]

    return application
diff --git a/paste/session.py b/paste/session.py
new file mode 100644
index 0000000..ae208e7
--- /dev/null
+++ b/paste/session.py
@@ -0,0 +1,346 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+Creates a session object in your WSGI environment.
+
+Use like:
+
+.. code-block:: python
+
+ environ['paste.session.factory']()
+
+This will return a dictionary. The contents of this dictionary will
+be saved to disk when the request is completed. The session will be
+created when you first fetch the session dictionary, and a cookie will
+be sent in that case. There's currently no way to use sessions without
+cookies, and there's no way to delete a session except to clear its
+data.
+
+@@: This doesn't do any locking, and may cause problems when a single
+session is accessed concurrently. Also, it loads and saves the
+session for each request, with no caching. Also, sessions aren't
+expired.
+"""
+
+try:
+ # Python 3
+ from http.cookies import SimpleCookie
+except ImportError:
+ # Python 2
+ from Cookie import SimpleCookie
+import time
+import random
+import os
+import datetime
+import six
+import threading
+import tempfile
+
+try:
+ import cPickle
+except ImportError:
+ import pickle as cPickle
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import md5
+from paste import wsgilib
+from paste import request
+
class SessionMiddleware(object):
    """
    WSGI middleware that exposes a lazy session factory to the wrapped
    application as ``environ['paste.session.factory']``.

    The session cookie header is only emitted when the application
    actually created a session; the ``start_response`` wrapper below
    handles adding it at the right moment.
    """

    def __init__(self, application, global_conf=None, **factory_kw):
        # NOTE(review): global_conf is accepted (Paste entry-point
        # convention) but not used here.
        self.application = application
        # Extra keyword arguments are forwarded to SessionFactory for
        # every request.
        self.factory_kw = factory_kw

    def __call__(self, environ, start_response):
        session_factory = SessionFactory(environ, **self.factory_kw)
        environ['paste.session.factory'] = session_factory
        remember_headers = []

        def session_start_response(status, headers, exc_info=None):
            if not session_factory.created:
                # No session yet: remember status/headers so they can be
                # replayed (plus the cookie) if the app creates the
                # session after calling start_response.
                remember_headers[:] = [status, headers]
                return start_response(status, headers)
            headers.append(session_factory.set_cookie_header())
            return start_response(status, headers, exc_info)

        app_iter = self.application(environ, session_start_response)
        def start():
            # presumably invoked by wsgilib.add_start_close before the
            # first chunk of app_iter is yielded -- confirm against
            # wsgilib.
            if session_factory.created and remember_headers:
                # Tricky bastard used the session after start_response
                status, headers = remember_headers
                headers.append(session_factory.set_cookie_header())
                exc = ValueError(
                    "You cannot get the session after content from the "
                    "app_iter has been returned")
                start_response(status, headers, (exc.__class__, exc, None))
        def close():
            # Persist the session when the response iterable is closed,
            # but only if the app ever touched it.
            if session_factory.used:
                session_factory.close()
        return wsgilib.add_start_close(app_iter, start, close)
+
+
class SessionFactory(object):
    """
    Lazily loads or creates the session for a single request.

    Calling the factory returns the session dictionary.  The first call
    either loads an existing session (named by the sid cookie) or
    creates a new one; ``created`` and ``used`` are inspected by
    SessionMiddleware to decide whether to set the cookie and to save
    the session at the end of the request.
    """

    def __init__(self, environ, cookie_name='_SID_',
                 session_class=None,
                 session_expiration=60*12, # in minutes
                 **session_class_kw):

        self.created = False
        self.used = False
        self.environ = environ
        self.cookie_name = cookie_name
        self.session = None
        # Extra keyword args are passed through to the session class.
        self.session_class = session_class or FileSession
        self.session_class_kw = session_class_kw

        self.expiration = session_expiration

    def __call__(self):
        """Return the session dictionary, creating a session if needed."""
        self.used = True
        if self.session is not None:
            return self.session.data()
        cookies = request.get_cookies(self.environ)
        session = None
        if self.cookie_name in cookies:
            self.sid = cookies[self.cookie_name].value
            try:
                session = self.session_class(self.sid, create=False,
                                             **self.session_class_kw)
            except KeyError:
                # Invalid SID
                pass
        if session is None:
            self.created = True
            self.sid = self.make_sid()
            session = self.session_class(self.sid, create=True,
                                         **self.session_class_kw)
        session.clean_up()
        self.session = session
        return session.data()

    def has_session(self):
        """True if a session already exists or is promised by a cookie."""
        if self.session is not None:
            return True
        cookies = request.get_cookies(self.environ)
        # Fixed: dict.has_key() does not exist on Python 3; use ``in``.
        if self.cookie_name in cookies:
            return True
        return False

    def make_sid(self):
        # @@: need better algorithm
        # Format: '<YYYYMMDDHHMMSS>-<32 hex chars>'.  The timestamp
        # prefix is what FileSession._clean_up_file parses to expire
        # old sessions.
        return (''.join(['%02d' % x for x in time.localtime(time.time())[:6]])
                + '-' + self.unique_id())

    def unique_id(self, for_object=None):
        """
        Generates an opaque, identifier string that is practically
        guaranteed to be unique.  If an object is passed, then its
        id() is incorporated into the generation.  Relies on md5 and
        returns a 32 character long string.
        """
        r = [time.time(), random.random()]
        if hasattr(os, 'times'):
            r.append(os.times())
        if for_object is not None:
            r.append(id(for_object))
        content = str(r)
        if not isinstance(content, bytes):
            # md5 requires bytes on Python 3; on Python 2 str(...) is
            # already bytes.  (Replaces the previous six.PY3 check.)
            content = content.encode('utf8')
        md5_hash = md5(content)
        try:
            return md5_hash.hexdigest()
        except AttributeError:
            # Older versions of Python didn't have hexdigest, so we'll
            # do it manually
            hexdigest = []
            for char in md5_hash.digest():
                hexdigest.append('%02x' % ord(char))
            return ''.join(hexdigest)

    def set_cookie_header(self):
        """Return the ('Set-Cookie', value) header tuple for this sid."""
        c = SimpleCookie()
        c[self.cookie_name] = self.sid
        c[self.cookie_name]['path'] = '/'

        # Cookie lifetime tracks session_expiration (minutes).
        gmt_expiration_time = time.gmtime(time.time() + (self.expiration * 60))
        c[self.cookie_name]['expires'] = time.strftime("%a, %d-%b-%Y %H:%M:%S GMT", gmt_expiration_time)

        name, value = str(c).split(': ', 1)
        return (name, value)

    def close(self):
        """Persist the session, if one was loaded or created."""
        if self.session is not None:
            self.session.close()
+
+
# Module-level bookkeeping for FileSession.clean_up(): when the last
# sweep of expired session files ran, whether a sweep thread is in
# flight, and how often a new sweep may be started.
last_cleanup = None
cleaning_up = False
cleanup_cycle = datetime.timedelta(seconds=15*60) #15 min
+
class FileSession(object):
    """
    Stores one session as a pickle file, named after the session id,
    inside ``session_file_path``.

    Raises KeyError for a missing or invalid session id; SessionFactory
    treats that as "create a fresh session".
    """

    def __init__(self, sid, create=False, session_file_path=tempfile.gettempdir(),
                 chmod=None,
                 expiration=2880, # in minutes: 48 hours
                 ):
        # chmod may arrive as an octal string from a config file.
        if chmod and isinstance(chmod, (six.binary_type, six.text_type)):
            chmod = int(chmod, 8)
        self.chmod = chmod
        if not sid:
            # Invalid...
            raise KeyError
        self.session_file_path = session_file_path
        self.sid = sid
        if not create:
            if not os.path.exists(self.filename()):
                raise KeyError
        # _data is loaded lazily by data(); None means "not loaded yet".
        self._data = None

        self.expiration = expiration


    def filename(self):
        # The sid is assumed filesystem-safe (it is produced by
        # SessionFactory.make_sid: digits, dash, hex).
        return os.path.join(self.session_file_path, self.sid)

    def data(self):
        """Return the mutable session dictionary, loading it on first use."""
        if self._data is not None:
            return self._data
        if os.path.exists(self.filename()):
            # NOTE(review): session files are unpickled here; the
            # session directory must not be writable by untrusted users.
            f = open(self.filename(), 'rb')
            self._data = cPickle.load(f)
            f.close()
        else:
            self._data = {}
        return self._data

    def close(self):
        """Write the session back to disk, or delete the file if empty."""
        if self._data is not None:
            filename = self.filename()
            exists = os.path.exists(filename)
            if not self._data:
                # Empty session: remove the file instead of storing {}.
                if exists:
                    os.unlink(filename)
            else:
                f = open(filename, 'wb')
                cPickle.dump(self._data, f)
                f.close()
                if not exists and self.chmod:
                    # Apply the configured mode only to newly created
                    # session files.
                    os.chmod(filename, self.chmod)

    def _clean_up(self):
        # Runs in a background thread (started by clean_up()).
        global cleaning_up
        try:
            exp_time = datetime.timedelta(seconds=self.expiration*60)
            now = datetime.datetime.now()

            #Open every session and check that it isn't too old
            for root, dirs, files in os.walk(self.session_file_path):
                for f in files:
                    self._clean_up_file(f, exp_time=exp_time, now=now)
        finally:
            cleaning_up = False

    def _clean_up_file(self, f, exp_time, now):
        # Session file names look like '<YYYYMMDDHHMMSS>-<md5hex>'
        # (see SessionFactory.make_sid); anything else is skipped.
        t = f.split("-")
        if len(t) != 2:
            return
        t = t[0]
        try:
            sess_time = datetime.datetime(
                int(t[0:4]),
                int(t[4:6]),
                int(t[6:8]),
                int(t[8:10]),
                int(t[10:12]),
                int(t[12:14]))
        except ValueError:
            # Probably not a session file at all
            return

        if sess_time + exp_time < now:
            os.remove(os.path.join(self.session_file_path, f))

    def clean_up(self):
        """Kick off an expired-session sweep, at most once per cleanup_cycle."""
        global last_cleanup, cleanup_cycle, cleaning_up
        now = datetime.datetime.now()

        if cleaning_up:
            return

        # NOTE(review): check-then-set on module globals without a lock;
        # concurrent requests could both start a sweep (wasteful but not
        # harmful) -- consistent with the module's documented caveats.
        if not last_cleanup or last_cleanup + cleanup_cycle < now:
            if not cleaning_up:
                cleaning_up = True
                try:
                    last_cleanup = now
                    t = threading.Thread(target=self._clean_up)
                    t.start()
                except:
                    # Normally _clean_up should set cleaning_up
                    # to false, but if something goes wrong starting
                    # it...
                    cleaning_up = False
                    raise
+
class _NoDefault(object):
    # Sentinel type: lets make_session_middleware distinguish "argument
    # not supplied" (fall back to global_conf) from an explicit value.
    def __repr__(self):
        return '<dynamic default>'
NoDefault = _NoDefault()
+
def make_session_middleware(
    app, global_conf,
    session_expiration=NoDefault,
    expiration=NoDefault,
    cookie_name=NoDefault,
    session_file_path=NoDefault,
    chmod=NoDefault):
    """
    Adds a middleware that handles sessions for your applications.
    The session is a persistent dictionary.  To get this dictionary
    in your application, use ``environ['paste.session.factory']()``
    which returns this persistent dictionary.

    Configuration:

    session_expiration:
        The time each session lives, in minutes.  This controls
        the cookie expiration.  Default 12 hours.

    expiration:
        The time each session lives on disk.  Old sessions are
        culled from disk based on this.  Default 48 hours.

    cookie_name:
        The cookie name used to track the session.  Use different
        names to avoid session clashes.

    session_file_path:
        Sessions are put in this location, default /tmp.

    chmod:
        The octal chmod you want to apply to new sessions (e.g., 660
        to make the sessions group readable/writable)

    Each of these also takes from the global configuration.  cookie_name
    and chmod take from session_cookie_name and session_chmod
    """
    def from_global(value, key, fallback):
        # Resolve a NoDefault argument against global_conf.
        if value is NoDefault:
            return global_conf.get(key, fallback)
        return value

    session_expiration = int(from_global(
        session_expiration, 'session_expiration', 60*12))
    expiration = int(from_global(expiration, 'expiration', 60*48))
    cookie_name = from_global(cookie_name, 'session_cookie_name', '_SID_')
    session_file_path = from_global(
        session_file_path, 'session_file_path', '/tmp')
    chmod = from_global(chmod, 'session_chmod', None)
    return SessionMiddleware(
        app, session_expiration=session_expiration,
        expiration=expiration, cookie_name=cookie_name,
        session_file_path=session_file_path, chmod=chmod)
diff --git a/paste/transaction.py b/paste/transaction.py
new file mode 100644
index 0000000..1347acd
--- /dev/null
+++ b/paste/transaction.py
@@ -0,0 +1,120 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware related to transactions and database connections.
+
+At this time it is very basic; but will eventually sprout all that
+two-phase commit goodness that I don't need.
+
+.. note::
+
+ This is experimental, and will change in the future.
+"""
+from paste.httpexceptions import HTTPException
+from wsgilib import catch_errors
+
class TransactionManagerMiddleware(object):
    """
    Wraps an application so every request gets a ``Manager`` in
    ``environ['paste.transaction_manager']``.  Transactions registered
    with the manager are committed when the request finishes normally
    and rolled back when an error propagates.
    """

    def __init__(self, application):
        self.application = application

    def __call__(self, environ, start_response):
        environ['paste.transaction_manager'] = manager = Manager()
        # This makes sure nothing else traps unexpected exceptions:
        environ['paste.throw_errors'] = True
        return catch_errors(self.application, environ, start_response,
                            error_callback=manager.error,
                            ok_callback=manager.finish)
+
class Manager(object):
    """
    Collects transactions opened during a request and settles them all
    at the end: commit on success, rollback after ``abort()`` or an
    error.
    """

    def __init__(self):
        self.aborted = False
        self.transactions = []

    def abort(self):
        """Mark the request as failed; finish() will roll back."""
        self.aborted = True

    def error(self, exc_info):
        """Error callback: abort and settle all transactions."""
        self.aborted = True
        self.finish()

    def finish(self):
        """Commit every registered transaction, or roll all back if aborted."""
        # self.aborted cannot change mid-loop, so hoist the branch.
        if self.aborted:
            for transaction in self.transactions:
                transaction.rollback()
        else:
            for transaction in self.transactions:
                transaction.commit()
+
+
class ConnectionFactory(object):
    """
    Provides a callable interface for connecting to ADBAPI databases in
    a WSGI style (using the environment).  More advanced connection
    factories might use the REMOTE_USER and/or other environment
    variables to make the connection returned depend upon the request.
    """
    def __init__(self, module, *args, **kwargs):
        #assert getattr(module,'threadsaftey',0) > 0
        self.module = module
        self.args = args
        self.kwargs = kwargs

        # deal with database string quoting issues
        if hasattr(module, 'PgQuoteString'):
            self.quote = module.PgQuoteString
        else:
            self.quote = self._default_quote

    @staticmethod
    def _default_quote(s):
        # SQL-style string literal: wrap in quotes, double any quotes.
        return "'%s'" % s.replace("'", "''")

    def __call__(self, environ=None):
        connection = self.module.connect(*self.args, **self.kwargs)
        # Stash the driver module and quoting helper on the connection
        # so downstream code can reach them.
        connection.__dict__['module'] = self.module
        connection.__dict__['quote'] = self.quote
        return connection
+
def BasicTransactionHandler(application, factory):
    """
    Provides a simple mechanism for starting a transaction based on the
    factory; and for either committing or rolling back the transaction
    depending on the result.  It checks for the response's current
    status code either through the latest call to start_response; or
    through a HTTPException's code.  If it is a 100, 200, or 300; the
    transaction is committed; otherwise it is rolled back.
    """
    def basic_transaction(environ, start_response):
        conn = factory(environ)
        environ['paste.connection'] = conn
        # Stack of observed status codes, seeded with 500 so an app
        # that never calls start_response gets rolled back.
        should_commit = [500]
        def finalizer(exc_info=None):
            if exc_info:
                if isinstance(exc_info[1], HTTPException):
                    should_commit.append(exc_info[1].code)
            # Only the most recently recorded status decides.
            if should_commit.pop() < 400:
                conn.commit()
            else:
                try:
                    conn.rollback()
                except:
                    # TODO: check if rollback has already happened
                    # NOTE(review): returning here skips conn.close() --
                    # confirm the connection is cleaned up elsewhere.
                    return
            conn.close()
        def basictrans_start_response(status, headers, exc_info = None):
            # Record the numeric status from e.g. '200 OK'.
            should_commit.append(int(status.split(" ")[0]))
            return start_response(status, headers, exc_info)
        return catch_errors(application, environ, basictrans_start_response,
                            finalizer, finalizer)
    return basic_transaction
+
__all__ = ['ConnectionFactory', 'BasicTransactionHandler']

# Manual smoke test, deliberately disabled ("and False") because it
# needs a live PostgreSQL database and the pyPgSQL driver.
if '__main__' == __name__ and False:
    from pyPgSQL import PgSQL
    factory = ConnectionFactory(PgSQL, database="testing")
    conn = factory()
    curr = conn.cursor()
    curr.execute("SELECT now(), %s" % conn.quote("B'n\\'gles"))
    (time, bing) = curr.fetchone()
    print(bing, time)
+
diff --git a/paste/translogger.py b/paste/translogger.py
new file mode 100644
index 0000000..794efd8
--- /dev/null
+++ b/paste/translogger.py
@@ -0,0 +1,122 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Middleware for logging requests, using Apache combined log format
+"""
+
+import logging
+import six
+import time
+from six.moves.urllib.parse import quote
+
class TransLogger(object):
    """
    This logging middleware will log all requests as they go through.
    They are, by default, sent to a logger named ``'wsgi'`` at the
    INFO level.

    If ``setup_console_handler`` is true, then messages for the named
    logger will be sent to the console.
    """

    # Apache combined log format; overridable via the ``format``
    # constructor argument.
    format = ('%(REMOTE_ADDR)s - %(REMOTE_USER)s [%(time)s] '
              '"%(REQUEST_METHOD)s %(REQUEST_URI)s %(HTTP_VERSION)s" '
              '%(status)s %(bytes)s "%(HTTP_REFERER)s" "%(HTTP_USER_AGENT)s"')

    def __init__(self, application,
                 logger=None,
                 format=None,
                 logging_level=logging.INFO,
                 logger_name='wsgi',
                 setup_console_handler=True,
                 set_logger_level=logging.DEBUG):
        if format is not None:
            self.format = format
        self.application = application
        self.logging_level = logging_level
        self.logger_name = logger_name
        if logger is None:
            self.logger = logging.getLogger(self.logger_name)
            if setup_console_handler:
                console = logging.StreamHandler()
                console.setLevel(logging.DEBUG)
                # We need to control the exact format:
                console.setFormatter(logging.Formatter('%(message)s'))
                self.logger.addHandler(console)
                self.logger.propagate = False
            if set_logger_level is not None:
                self.logger.setLevel(set_logger_level)
        else:
            # An explicit logger is used as-is; no handlers are added.
            self.logger = logger

    def __call__(self, environ, start_response):
        start = time.localtime()
        req_uri = quote(environ.get('SCRIPT_NAME', '')
                        + environ.get('PATH_INFO', ''))
        if environ.get('QUERY_STRING'):
            req_uri += '?'+environ['QUERY_STRING']
        method = environ['REQUEST_METHOD']
        def replacement_start_response(status, headers, exc_info=None):
            # @@: Ideally we would count the bytes going by if no
            # content-length header was provided; but that does add
            # some overhead, so at least for now we'll be lazy.
            bytes = None
            for name, value in headers:
                if name.lower() == 'content-length':
                    bytes = value
            self.write_log(environ, method, req_uri, start, status, bytes)
            return start_response(status, headers)
        return self.application(environ, replacement_start_response)

    def write_log(self, environ, method, req_uri, start, status, bytes):
        """Emit one combined-log-format line for the request."""
        if bytes is None:
            bytes = '-'
        # Local UTC offset rendered like '+0200'.  NOTE(review): the
        # hour arithmetic loses half-hour zones (e.g. +05:30) and uses
        # the process-wide daylight flag rather than the request's own
        # DST state -- confirm this approximation is acceptable.
        if time.daylight:
            offset = time.altzone / 60 / 60 * -100
        else:
            offset = time.timezone / 60 / 60 * -100
        if offset >= 0:
            offset = "+%0.4d" % (offset)
        elif offset < 0:
            offset = "%0.4d" % (offset)
        remote_addr = '-'
        if environ.get('HTTP_X_FORWARDED_FOR'):
            # Prefer the proxy-supplied client address when present.
            remote_addr = environ['HTTP_X_FORWARDED_FOR']
        elif environ.get('REMOTE_ADDR'):
            remote_addr = environ['REMOTE_ADDR']
        d = {
            'REMOTE_ADDR': remote_addr,
            'REMOTE_USER': environ.get('REMOTE_USER') or '-',
            'REQUEST_METHOD': method,
            'REQUEST_URI': req_uri,
            'HTTP_VERSION': environ.get('SERVER_PROTOCOL'),
            'time': time.strftime('%d/%b/%Y:%H:%M:%S ', start) + offset,
            'status': status.split(None, 1)[0],
            'bytes': bytes,
            'HTTP_REFERER': environ.get('HTTP_REFERER', '-'),
            'HTTP_USER_AGENT': environ.get('HTTP_USER_AGENT', '-'),
        }
        message = self.format % d
        self.logger.log(self.logging_level, message)
+
def make_filter(
    app, global_conf,
    logger_name='wsgi',
    format=None,
    logging_level=logging.INFO,
    setup_console_handler=True,
    set_logger_level=logging.DEBUG):
    # No docstring here on purpose: __doc__ is copied from TransLogger
    # below so paster displays the middleware's documentation.
    from paste.util.converters import asbool
    # Levels may arrive as names (e.g. 'INFO') from a config file.
    # logging.getLevelName() maps a level name to its numeric value and
    # is public API on both Python 2 and 3; logging._levelNames is
    # private and was removed in Python 3.
    if isinstance(logging_level, (six.binary_type, six.text_type)):
        logging_level = logging.getLevelName(logging_level)
    if isinstance(set_logger_level, (six.binary_type, six.text_type)):
        set_logger_level = logging.getLevelName(set_logger_level)
    return TransLogger(
        app,
        format=format or None,
        logging_level=logging_level,
        logger_name=logger_name,
        setup_console_handler=asbool(setup_console_handler),
        set_logger_level=set_logger_level)

make_filter.__doc__ = TransLogger.__doc__
diff --git a/paste/url.py b/paste/url.py
new file mode 100644
index 0000000..fb08d6d
--- /dev/null
+++ b/paste/url.py
@@ -0,0 +1,478 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+This module implements a class for handling URLs.
+"""
+from six.moves.urllib.parse import parse_qsl, quote, unquote, urlencode
+import cgi
+from paste import request
+import six
+
+# Imported lazily from FormEncode:
+variabledecode = None
+
+__all__ = ["URL", "Image"]
+
def html_quote(v):
    """
    Return ``str(v)`` HTML-escaped, including double quotes (for use in
    attribute values); None becomes the empty string.
    """
    if v is None:
        return ''
    try:
        # Python 3: cgi.escape was deprecated and removed in 3.8.
        from html import escape
    except ImportError:
        # Python 2
        from cgi import escape
    # quote=True also escapes '"' (and, with html.escape, "'"), which is
    # required since callers embed the result in attribute values.
    return escape(str(v), True)
+
def url_quote(v):
    """URL-quote ``str(v)``; None becomes the empty string."""
    return '' if v is None else quote(str(v))
+
def js_repr(v):
    """
    Return a JavaScript-literal representation of *v* (None -> 'null',
    booleans -> 'true'/'false', lists/dicts recursively, numbers and
    strings via repr).  Raises ValueError for unsupported types unless
    the object provides ``__js_repr__``.
    """
    try:
        # Python 2: cover long and unicode.
        integer_types = (int, long)
        text_type = unicode
    except NameError:
        # Python 3: the old bare references to ``unicode``/``long``
        # raised NameError here for ints and other fall-through types.
        integer_types = (int,)
        text_type = str
    if v is None:
        return 'null'
    elif v is False:
        return 'false'
    elif v is True:
        return 'true'
    elif isinstance(v, list):
        return '[%s]' % ', '.join(map(js_repr, v))
    elif isinstance(v, dict):
        # Fixed: iterating a dict yields keys only; unpacking key/value
        # requires .items().
        return '{%s}' % ', '.join(
            ['%s: %s' % (js_repr(key), js_repr(value))
             for key, value in v.items()])
    elif isinstance(v, str):
        return repr(v)
    elif isinstance(v, text_type):
        # Python 2 unicode only (on Python 3 the str branch matched).
        # @@: how do you do Unicode literals in Javascript?
        return repr(v.encode('UTF-8'))
    elif isinstance(v, float):
        return repr(v)
    elif isinstance(v, integer_types):
        # Python 2 long reprs end in 'L'; strip it.
        return repr(v).lstrip('L')
    elif hasattr(v, '__js_repr__'):
        return v.__js_repr__()
    else:
        raise ValueError(
            "I don't know how to turn %r into a Javascript representation"
            % v)
+
class URLResource(object):

    """
    This is an abstract superclass for different kinds of URLs
    """

    # Subclasses override with their default rendering params
    # (e.g. {'tag': 'a'} for URL, {'tag': 'img'} for Image).
    default_params = {}

    def __init__(self, url, vars=None, attrs=None,
                 params=None):
        self.url = url or '/'
        # vars is a list of (name, value) query pairs.
        self.vars = vars or []
        self.attrs = attrs or {}
        self.params = self.default_params.copy()
        self.original_params = params or {}
        if params:
            self.params.update(params)

    #@classmethod
    def from_environ(cls, environ, with_query_string=True,
                     with_path_info=True, script_name=None,
                     path_info=None, querystring=None):
        """Build an instance from a WSGI environ."""
        url = request.construct_url(
            environ, with_query_string=False,
            with_path_info=with_path_info, script_name=script_name,
            path_info=path_info)
        if with_query_string:
            if querystring is None:
                vars = request.parse_querystring(environ)
            else:
                vars = parse_qsl(
                    querystring,
                    keep_blank_values=True,
                    strict_parsing=False)
        else:
            vars = None
        v = cls(url, vars=vars)
        return v

    from_environ = classmethod(from_environ)

    def __call__(self, *args, **kw):
        res = self._add_positional(args)
        res = res._add_vars(kw)
        return res

    def __getitem__(self, item):
        # url['a=b'] adds a (URL-decoded) query variable;
        # url['segment'] adds a path segment.
        if '=' in item:
            name, value = item.split('=', 1)
            return self._add_vars({unquote(name): unquote(value)})
        return self._add_positional((item,))

    def attr(self, **kw):
        """Return a copy with HTML attributes added (trailing '_' stripped)."""
        # Fixed: iterate over a snapshot -- deleting keys while
        # iterating keys() raises RuntimeError on Python 3.
        for key in list(kw.keys()):
            if key.endswith('_'):
                kw[key[:-1]] = kw[key]
                del kw[key]
        new_attrs = self.attrs.copy()
        new_attrs.update(kw)
        return self.__class__(self.url, vars=self.vars,
                              attrs=new_attrs,
                              params=self.original_params)

    def param(self, **kw):
        """Return a copy with rendering params added."""
        new_params = self.original_params.copy()
        new_params.update(kw)
        return self.__class__(self.url, vars=self.vars,
                              attrs=self.attrs,
                              params=new_params)

    def coerce_vars(self, vars):
        """Normalize variable names (strip trailing '_') and encode dict values."""
        global variabledecode
        need_variable_encode = False
        # Fixed: iterate over a snapshot -- the loop deletes keys, which
        # raises RuntimeError on Python 3 while iterating items().
        for key, value in list(vars.items()):
            if isinstance(value, dict):
                need_variable_encode = True
            if key.endswith('_'):
                vars[key[:-1]] = vars[key]
                del vars[key]
        if need_variable_encode:
            if variabledecode is None:
                from formencode import variabledecode
            vars = variabledecode.variable_encode(vars)
        return vars


    def var(self, **kw):
        """Return a copy with query variables appended."""
        kw = self.coerce_vars(kw)
        new_vars = self.vars + list(kw.items())
        return self.__class__(self.url, vars=new_vars,
                              attrs=self.attrs,
                              params=self.original_params)

    def setvar(self, **kw):
        """
        Like ``.var(...)``, except overwrites keys, where .var simply
        extends the keys.  Setting a variable to None here will
        effectively delete it.
        """
        kw = self.coerce_vars(kw)
        new_vars = []
        for name, values in self.vars:
            if name in kw:
                continue
            new_vars.append((name, values))
        new_vars.extend(kw.items())
        return self.__class__(self.url, vars=new_vars,
                              attrs=self.attrs,
                              params=self.original_params)

    def setvars(self, **kw):
        """
        Creates a copy of this URL, but with all the variables set/reset
        (like .setvar(), except clears past variables at the same time)
        """
        return self.__class__(self.url, vars=kw.items(),
                              attrs=self.attrs,
                              params=self.original_params)

    def addpath(self, *paths):
        """Return a copy with the given path segments appended."""
        u = self
        for path in paths:
            path = str(path).lstrip('/')
            new_url = u.url
            if not new_url.endswith('/'):
                new_url += '/'
            u = u.__class__(new_url+path, vars=u.vars,
                            attrs=u.attrs,
                            params=u.original_params)
        return u

    # The "/" operator appends a path segment.  Defining both names
    # covers Python 3 and Python 2 (with or without "from __future__
    # import division") without needing a six.PY3 check.
    __div__ = addpath
    __truediv__ = addpath

    def become(self, OtherClass):
        """Return this URL re-wrapped as another URLResource subclass."""
        return OtherClass(self.url, vars=self.vars,
                          attrs=self.attrs,
                          params=self.original_params)

    def href__get(self):
        s = self.url
        if self.vars:
            s += '?'
            vars = []
            for name, val in self.vars:
                if isinstance(val, (list, tuple)):
                    # Drop None entries from sequence values.
                    val = [v for v in val if v is not None]
                elif val is None:
                    continue
                vars.append((name, val))
            s += urlencode(vars, True)
        return s

    href = property(href__get)

    def __repr__(self):
        base = '<%s %s' % (self.__class__.__name__,
                           self.href or "''")
        if self.attrs:
            base += ' attrs(%s)' % (
                ' '.join(['%s="%s"' % (html_quote(n), html_quote(v))
                          for n, v in self.attrs.items()]))
        if self.original_params:
            # Fixed: previously listed self.attrs here, so the
            # "params(...)" section showed attributes instead of params.
            base += ' params(%s)' % (
                ', '.join(['%s=%r' % (n, v)
                           for n, v in self.original_params.items()]))
        return base + '>'

    def html__get(self):
        if not self.params.get('tag'):
            raise ValueError(
                "You cannot get the HTML of %r until you set the "
                "'tag' param'" % self)
        content = self._get_content()
        tag = '<%s' % self.params.get('tag')
        attrs = ' '.join([
            '%s="%s"' % (html_quote(n), html_quote(v))
            for n, v in self._html_attrs()])
        if attrs:
            tag += ' ' + attrs
        tag += self._html_extra()
        if content is None:
            # Empty tag, e.g. <img ... />
            return tag + ' />'
        else:
            return '%s>%s</%s>' % (tag, content, self.params.get('tag'))

    html = property(html__get)

    def _html_attrs(self):
        # Hook: subclasses prepend href/src/onclick etc.
        return self.attrs.items()

    def _html_extra(self):
        return ''

    def _get_content(self):
        """
        Return the content for a tag (for self.html); return None
        for an empty tag (like ``<img />``)
        """
        raise NotImplementedError

    def _add_vars(self, vars):
        raise NotImplementedError

    def _add_positional(self, args):
        raise NotImplementedError
+
class URL(URLResource):

    r"""
    >>> u = URL('http://localhost')
    >>> u
    <URL http://localhost>
    >>> u = u['view']
    >>> str(u)
    'http://localhost/view'
    >>> u['//foo'].param(content='view').html
    '<a href="http://localhost/view/foo">view</a>'
    >>> u.param(confirm='Really?', content='goto').html
    '<a href="http://localhost/view" onclick="return confirm(\'Really?\')">goto</a>'
    >>> u(title='See "it"', content='goto').html
    '<a href="http://localhost/view?title=See+%22it%22">goto</a>'
    >>> u('another', var='fuggetaboutit', content='goto').html
    '<a href="http://localhost/view/another?var=fuggetaboutit">goto</a>'
    >>> u.attr(content='goto').html
    Traceback (most recent call last):
        ....
    ValueError: You must give a content param to <URL http://localhost/view attrs(content="goto")> generate anchor tags
    >>> str(u['foo=bar%20stuff'])
    'http://localhost/view?foo=bar+stuff'
    """

    default_params = {'tag': 'a'}

    def __str__(self):
        # URLs stringify to their href (path + query string).
        return self.href

    def _get_content(self):
        # Anchor text is mandatory for rendering <a>...</a>.
        if not self.params.get('content'):
            raise ValueError(
                "You must give a content param to %r generate anchor tags"
                % self)
        return self.params['content']

    def _add_vars(self, vars):
        # 'confirm'/'content' become rendering params, 'target' an HTML
        # attribute; everything else becomes a query variable.
        url = self
        for name in ('confirm', 'content'):
            if name in vars:
                url = url.param(**{name: vars.pop(name)})
        if 'target' in vars:
            url = url.attr(target=vars.pop('target'))
        return url.var(**vars)

    def _add_positional(self, args):
        return self.addpath(*args)

    def _html_attrs(self):
        attrs = list(self.attrs.items())
        attrs.insert(0, ('href', self.href))
        if self.params.get('confirm'):
            # Wrap navigation in a JS confirm() prompt.
            attrs.append(('onclick', 'return confirm(%s)'
                          % js_repr(self.params['confirm'])))
        return attrs

    def onclick_goto__get(self):
        # JS snippet that navigates to this URL (for use on other tags).
        return 'location.href=%s; return false' % js_repr(self.href)

    onclick_goto = property(onclick_goto__get)

    def button__get(self):
        return self.become(Button)

    button = property(button__get)

    def js_popup__get(self):
        return self.become(JSPopup)

    js_popup = property(js_popup__get)
+
class Image(URLResource):

    r"""
    >>> i = Image('/images')
    >>> i = i / '/foo.png'
    >>> i.html
    '<img src="/images/foo.png" />'
    >>> str(i['alt=foo'])
    '<img src="/images/foo.png" alt="foo" />'
    >>> i.href
    '/images/foo.png'
    """

    default_params = {'tag': 'img'}

    def __str__(self):
        # Images stringify to their full tag, not just the href.
        return self.html

    def _get_content(self):
        # <img> is always an empty tag.
        return None

    def _add_vars(self, vars):
        # Keyword "variables" become HTML attributes (alt=..., etc.).
        return self.attr(**vars)

    def _add_positional(self, args):
        return self.addpath(*args)

    def _html_attrs(self):
        attrs = list(self.attrs.items())
        attrs.insert(0, ('src', self.href))
        return attrs
+
class Button(URLResource):

    r"""
    >>> u = URL('/')
    >>> u = u / 'delete'
    >>> b = u.button['confirm=Sure?'](id=5, content='del')
    >>> str(b)
    '<button onclick="if (confirm(\'Sure?\')) {location.href=\'/delete?id=5\'}; return false">del</button>'
    """

    default_params = {'tag': 'button'}

    def __str__(self):
        return self.html

    def _get_content(self):
        """Button label: the 'content' param, else the 'value' attribute."""
        if self.params.get('content'):
            return self.params['content']
        if self.attrs.get('value'):
            # Fixed: previously returned self.attrs['content'], which
            # raised KeyError whenever only 'value' was supplied.
            return self.attrs['value']
        # @@: Error?
        return None

    def _add_vars(self, vars):
        # 'confirm' and 'content' are rendering params; the rest become
        # query variables.
        button = self
        if 'confirm' in vars:
            button = button.param(confirm=vars.pop('confirm'))
        if 'content' in vars:
            button = button.param(content=vars.pop('content'))
        return button.var(**vars)

    def _add_positional(self, args):
        return self.addpath(*args)

    def _html_attrs(self):
        attrs = list(self.attrs.items())
        onclick = 'location.href=%s' % js_repr(self.href)
        if self.params.get('confirm'):
            onclick = 'if (confirm(%s)) {%s}' % (
                js_repr(self.params['confirm']), onclick)
        onclick += '; return false'
        attrs.insert(0, ('onclick', onclick))
        return attrs
+
class JSPopup(URLResource):

    r"""
    >>> u = URL('/')
    >>> u = u / 'view'
    >>> j = u.js_popup(content='view')
    >>> j.html
    '<a href="/view" onclick="window.open(\'/view\', \'_blank\'); return false" target="_blank">view</a>'
    """

    default_params = {'tag': 'a', 'target': '_blank'}

    def _add_vars(self, vars):
        # Window geometry/appearance keywords become params; the rest
        # become query variables.
        button = self
        for var in ('width', 'height', 'stripped', 'content'):
            if var in vars:
                button = button.param(**{var: vars.pop(var)})
        return button.var(**vars)

    def _window_args(self):
        """Build the js_repr'd argument list for window.open(url, target[, features])."""
        p = self.params
        features = []
        if p.get('stripped'):
            # A "stripped" popup: no location bar, status bar or toolbar.
            p['location'] = p['status'] = p['toolbar'] = '0'
        for param in 'channelmode directories fullscreen location menubar resizable scrollbars status titlebar'.split():
            if param not in p:
                continue
            v = p[param]
            # Normalize truthy/falsy values to '1'/'0'.
            if v not in ('yes', 'no', '1', '0'):
                if v:
                    v = '1'
                else:
                    v = '0'
            features.append('%s=%s' % (param, v))
        # Fixed: the string was iterated without .split(), yielding
        # single characters, so width/height/left/top were never
        # emitted as window features.
        for param in 'height left top width'.split():
            if not p.get(param):
                continue
            features.append('%s=%s' % (param, p[param]))
        args = [self.href, p['target']]
        if features:
            args.append(','.join(features))
        return ', '.join(map(js_repr, args))

    def _html_attrs(self):
        attrs = list(self.attrs.items())
        onclick = ('window.open(%s); return false'
                   % self._window_args())
        attrs.insert(0, ('target', self.params['target']))
        attrs.insert(0, ('onclick', onclick))
        attrs.insert(0, ('href', self.href))
        return attrs

    def _get_content(self):
        # Anchor text is mandatory for rendering <a>...</a>.
        if not self.params.get('content'):
            raise ValueError(
                "You must give a content param to %r generate anchor tags"
                % self)
        return self.params['content']

    def _add_positional(self, args):
        return self.addpath(*args)
+
# Run the embedded doctests when this module is executed directly.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
diff --git a/paste/urlmap.py b/paste/urlmap.py
new file mode 100644
index 0000000..f721f2d
--- /dev/null
+++ b/paste/urlmap.py
@@ -0,0 +1,263 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Map URL prefixes to WSGI applications. See ``URLMap``
+"""
+
+import re
+import os
+import cgi
try:
    # Python 3.3+: the mapping ABCs live in collections.abc; importing
    # them from the top-level collections module was deprecated and
    # finally removed in Python 3.10.
    from collections.abc import MutableMapping as DictMixin
except ImportError:
    try:
        # Python 3.0-3.2 (and 2.6/2.7, where the alias also exists)
        from collections import MutableMapping as DictMixin
    except ImportError:
        # Very old Python 2
        from UserDict import DictMixin
+
+from paste import httpexceptions
+
+__all__ = ['URLMap', 'PathProxyURLMap']
+
def urlmap_factory(loader, global_conf, **local_conf):
    """Paste ``composite_factory`` for building a ``URLMap``.

    Takes ``not_found_app`` from the local configuration (falling back
    to the global configuration), loads it, then loads every remaining
    ``path = app_name`` entry of ``local_conf`` into a new ``URLMap``.
    """
    not_found_app = local_conf.pop('not_found_app', None)
    if not_found_app is None:
        not_found_app = global_conf.get('not_found_app')
    if not_found_app:
        not_found_app = loader.get_app(not_found_app, global_conf=global_conf)
    mapping = URLMap(not_found_app=not_found_app)
    for path_spec, app_name in local_conf.items():
        mapping[parse_path_expression(path_spec)] = loader.get_app(
            app_name, global_conf=global_conf)
    return mapping
+
def parse_path_expression(path):
    """
    Parse a path expression like ``'domain foobar.com port 20 /'`` or
    just ``'/foobar'`` for a path alone.  Returns an address string in
    the form that ``URLMap`` likes.
    """
    tokens = path.split()
    domain = port = url_path = None
    while tokens:
        token = tokens.pop(0)
        if token == 'domain':
            if not tokens:
                raise ValueError("'domain' must be followed with a domain name")
            if domain:
                raise ValueError("'domain' given twice")
            domain = tokens.pop(0)
        elif token == 'port':
            if not tokens:
                raise ValueError("'port' must be followed with a port number")
            if port:
                raise ValueError("'port' given twice")
            port = tokens.pop(0)
        else:
            if url_path:
                raise ValueError("more than one path given (have %r, got %r)"
                                 % (url_path, token))
            url_path = token
    # Assemble 'http://domain[:port][/path]' (or just the bare path).
    result = ''
    if domain:
        result = 'http://%s' % domain
    if port:
        if not domain:
            raise ValueError("If you give a port, you must also give a domain")
        result += ':' + port
    if url_path:
        if result:
            result += '/'
        result += url_path
    return result
+
class URLMap(DictMixin):

    """
    URLMap instances are dictionary-like object that dispatch to one
    of several applications based on the URL.

    The dictionary keys are URLs to match (like
    ``PATH_INFO.startswith(url)``), and the values are applications to
    dispatch to.  URLs are matched most-specific-first, i.e., longest
    URL first.  The ``SCRIPT_NAME`` and ``PATH_INFO`` environmental
    variables are adjusted to indicate the new context.

    URLs can also include domains, like ``http://blah.com/foo``, or as
    tuples ``('blah.com', '/foo')``.  This will match domain names; without
    the ``http://domain`` or with a domain of ``None`` any domain will be
    matched (so long as no other explicit domain matches). """

    def __init__(self, not_found_app=None):
        # Each entry is ((domain, url), app); kept sorted by sort_apps().
        self.applications = []
        if not not_found_app:
            not_found_app = self.not_found_app
        self.not_found_application = not_found_app

    def __len__(self):
        return len(self.applications)

    def __iter__(self):
        for app_url, app in self.applications:
            yield app_url

    # Collapses runs of slashes ('//') in paths:
    norm_url_re = re.compile('//+')
    # Detects absolute URLs carrying an explicit domain:
    domain_url_re = re.compile('^(http|https)://')

    def not_found_app(self, environ, start_response):
        """Default 404 handler: an HTTPNotFound page that lists the
        configured URL prefixes to ease debugging of unmapped requests."""
        # cgi.escape was removed in Python 3.8; html.escape is the
        # modern equivalent (it additionally escapes quotes, which is
        # harmless for this debug text).
        try:
            from html import escape
        except ImportError:  # Python 2
            from cgi import escape
        mapper = environ.get('paste.urlmap_object')
        if mapper:
            matches = [p for p, a in mapper.applications]
            extra = 'defined apps: %s' % (
                ',\n  '.join(map(repr, matches)))
        else:
            extra = ''
        # "or ''" guards against missing environ keys (escape(None)
        # would raise):
        extra += '\nSCRIPT_NAME: %r' % escape(environ.get('SCRIPT_NAME') or '')
        extra += '\nPATH_INFO: %r' % escape(environ.get('PATH_INFO') or '')
        extra += '\nHTTP_HOST: %r' % escape(environ.get('HTTP_HOST') or '')
        app = httpexceptions.HTTPNotFound(
            environ['PATH_INFO'],
            comment=escape(extra)).wsgi_application
        return app(environ, start_response)

    def normalize_url(self, url, trim=True):
        """
        Normalize ``url`` into a ``(domain, path)`` tuple.

        Accepts a plain path (``'/foo'``), an absolute URL
        (``'http://host/foo'``), or an already-split ``(domain, path)``
        tuple.  Repeated slashes are collapsed; the trailing slash is
        stripped unless ``trim`` is false.
        """
        if isinstance(url, (list, tuple)):
            domain = url[0]
            url = self.normalize_url(url[1])[1]
            return domain, url
        assert (not url or url.startswith('/')
                or self.domain_url_re.search(url)), (
            "URL fragments must start with / or http:// (you gave %r)" % url)
        match = self.domain_url_re.search(url)
        if match:
            url = url[match.end():]
            if '/' in url:
                domain, url = url.split('/', 1)
                url = '/' + url
            else:
                domain, url = url, ''
        else:
            domain = None
        url = self.norm_url_re.sub('/', url)
        if trim:
            url = url.rstrip('/')
        return domain, url

    def sort_apps(self):
        """
        Make sure applications are sorted with longest URLs first
        (and explicit domains before the catch-all no-domain entries).
        """
        def sort_key(app_desc):
            (domain, url), app = app_desc
            if not domain:
                # Make sure empty domains sort last:
                return '\xff', -len(url)
            else:
                return domain, -len(url)
        # Sort by key only.  The old decorate-sort-undecorate compared
        # the app objects themselves on key ties, which raises
        # TypeError on Python 3 for arbitrary (non-orderable) apps.
        self.applications.sort(key=sort_key)

    def __setitem__(self, url, app):
        """Map ``url`` to ``app``; setting an app of None removes the
        mapping instead."""
        if app is None:
            try:
                del self[url]
            except KeyError:
                pass
            return
        dom_url = self.normalize_url(url)
        if dom_url in self:
            del self[dom_url]
        self.applications.append((dom_url, app))
        self.sort_apps()

    def __getitem__(self, url):
        dom_url = self.normalize_url(url)
        for app_url, app in self.applications:
            if app_url == dom_url:
                return app
        raise KeyError(
            "No application with the url %r (domain: %r; existing: %s)"
            % (url[1], url[0] or '*', self.applications))

    def __delitem__(self, url):
        url = self.normalize_url(url)
        for app_url, app in self.applications:
            if app_url == url:
                self.applications.remove((app_url, app))
                break
        else:
            raise KeyError(
                "No application with the url %r" % (url,))

    def keys(self):
        # Returns a plain list (already sorted most-specific-first).
        return [app_url for app_url, app in self.applications]

    def __call__(self, environ, start_response):
        """WSGI entry point: dispatch to the most specific matching app,
        shifting the matched prefix from PATH_INFO to SCRIPT_NAME."""
        host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            # No explicit port: infer it from the URL scheme.
            if environ['wsgi.url_scheme'] == 'http':
                port = '80'
            else:
                port = '443'
        path_info = environ.get('PATH_INFO')
        path_info = self.normalize_url(path_info, False)[1]
        for (domain, app_url), app in self.applications:
            if domain and domain != host and domain != host+':'+port:
                continue
            if (path_info == app_url
                or path_info.startswith(app_url + '/')):
                environ['SCRIPT_NAME'] += app_url
                environ['PATH_INFO'] = path_info[len(app_url):]
                return app(environ, start_response)
        environ['paste.urlmap_object'] = self
        return self.not_found_application(environ, start_response)
+
+
class PathProxyURLMap(object):

    """
    This is a wrapper for URLMap that catches any strings that
    are passed in as applications; these strings are treated as
    filenames (relative to `base_path`) and are passed to the
    callable `builder`, which will return an application.

    This is intended for cases when configuration files can be
    treated as applications.

    `base_paste_url` is the URL under which all applications added through
    this wrapper must go.  Use ``""`` if you want this to not
    change incoming URLs.
    """

    def __init__(self, map, base_paste_url, base_path, builder):
        self.map = map
        self.base_paste_url = self.map.normalize_url(base_paste_url)
        self.base_path = base_path
        self.builder = builder

    def __setitem__(self, url, app):
        # ``unicode`` does not exist on Python 3 (the old check raised
        # NameError there); on Python 2 ``str is bytes``, so this tuple
        # is (str, unicode) on 2 and (str, bytes) on 3.
        string_types = (str, bytes) if str is not bytes else (str, unicode)
        if isinstance(app, string_types):
            app_fn = os.path.join(self.base_path, app)
            app = self.builder(app_fn)
        url = self.map.normalize_url(url)
        # @@: This means http://foo.com/bar will potentially
        # match foo.com, but /base_paste_url/bar, which is unintuitive
        url = (url[0] or self.base_paste_url[0],
               self.base_paste_url[1] + url[1])
        self.map[url] = app

    def __getattr__(self, attr):
        # Delegate everything else to the wrapped URLMap.
        return getattr(self.map, attr)

    # This is really the only settable attribute
    def not_found_application__get(self):
        return self.map.not_found_application
    def not_found_application__set(self, value):
        self.map.not_found_application = value
    not_found_application = property(not_found_application__get,
                                     not_found_application__set)
diff --git a/paste/urlparser.py b/paste/urlparser.py
new file mode 100644
index 0000000..19bcbac
--- /dev/null
+++ b/paste/urlparser.py
@@ -0,0 +1,639 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+WSGI applications that parse the URL and dispatch to on-disk resources
+"""
+
+import os
+import six
+import sys
+import imp
+import mimetypes
+try:
+ import pkg_resources
+except ImportError:
+ pkg_resources = None
+from paste import request
+from paste import fileapp
+from paste.util import import_string
+from paste import httpexceptions
+from .httpheaders import ETAG
+from paste.util import converters
+
class NoDefault(object):
    # Sentinel used to distinguish "argument not supplied" from an
    # explicit None (None is a meaningful value for several options).
    pass
+
+__all__ = ['URLParser', 'StaticURLParser', 'PkgResourcesParser']
+
class URLParser(object):

    """
    WSGI middleware

    Application dispatching, based on URL.  An instance of `URLParser` is
    an application that loads and delegates to other applications.  It
    looks for files in its directory that match the first part of
    PATH_INFO; these may have an extension, but are not required to have
    one, in which case the available files are searched to find the
    appropriate file.  If it is ambiguous, a 404 is returned and an error
    logged.

    By default there is a constructor for .py files that loads the module,
    and looks for an attribute ``application``, which is a ready
    application object, or an attribute that matches the module name,
    which is a factory for building applications, and is called with no
    arguments.

    URLParser will also look in __init__.py for special overrides.
    These overrides are:

    ``urlparser_hook(environ)``
        This can modify the environment.  Its return value is ignored,
        and it cannot be used to change the response in any way.  You
        *can* use this, for example, to manipulate SCRIPT_NAME/PATH_INFO
        (try to keep them consistent with the original URL -- but
        consuming PATH_INFO and moving that to SCRIPT_NAME is ok).

    ``urlparser_wrap(environ, start_response, app)``:
        After URLParser finds the application, it calls this function
        (if present).  If this function doesn't call
        ``app(environ, start_response)`` then the application won't be
        called at all!  This can be used to allocate resources (with
        ``try:finally:``) or otherwise filter the output of the
        application.

    ``not_found_hook(environ, start_response)``:
        If no file can be found (*in this directory*) to match the
        request, then this WSGI application will be called.  You can
        use this to change the URL and pass the request back to
        URLParser again, or on to some other application.  This
        doesn't catch all ``404 Not Found`` responses, just missing
        files.

    ``application(environ, start_response)``:
        This basically overrides URLParser completely, and the given
        application is used for all requests.  ``urlparser_wrap`` and
        ``urlparser_hook`` are still called, but the filesystem isn't
        searched in any way.
    """

    # Shared cache used by get_parser(), keyed on
    # (directory, base_python_name):
    parsers_by_directory = {}

    # This is lazily initialized
    init_module = NoDefault

    # Shared extension -> constructor registry; see register_constructor():
    global_constructors = {}

    def __init__(self, global_conf,
                 directory, base_python_name,
                 index_names=NoDefault,
                 hide_extensions=NoDefault,
                 ignore_extensions=NoDefault,
                 constructors=None,
                 **constructor_conf):
        """
        Create a URLParser object that looks at `directory`.
        `base_python_name` is the package that this directory
        represents, thus any Python modules in this directory will
        be given names under this package.
        """
        if global_conf:
            import warnings
            warnings.warn(
                'The global_conf argument to URLParser is deprecated; '
                'either pass in None or {}, or use make_url_parser',
                DeprecationWarning)
        else:
            global_conf = {}
        if os.path.sep != '/':
            # Normalize Windows separators so URL paths map to file paths:
            directory = directory.replace(os.path.sep, '/')
        self.directory = directory
        self.base_python_name = base_python_name
        # This logic here should be deprecated since it is in
        # make_url_parser
        if index_names is NoDefault:
            index_names = global_conf.get(
                'index_names', ('index', 'Index', 'main', 'Main'))
        self.index_names = converters.aslist(index_names)
        if hide_extensions is NoDefault:
            hide_extensions = global_conf.get(
                'hide_extensions', ('.pyc', '.bak', '.py~', '.pyo'))
        self.hide_extensions = converters.aslist(hide_extensions)
        if ignore_extensions is NoDefault:
            ignore_extensions = global_conf.get(
                'ignore_extensions', ())
        self.ignore_extensions = converters.aslist(ignore_extensions)
        self.constructors = self.global_constructors.copy()
        if constructors:
            self.constructors.update(constructors)
        # ``unicode`` does not exist on Python 3 (the old check raised
        # NameError there); on Python 2 ``str is bytes``, so this tuple
        # is (str, unicode) on 2 and (str, bytes) on 3.
        string_types = (str, bytes) if str is not bytes else (str, unicode)
        # @@: Should we also check the global options for constructors?
        for name, value in constructor_conf.items():
            if not name.startswith('constructor '):
                raise ValueError(
                    "Only extra configuration keys allowed are "
                    "'constructor .ext = import_expr'; you gave %r "
                    "(=%r)" % (name, value))
            ext = name[len('constructor '):].strip()
            if isinstance(value, string_types):
                value = import_string.eval_import(value)
            self.constructors[ext] = value

    def __call__(self, environ, start_response):
        """WSGI entry point: resolve PATH_INFO to a file in
        self.directory, construct an application for it, and call it
        (honoring the __init__.py hooks documented on the class)."""
        environ['paste.urlparser.base_python_name'] = self.base_python_name
        if self.init_module is NoDefault:
            self.init_module = self.find_init_module(environ)
        path_info = environ.get('PATH_INFO', '')
        if not path_info:
            return self.add_slash(environ, start_response)
        if (self.init_module
            and getattr(self.init_module, 'urlparser_hook', None)):
            self.init_module.urlparser_hook(environ)
        # Saved so not_found_hook sees the unconsumed URL:
        orig_path_info = environ['PATH_INFO']
        orig_script_name = environ['SCRIPT_NAME']
        application, filename = self.find_application(environ)
        if not application:
            if (self.init_module
                and getattr(self.init_module, 'not_found_hook', None)
                and environ.get('paste.urlparser.not_found_parser') is not self):
                not_found_hook = self.init_module.not_found_hook
                # Guard against infinite recursion through the hook:
                environ['paste.urlparser.not_found_parser'] = self
                environ['PATH_INFO'] = orig_path_info
                environ['SCRIPT_NAME'] = orig_script_name
                return not_found_hook(environ, start_response)
            if filename is None:
                name, rest_of_path = request.path_info_split(environ['PATH_INFO'])
                if not name:
                    name = 'one of %s' % ', '.join(
                        self.index_names or
                        ['(no index_names defined)'])

                return self.not_found(
                    environ, start_response,
                    'Tried to load %s from directory %s'
                    % (name, self.directory))
            else:
                # A file was found, but no constructor produced an app:
                environ['wsgi.errors'].write(
                    'Found resource %s, but could not construct application\n'
                    % filename)
                return self.not_found(
                    environ, start_response,
                    'Tried to load %s from directory %s'
                    % (filename, self.directory))
        if (self.init_module
            and getattr(self.init_module, 'urlparser_wrap', None)):
            return self.init_module.urlparser_wrap(
                environ, start_response, application)
        else:
            return application(environ, start_response)

    def find_application(self, environ):
        """Return ``(application, filename)`` for the current request,
        consuming one path segment from PATH_INFO.  Either element may
        be None when nothing suitable is found."""
        if (self.init_module
            and getattr(self.init_module, 'application', None)
            and not environ.get('paste.urlparser.init_application') == environ['SCRIPT_NAME']):
            environ['paste.urlparser.init_application'] = environ['SCRIPT_NAME']
            return self.init_module.application, None
        name, rest_of_path = request.path_info_split(environ['PATH_INFO'])
        environ['PATH_INFO'] = rest_of_path
        if name is not None:
            environ['SCRIPT_NAME'] = environ.get('SCRIPT_NAME', '') + '/' + name
        if not name:
            # Empty segment: look for an index file.
            names = self.index_names
            for index_name in names:
                filename = self.find_file(environ, index_name)
                if filename:
                    break
            else:
                # None of the index files found
                filename = None
        else:
            filename = self.find_file(environ, name)
        if filename is None:
            return None, filename
        else:
            return self.get_application(environ, filename), filename

    def not_found(self, environ, start_response, debug_message=None):
        """Return a 404 response for the current request."""
        exc = httpexceptions.HTTPNotFound(
            'The resource at %s could not be found'
            % request.construct_url(environ),
            comment=debug_message)
        return exc.wsgi_application(environ, start_response)

    def add_slash(self, environ, start_response):
        """
        This happens when you try to get to a directory
        without a trailing /
        """
        url = request.construct_url(environ, with_query_string=False)
        url += '/'
        if environ.get('QUERY_STRING'):
            url += '?' + environ['QUERY_STRING']
        exc = httpexceptions.HTTPMovedPermanently(
            'The resource has moved to %s - you should be redirected '
            'automatically.' % url,
            headers=[('location', url)])
        return exc.wsgi_application(environ, start_response)

    def find_file(self, environ, base_filename):
        """Return the single file in ``self.directory`` that matches
        ``base_filename`` (with or without an extension), or None when
        there is no match or the match is ambiguous."""
        possible = []
        for filename in os.listdir(self.directory):
            base, ext = os.path.splitext(filename)
            full_filename = os.path.join(self.directory, filename)
            if (ext in self.hide_extensions
                or not base):
                continue
            if filename == base_filename:
                possible.append(full_filename)
                continue
            if ext in self.ignore_extensions:
                continue
            if base == base_filename:
                possible.append(full_filename)
        if not possible:
            #environ['wsgi.errors'].write(
            #    'No file found matching %r in %s\n'
            #    % (base_filename, self.directory))
            return None
        if len(possible) > 1:
            # If there is an exact match, this isn't 'ambiguous'
            # per se; it might mean foo.gif and foo.gif.back for
            # instance.  (Fixed: this used to compare against the
            # stale loop variable, i.e. whatever file happened to be
            # listed last, instead of the exact match.)
            exact_match = os.path.join(self.directory, base_filename)
            if exact_match in possible:
                return exact_match
            else:
                environ['wsgi.errors'].write(
                    'Ambiguous URL: %s; matches files %s\n'
                    % (request.construct_url(environ),
                       ', '.join(possible)))
                return None
        return possible[0]

    def get_application(self, environ, filename):
        """Look up the constructor for ``filename``'s type ('dir', an
        extension, or the '*' catch-all) and build an application with
        it; returns None when no constructor applies."""
        if os.path.isdir(filename):
            t = 'dir'
        else:
            t = os.path.splitext(filename)[1]
        constructor = self.constructors.get(t, self.constructors.get('*'))
        if constructor is None:
            #environ['wsgi.errors'].write(
            #    'No constructor found for %s\n' % t)
            return constructor
        app = constructor(self, environ, filename)
        if app is None:
            #environ['wsgi.errors'].write(
            #    'Constructor %s return None for %s\n' %
            #    (constructor, filename))
            pass
        return app

    def register_constructor(cls, extension, constructor):
        """
        Register a function as a constructor.  Registered constructors
        apply to all instances of `URLParser`.

        The extension should have a leading ``.``, or the special
        extensions ``dir`` (for directories) and ``*`` (a catch-all).

        `constructor` must be a callable that takes two arguments:
        ``environ`` and ``filename``, and returns a WSGI application.
        """
        d = cls.global_constructors
        assert extension not in d, (
            "A constructor already exists for the extension %r (%r) "
            "when attempting to register constructor %r"
            % (extension, d[extension], constructor))
        d[extension] = constructor
    register_constructor = classmethod(register_constructor)

    def get_parser(self, directory, base_python_name):
        """
        Get a parser for the given directory, or create one if
        necessary.  This way parsers can be cached and reused.

        # @@: settings are inherited from the first caller
        """
        try:
            return self.parsers_by_directory[(directory, base_python_name)]
        except KeyError:
            parser = self.__class__(
                {},
                directory, base_python_name,
                index_names=self.index_names,
                hide_extensions=self.hide_extensions,
                ignore_extensions=self.ignore_extensions,
                constructors=self.constructors)
            self.parsers_by_directory[(directory, base_python_name)] = parser
            return parser

    def find_init_module(self, environ):
        """Load this directory's __init__.py (for the hook overrides),
        or return None when there is none."""
        filename = os.path.join(self.directory, '__init__.py')
        if not os.path.exists(filename):
            return None
        return load_module(environ, filename)

    def __repr__(self):
        return '<%s directory=%r; module=%s at %s>' % (
            self.__class__.__name__,
            self.directory,
            self.base_python_name,
            hex(abs(id(self))))
+
def make_directory(parser, environ, filename):
    """Constructor for directories: delegate to a (possibly cached)
    URLParser rooted at ``filename``, named under the current base
    Python package."""
    basename = os.path.basename(filename)
    parent_pkg = environ['paste.urlparser.base_python_name']
    child_pkg = "%s.%s" % (parent_pkg, basename) if parent_pkg else basename
    return parser.get_parser(filename, child_pkg)
+
+URLParser.register_constructor('dir', make_directory)
+
def make_unknown(parser, environ, filename):
    # Catch-all constructor ('*'): serve any unrecognized file as-is.
    return fileapp.FileApp(filename)

# Fallback for extensions with no specific constructor:
URLParser.register_constructor('*', make_unknown)
+
def load_module(environ, filename):
    """Load the Python module stored at ``filename``, naming it under
    the package given by ``paste.urlparser.base_python_name``."""
    package = environ['paste.urlparser.base_python_name']
    stem = os.path.splitext(os.path.basename(filename))[0]
    qualified = '%s.%s' % (package, stem) if package else stem
    return load_module_from_name(environ, filename, qualified,
                                 environ['wsgi.errors'])
+
def load_module_from_name(environ, filename, module_name, errors):
    """
    Import and return the module ``module_name`` located next to
    ``filename``, writing a stub ``__init__.py`` into the directory
    when one is missing (so the directory imports as a package).
    Returns None when the stub cannot be written.

    NOTE(review): this uses the ``imp`` module, which is deprecated
    and was removed in Python 3.12; porting to ``importlib`` would
    change loading semantics -- confirm before modernizing.
    """
    if module_name in sys.modules:
        return sys.modules[module_name]
    init_filename = os.path.join(os.path.dirname(filename), '__init__.py')
    if not os.path.exists(init_filename):
        try:
            f = open(init_filename, 'w')
        except (OSError, IOError) as e:
            # Best-effort: without an importable package we cannot load
            # the module, so report and bail out.
            errors.write(
                'Cannot write __init__.py file into directory %s (%s)\n'
                % (os.path.dirname(filename), e))
            return None
        f.write('#\n')
        f.close()
    fp = None
    if module_name in sys.modules:
        return sys.modules[module_name]
    if '.' in module_name:
        parent_name = '.'.join(module_name.split('.')[:-1])
        base_name = module_name.split('.')[-1]
        # Import the parent package first (recursively) so the dotted
        # module name resolves; the returned object is not used here.
        parent = load_module_from_name(environ, os.path.dirname(filename),
                                       parent_name, errors)
    else:
        base_name = module_name
    fp = None
    try:
        fp, pathname, stuff = imp.find_module(
            base_name, [os.path.dirname(filename)])
        module = imp.load_module(module_name, fp, pathname, stuff)
    finally:
        # find_module may return an open file handle; always close it.
        if fp is not None:
            fp.close()
    return module
+
def make_py(parser, environ, filename):
    """Constructor for ``.py`` files: import the module and derive a
    WSGI application from it.

    Search order:
      1. a module-level ``application`` object (or its
         ``wsgi_application`` attribute when present);
      2. an attribute named like the module itself -- either an object
         exposing ``wsgi_application``, or (old, deprecated behavior)
         a factory called with no arguments.

    Returns None (after logging to wsgi.errors) when nothing suitable
    is found.
    """
    module = load_module(environ, filename)
    if not module:
        return None
    if hasattr(module, 'application') and module.application:
        return getattr(module.application, 'wsgi_application', module.application)
    base_name = module.__name__.split('.')[-1]
    if hasattr(module, base_name):
        obj = getattr(module, base_name)
        if hasattr(obj, 'wsgi_application'):
            return obj.wsgi_application
        else:
            # @@: Old behavior; should probably be deprecated eventually:
            return getattr(module, base_name)()
    # (Fixed typo in the logged message: "Cound" -> "Could")
    environ['wsgi.errors'].write(
        "Could not find application or %s in %s\n"
        % (base_name, module))
    return None
+
+URLParser.register_constructor('.py', make_py)
+
class StaticURLParser(object):
    """
    Like ``URLParser`` but only serves static files.

    ``cache_max_age``:
      integer specifies Cache-Control max_age in seconds
    """
    # @@: Should URLParser subclass from this?

    def __init__(self, directory, root_directory=None,
                 cache_max_age=None):
        # Normalized absolute paths; root_directory bounds what may be
        # served (directory-traversal protection in __call__).
        self.directory = self.normpath(directory)
        self.root_directory = self.normpath(root_directory or directory)
        self.cache_max_age = cache_max_age

    def normpath(path):
        """Canonicalize ``path`` (absolute, case-normalized) so prefix
        comparisons against root_directory are reliable."""
        return os.path.normcase(os.path.abspath(path))
    normpath = staticmethod(normpath)

    def __call__(self, environ, start_response):
        """WSGI entry point: serve the file addressed by PATH_INFO."""
        path_info = environ.get('PATH_INFO', '')
        if not path_info:
            return self.add_slash(environ, start_response)
        if path_info == '/':
            # @@: This should obviously be configurable
            filename = 'index.html'
        else:
            filename = request.path_info_pop(environ)
        full = self.normpath(os.path.join(self.directory, filename))
        if not full.startswith(self.root_directory):
            # Out of bounds
            return self.not_found(environ, start_response)
        if not os.path.exists(full):
            return self.not_found(environ, start_response)
        if os.path.isdir(full):
            # @@: Cache?
            return self.__class__(full, root_directory=self.root_directory,
                                  cache_max_age=self.cache_max_age)(environ,
                                                                    start_response)
        if environ.get('PATH_INFO') and environ.get('PATH_INFO') != '/':
            return self.error_extra_path(environ, start_response)
        if_none_match = environ.get('HTTP_IF_NONE_MATCH')
        if if_none_match:
            mytime = os.stat(full).st_mtime
            if str(mytime) == if_none_match:
                headers = []
                ## FIXME: probably should be
                ## ETAG.update(headers, '"%s"' % mytime)
                ETAG.update(headers, mytime)
                start_response('304 Not Modified', headers)
                # WSGI bodies must be bytestrings (PEP 3333); b'' == ''
                # on Python 2, so this stays backward compatible.
                return [b'']  # empty body

        fa = self.make_app(full)
        if self.cache_max_age:
            fa.cache_control(max_age=self.cache_max_age)
        return fa(environ, start_response)

    def make_app(self, filename):
        # Hook point: subclasses may wrap or replace the file app.
        return fileapp.FileApp(filename)

    def add_slash(self, environ, start_response):
        """
        This happens when you try to get to a directory
        without a trailing /
        """
        url = request.construct_url(environ, with_query_string=False)
        url += '/'
        if environ.get('QUERY_STRING'):
            url += '?' + environ['QUERY_STRING']
        exc = httpexceptions.HTTPMovedPermanently(
            'The resource has moved to %s - you should be redirected '
            'automatically.' % url,
            headers=[('location', url)])
        return exc.wsgi_application(environ, start_response)

    def not_found(self, environ, start_response, debug_message=None):
        """Return a 404 response, with debugging detail in the comment."""
        exc = httpexceptions.HTTPNotFound(
            'The resource at %s could not be found'
            % request.construct_url(environ),
            comment='SCRIPT_NAME=%r; PATH_INFO=%r; looking in %r; debug: %s'
            % (environ.get('SCRIPT_NAME'), environ.get('PATH_INFO'),
               self.directory, debug_message or '(none)'))
        return exc.wsgi_application(environ, start_response)

    def error_extra_path(self, environ, start_response):
        """404 for leftover path segments after a file was matched."""
        exc = httpexceptions.HTTPNotFound(
            'The trailing path %r is not allowed' % environ['PATH_INFO'])
        return exc.wsgi_application(environ, start_response)

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.directory)
+
def make_static(global_conf, document_root, cache_max_age=None):
    """
    Return a WSGI application that serves a directory (configured
    with document_root)

    cache_max_age - integer specifies CACHE_CONTROL max_age in seconds
    """
    max_age = None if cache_max_age is None else int(cache_max_age)
    return StaticURLParser(document_root, cache_max_age=max_age)
+
class PkgResourcesParser(StaticURLParser):

    """
    Like ``StaticURLParser``, but serves resources out of a Python egg
    via ``pkg_resources`` instead of from a plain directory.
    """

    def __init__(self, egg_or_spec, resource_name, manager=None, root_resource=None):
        if pkg_resources is None:
            raise NotImplementedError("This class requires pkg_resources.")
        if isinstance(egg_or_spec, (six.binary_type, six.text_type)):
            # A string spec (e.g. 'MyPackage>=1.0'): resolve it to a
            # distribution object.
            self.egg = pkg_resources.get_distribution(egg_or_spec)
        else:
            self.egg = egg_or_spec
        self.resource_name = resource_name
        if manager is None:
            manager = pkg_resources.ResourceManager()
        self.manager = manager
        if root_resource is None:
            root_resource = resource_name
        # Requests are confined below this resource path (see the
        # startswith check in __call__):
        self.root_resource = os.path.normpath(root_resource)

    def __repr__(self):
        return '<%s for %s:%r>' % (
            self.__class__.__name__,
            self.egg.project_name,
            self.resource_name)

    def __call__(self, environ, start_response):
        # WSGI entry point: resolve PATH_INFO to a resource in the egg.
        path_info = environ.get('PATH_INFO', '')
        if not path_info:
            return self.add_slash(environ, start_response)
        if path_info == '/':
            # @@: This should obviously be configurable
            filename = 'index.html'
        else:
            filename = request.path_info_pop(environ)
        resource = os.path.normcase(os.path.normpath(
            self.resource_name + '/' + filename))
        if self.root_resource is not None and not resource.startswith(self.root_resource):
            # Out of bounds
            return self.not_found(environ, start_response)
        if not self.egg.has_resource(resource):
            return self.not_found(environ, start_response)
        if self.egg.resource_isdir(resource):
            # @@: Cache?
            # (old and/or idiom: use root_resource when set, otherwise
            # fall back to resource_name)
            child_root = self.root_resource is not None and self.root_resource or \
                self.resource_name
            return self.__class__(self.egg, resource, self.manager,
                                  root_resource=child_root)(environ, start_response)
        if environ.get('PATH_INFO') and environ.get('PATH_INFO') != '/':
            return self.error_extra_path(environ, start_response)

        type, encoding = mimetypes.guess_type(resource)
        if not type:
            type = 'application/octet-stream'
        # @@: I don't know what to do with the encoding.
        try:
            file = self.egg.get_resource_stream(self.manager, resource)
        except (IOError, OSError) as e:
            exc = httpexceptions.HTTPForbidden(
                'You are not permitted to view this file (%s)' % e)
            return exc.wsgi_application(environ, start_response)
        start_response('200 OK',
                       [('content-type', type)])
        return fileapp._FileIter(file)

    def not_found(self, environ, start_response, debug_message=None):
        # Overridden to mention the egg (not a filesystem directory) in
        # the debugging comment.
        exc = httpexceptions.HTTPNotFound(
            'The resource at %s could not be found'
            % request.construct_url(environ),
            comment='SCRIPT_NAME=%r; PATH_INFO=%r; looking in egg:%s#%r; debug: %s'
            % (environ.get('SCRIPT_NAME'), environ.get('PATH_INFO'),
               self.egg, self.resource_name, debug_message or '(none)'))
        return exc.wsgi_application(environ, start_response)
+
def make_pkg_resources(global_conf, egg, resource_name=''):
    """
    A static file parser that loads data from an egg using
    ``pkg_resources``.  Takes a configuration value ``egg``, which is
    an egg spec, and a base ``resource_name`` (default empty string)
    which is the path in the egg that this starts at.
    """
    # Fail early with a clear error when pkg_resources is unavailable:
    if pkg_resources is None:
        raise NotImplementedError("This function requires pkg_resources.")
    return PkgResourcesParser(egg, resource_name)
+
def make_url_parser(global_conf, directory, base_python_name,
                    index_names=None, hide_extensions=None,
                    ignore_extensions=None,
                    **constructor_conf):
    """
    Create a URLParser application that looks in ``directory``, which
    should be the directory for the Python package named in
    ``base_python_name``.  ``index_names`` are used when viewing the
    directory (like ``'index'`` for ``'index.html'``).
    ``hide_extensions`` are extensions that are not viewable (like
    ``'.pyc'``) and ``ignore_extensions`` are viewable but only if an
    explicit extension is given.
    """
    if index_names is None:
        index_names = global_conf.get(
            'index_names', ('index', 'Index', 'main', 'Main'))
    index_names = converters.aslist(index_names)

    if hide_extensions is None:
        # Extensions must carry the leading dot to match what
        # os.path.splitext() produces in URLParser.find_file(); the old
        # default ('.pyc', 'bak', 'py~') silently never matched 'bak'
        # or 'py~'.  Kept in sync with URLParser.__init__'s default.
        hide_extensions = global_conf.get(
            'hide_extensions', ('.pyc', '.bak', '.py~', '.pyo'))
    hide_extensions = converters.aslist(hide_extensions)

    if ignore_extensions is None:
        ignore_extensions = global_conf.get(
            'ignore_extensions', ())
    ignore_extensions = converters.aslist(ignore_extensions)
    # There's no real way to set constructors currently...

    return URLParser({}, directory, base_python_name,
                     index_names=index_names,
                     hide_extensions=hide_extensions,
                     ignore_extensions=ignore_extensions,
                     **constructor_conf)
diff --git a/paste/util/PySourceColor.py b/paste/util/PySourceColor.py
new file mode 100644
index 0000000..c576ead
--- /dev/null
+++ b/paste/util/PySourceColor.py
@@ -0,0 +1,2102 @@
+# -*- coding: Latin-1 -*-
+"""
+PySourceColor: color Python source code
+"""
+
+"""
+ PySourceColor.py
+
+----------------------------------------------------------------------------
+
+ A python source to colorized html/css/xhtml converter.
+ Hacked by M.E.Farmer Jr. 2004, 2005
+ Python license
+
+----------------------------------------------------------------------------
+
+ - HTML markup does not create w3c valid html, but it works on every
+ browser i've tried so far.(I.E.,Mozilla/Firefox,Opera,Konqueror,wxHTML).
+ - CSS markup is w3c validated html 4.01 strict,
+ but will not render correctly on all browsers.
+ - XHTML markup is w3c validated xhtml 1.0 strict,
+ like html 4.01, will not render correctly on all browsers.
+
+----------------------------------------------------------------------------
+
+Features:
+
+ -Three types of markup:
+ html (default)
+ css/html 4.01 strict
+ xhtml 1.0 strict
+
+ -Can tokenize and colorize:
+ 12 types of strings
+ 2 comment types
+ numbers
+ operators
+ brackets
+ math operators
+ class / name
+ def / name
+ decorator / name
+ keywords
+ arguments class/def/decorator
+ linenumbers
+ names
+ text
+
+ -Eight colorschemes built-in:
+ null
+ mono
+ lite (default)
+ dark
+ dark2
+ idle
+ viewcvs
+ pythonwin
+
+ -Header and footer
+ set to '' for builtin header / footer.
+ give path to a file containing the html
+ you want added as header or footer.
+
+ -Arbitrary text and html
+ html markup converts all to raw (TEXT token)
+ #@# for raw -> send raw text.
+ #$# for span -> inline html and text.
+ #%# for div -> block level html and text.
+
+ -Linenumbers
+ Supports all styles. New token is called LINENUMBER.
+ Defaults to NAME if not defined.
+
+ Style options
+
+ -ALL markups support these text styles:
+ b = bold
+ i = italic
+ u = underline
+ -CSS and XHTML has limited support for borders:
+ HTML markup functions will ignore these.
+ Optional: Border color in RGB hex
+ Defaults to the text forecolor.
+ #rrggbb = border color
+ Border size:
+ l = thick
+ m = medium
+ t = thin
+ Border type:
+ - = dashed
+ . = dotted
+ s = solid
+ d = double
+ g = groove
+ r = ridge
+ n = inset
+ o = outset
+ You can specify multiple sides,
+ they will all use the same style.
+ Optional: Default is full border.
+ v = bottom
+ < = left
+ > = right
+ ^ = top
+ NOTE: Specify the styles you want.
+ The markups will ignore unsupported styles
+ Also note not all browsers can show these options
+
+ -All tokens default to NAME if not defined
+ so the only absolutely critical ones to define are:
+ NAME, ERRORTOKEN, PAGEBACKGROUND
+
+----------------------------------------------------------------------------
+
+Example usage::
+
+ # import
+ import PySourceColor as psc
+ psc.convert('c:/Python22/PySourceColor.py', colors=psc.idle, show=1)
+
+ # from module import *
+ from PySourceColor import *
+ convert('c:/Python22/Lib', colors=lite, markup="css",
+ header='#$#<b>This is a simple heading</b><hr/>')
+
+ # How to use a custom colorscheme, and most of the 'features'
+ from PySourceColor import *
+ new = {
+ ERRORTOKEN: ('bui','#FF8080',''),
+ DECORATOR_NAME: ('s','#AACBBC',''),
+ DECORATOR: ('n','#333333',''),
+ NAME: ('t.<v','#1133AA','#DDFF22'),
+ NUMBER: ('','#236676','#FF5555'),
+ OPERATOR: ('b','#454567','#BBBB11'),
+ MATH_OPERATOR: ('','#935623','#423afb'),
+ BRACKETS: ('b','#ac34bf','#6457a5'),
+ COMMENT: ('t-#0022FF','#545366','#AABBFF'),
+ DOUBLECOMMENT: ('<l#553455','#553455','#FF00FF'),
+ CLASS_NAME: ('m^v-','#000000','#FFFFFF'),
+ DEF_NAME: ('l=<v','#897845','#000022'),
+ KEYWORD: ('.b','#345345','#FFFF22'),
+ SINGLEQUOTE: ('mn','#223344','#AADDCC'),
+ SINGLEQUOTE_R: ('','#344522',''),
+ SINGLEQUOTE_U: ('','#234234',''),
+ DOUBLEQUOTE: ('m#0022FF','#334421',''),
+ DOUBLEQUOTE_R: ('','#345345',''),
+ DOUBLEQUOTE_U: ('','#678673',''),
+ TRIPLESINGLEQUOTE: ('tv','#FFFFFF','#000000'),
+ TRIPLESINGLEQUOTE_R: ('tbu','#443256','#DDFFDA'),
+ TRIPLESINGLEQUOTE_U: ('','#423454','#DDFFDA'),
+ TRIPLEDOUBLEQUOTE: ('li#236fd3b<>','#000000','#FFFFFF'),
+ TRIPLEDOUBLEQUOTE_R: ('tub','#000000','#FFFFFF'),
+ TRIPLEDOUBLEQUOTE_U: ('-', '#CCAABB','#FFFAFF'),
+ LINENUMBER: ('ib-','#ff66aa','#7733FF'),
+ TEXT: ('','#546634',''),
+ PAGEBACKGROUND: '#FFFAAA',
+ }
+ if __name__ == '__main__':
+ import sys
+ convert(sys.argv[1], './xhtml.html', colors=new, markup='xhtml', show=1,
+ linenumbers=1)
+ convert(sys.argv[1], './html.html', colors=new, markup='html', show=1,
+ linenumbers=1)
+
+"""
+
+__all__ = ['ERRORTOKEN','DECORATOR_NAME', 'DECORATOR', 'ARGS', 'EXTRASPACE',
+ 'NAME', 'NUMBER', 'OPERATOR', 'COMMENT', 'MATH_OPERATOR',
+ 'DOUBLECOMMENT', 'CLASS_NAME', 'DEF_NAME', 'KEYWORD', 'BRACKETS',
+ 'SINGLEQUOTE','SINGLEQUOTE_R','SINGLEQUOTE_U','DOUBLEQUOTE',
+ 'DOUBLEQUOTE_R', 'DOUBLEQUOTE_U', 'TRIPLESINGLEQUOTE', 'TEXT',
+ 'TRIPLESINGLEQUOTE_R', 'TRIPLESINGLEQUOTE_U', 'TRIPLEDOUBLEQUOTE',
+ 'TRIPLEDOUBLEQUOTE_R', 'TRIPLEDOUBLEQUOTE_U', 'PAGEBACKGROUND',
+ 'LINENUMBER', 'CODESTART', 'CODEEND', 'PY', 'TOKEN_NAMES', 'CSSHOOK',
+ 'null', 'mono', 'lite', 'dark','dark2', 'pythonwin','idle',
+ 'viewcvs', 'Usage', 'cli', 'str2stdout', 'path2stdout', 'Parser',
+ 'str2file', 'str2html', 'str2css', 'str2markup', 'path2file',
+ 'path2html', 'convert', 'walkdir', 'defaultColors', 'showpage',
+ 'pageconvert','tagreplace', 'MARKUPDICT']
+__title__ = 'PySourceColor'
+__version__ = "2.1a"
+__date__ = '25 April 2005'
+__author__ = "M.E.Farmer Jr."
+__credits__ = '''This was originally based on a python recipe
+submitted by Jürgen Hermann to ASPN. Now based on the voices in my head.
+M.E.Farmer 2004, 2005
+Python license
+'''
+import os
+import sys
+import time
+import glob
+import getopt
+import keyword
+import token
+import tokenize
+import traceback
+from six.moves import cStringIO as StringIO
+# Do not edit
+NAME = token.NAME
+NUMBER = token.NUMBER
+COMMENT = tokenize.COMMENT
+OPERATOR = token.OP
+ERRORTOKEN = token.ERRORTOKEN
+ARGS = token.NT_OFFSET + 1
+DOUBLECOMMENT = token.NT_OFFSET + 2
+CLASS_NAME = token.NT_OFFSET + 3
+DEF_NAME = token.NT_OFFSET + 4
+KEYWORD = token.NT_OFFSET + 5
+SINGLEQUOTE = token.NT_OFFSET + 6
+SINGLEQUOTE_R = token.NT_OFFSET + 7
+SINGLEQUOTE_U = token.NT_OFFSET + 8
+DOUBLEQUOTE = token.NT_OFFSET + 9
+DOUBLEQUOTE_R = token.NT_OFFSET + 10
+DOUBLEQUOTE_U = token.NT_OFFSET + 11
+TRIPLESINGLEQUOTE = token.NT_OFFSET + 12
+TRIPLESINGLEQUOTE_R = token.NT_OFFSET + 13
+TRIPLESINGLEQUOTE_U = token.NT_OFFSET + 14
+TRIPLEDOUBLEQUOTE = token.NT_OFFSET + 15
+TRIPLEDOUBLEQUOTE_R = token.NT_OFFSET + 16
+TRIPLEDOUBLEQUOTE_U = token.NT_OFFSET + 17
+PAGEBACKGROUND = token.NT_OFFSET + 18
+DECORATOR = token.NT_OFFSET + 19
+DECORATOR_NAME = token.NT_OFFSET + 20
+BRACKETS = token.NT_OFFSET + 21
+MATH_OPERATOR = token.NT_OFFSET + 22
+LINENUMBER = token.NT_OFFSET + 23
+TEXT = token.NT_OFFSET + 24
+PY = token.NT_OFFSET + 25
+CODESTART = token.NT_OFFSET + 26
+CODEEND = token.NT_OFFSET + 27
+CSSHOOK = token.NT_OFFSET + 28
+EXTRASPACE = token.NT_OFFSET + 29
+
+# markup classname lookup
+MARKUPDICT = {
+ ERRORTOKEN: 'py_err',
+ DECORATOR_NAME: 'py_decn',
+ DECORATOR: 'py_dec',
+ ARGS: 'py_args',
+ NAME: 'py_name',
+ NUMBER: 'py_num',
+ OPERATOR: 'py_op',
+ COMMENT: 'py_com',
+ DOUBLECOMMENT: 'py_dcom',
+ CLASS_NAME: 'py_clsn',
+ DEF_NAME: 'py_defn',
+ KEYWORD: 'py_key',
+ SINGLEQUOTE: 'py_sq',
+ SINGLEQUOTE_R: 'py_sqr',
+ SINGLEQUOTE_U: 'py_squ',
+ DOUBLEQUOTE: 'py_dq',
+ DOUBLEQUOTE_R: 'py_dqr',
+ DOUBLEQUOTE_U: 'py_dqu',
+ TRIPLESINGLEQUOTE: 'py_tsq',
+ TRIPLESINGLEQUOTE_R: 'py_tsqr',
+ TRIPLESINGLEQUOTE_U: 'py_tsqu',
+ TRIPLEDOUBLEQUOTE: 'py_tdq',
+ TRIPLEDOUBLEQUOTE_R: 'py_tdqr',
+ TRIPLEDOUBLEQUOTE_U: 'py_tdqu',
+ BRACKETS: 'py_bra',
+ MATH_OPERATOR: 'py_mop',
+ LINENUMBER: 'py_lnum',
+ TEXT: 'py_text',
+ }
+# might help users that want to create custom schemes
+TOKEN_NAMES= {
+ ERRORTOKEN:'ERRORTOKEN',
+ DECORATOR_NAME:'DECORATOR_NAME',
+ DECORATOR:'DECORATOR',
+ ARGS:'ARGS',
+ NAME:'NAME',
+ NUMBER:'NUMBER',
+ OPERATOR:'OPERATOR',
+ COMMENT:'COMMENT',
+ DOUBLECOMMENT:'DOUBLECOMMENT',
+ CLASS_NAME:'CLASS_NAME',
+ DEF_NAME:'DEF_NAME',
+ KEYWORD:'KEYWORD',
+ SINGLEQUOTE:'SINGLEQUOTE',
+ SINGLEQUOTE_R:'SINGLEQUOTE_R',
+ SINGLEQUOTE_U:'SINGLEQUOTE_U',
+ DOUBLEQUOTE:'DOUBLEQUOTE',
+ DOUBLEQUOTE_R:'DOUBLEQUOTE_R',
+ DOUBLEQUOTE_U:'DOUBLEQUOTE_U',
+ TRIPLESINGLEQUOTE:'TRIPLESINGLEQUOTE',
+ TRIPLESINGLEQUOTE_R:'TRIPLESINGLEQUOTE_R',
+ TRIPLESINGLEQUOTE_U:'TRIPLESINGLEQUOTE_U',
+ TRIPLEDOUBLEQUOTE:'TRIPLEDOUBLEQUOTE',
+ TRIPLEDOUBLEQUOTE_R:'TRIPLEDOUBLEQUOTE_R',
+ TRIPLEDOUBLEQUOTE_U:'TRIPLEDOUBLEQUOTE_U',
+ BRACKETS:'BRACKETS',
+ MATH_OPERATOR:'MATH_OPERATOR',
+ LINENUMBER:'LINENUMBER',
+ TEXT:'TEXT',
+ PAGEBACKGROUND:'PAGEBACKGROUND',
+ }
+
+######################################################################
+# Edit colors and styles to taste
+# Create your own scheme, just copy one below , rename and edit.
+# Custom styles must at least define NAME, ERRORTOKEN, PAGEBACKGROUND,
+# all missing elements will default to NAME.
+# See module docstring for details on style attributes.
+######################################################################
+# Copy null and use it as a starter colorscheme.
+null = {# tokentype: ('tags border_color', 'textforecolor', 'textbackcolor')
+ ERRORTOKEN: ('','#000000',''),# Error token
+ DECORATOR_NAME: ('','#000000',''),# Decorator name
+ DECORATOR: ('','#000000',''),# @ symbol
+ ARGS: ('','#000000',''),# class,def,deco arguments
+ NAME: ('','#000000',''),# All other python text
+ NUMBER: ('','#000000',''),# 0->10
+ OPERATOR: ('','#000000',''),# ':','<=',';',',','.','==', etc
+ MATH_OPERATOR: ('','#000000',''),# '+','-','=','','**',etc
+ BRACKETS: ('','#000000',''),# '[',']','(',')','{','}'
+ COMMENT: ('','#000000',''),# Single comment
+ DOUBLECOMMENT: ('','#000000',''),## Double comment
+ CLASS_NAME: ('','#000000',''),# Class name
+ DEF_NAME: ('','#000000',''),# Def name
+ KEYWORD: ('','#000000',''),# Python keywords
+ SINGLEQUOTE: ('','#000000',''),# 'SINGLEQUOTE'
+ SINGLEQUOTE_R: ('','#000000',''),# r'SINGLEQUOTE'
+ SINGLEQUOTE_U: ('','#000000',''),# u'SINGLEQUOTE'
+ DOUBLEQUOTE: ('','#000000',''),# "DOUBLEQUOTE"
+ DOUBLEQUOTE_R: ('','#000000',''),# r"DOUBLEQUOTE"
+ DOUBLEQUOTE_U: ('','#000000',''),# u"DOUBLEQUOTE"
+ TRIPLESINGLEQUOTE: ('','#000000',''),# '''TRIPLESINGLEQUOTE'''
+ TRIPLESINGLEQUOTE_R: ('','#000000',''),# r'''TRIPLESINGLEQUOTE'''
+ TRIPLESINGLEQUOTE_U: ('','#000000',''),# u'''TRIPLESINGLEQUOTE'''
+ TRIPLEDOUBLEQUOTE: ('','#000000',''),# """TRIPLEDOUBLEQUOTE"""
+ TRIPLEDOUBLEQUOTE_R: ('','#000000',''),# r"""TRIPLEDOUBLEQUOTE"""
+ TRIPLEDOUBLEQUOTE_U: ('','#000000',''),# u"""TRIPLEDOUBLEQUOTE"""
+ TEXT: ('','#000000',''),# non python text
+ LINENUMBER: ('>ti#555555','#000000',''),# Linenumbers
+ PAGEBACKGROUND: '#FFFFFF'# set the page background
+ }
+
+mono = {
+ ERRORTOKEN: ('s#FF0000','#FF8080',''),
+ DECORATOR_NAME: ('bu','#000000',''),
+ DECORATOR: ('b','#000000',''),
+ ARGS: ('b','#555555',''),
+ NAME: ('','#000000',''),
+ NUMBER: ('b','#000000',''),
+ OPERATOR: ('b','#000000',''),
+ MATH_OPERATOR: ('b','#000000',''),
+ BRACKETS: ('b','#000000',''),
+ COMMENT: ('i','#999999',''),
+ DOUBLECOMMENT: ('b','#999999',''),
+ CLASS_NAME: ('bu','#000000',''),
+ DEF_NAME: ('b','#000000',''),
+ KEYWORD: ('b','#000000',''),
+ SINGLEQUOTE: ('','#000000',''),
+ SINGLEQUOTE_R: ('','#000000',''),
+ SINGLEQUOTE_U: ('','#000000',''),
+ DOUBLEQUOTE: ('','#000000',''),
+ DOUBLEQUOTE_R: ('','#000000',''),
+ DOUBLEQUOTE_U: ('','#000000',''),
+ TRIPLESINGLEQUOTE: ('','#000000',''),
+ TRIPLESINGLEQUOTE_R: ('','#000000',''),
+ TRIPLESINGLEQUOTE_U: ('','#000000',''),
+ TRIPLEDOUBLEQUOTE: ('i','#000000',''),
+ TRIPLEDOUBLEQUOTE_R: ('i','#000000',''),
+ TRIPLEDOUBLEQUOTE_U: ('i','#000000',''),
+ TEXT: ('','#000000',''),
+ LINENUMBER: ('>ti#555555','#000000',''),
+ PAGEBACKGROUND: '#FFFFFF'
+ }
+
+dark = {
+ ERRORTOKEN: ('s#FF0000','#FF8080',''),
+ DECORATOR_NAME: ('b','#FFBBAA',''),
+ DECORATOR: ('b','#CC5511',''),
+ ARGS: ('b','#DDDDFF',''),
+ NAME: ('','#DDDDDD',''),
+ NUMBER: ('','#FF0000',''),
+ OPERATOR: ('b','#FAF785',''),
+ MATH_OPERATOR: ('b','#FAF785',''),
+ BRACKETS: ('b','#FAF785',''),
+ COMMENT: ('','#45FCA0',''),
+ DOUBLECOMMENT: ('i','#A7C7A9',''),
+ CLASS_NAME: ('b','#B666FD',''),
+ DEF_NAME: ('b','#EBAE5C',''),
+ KEYWORD: ('b','#8680FF',''),
+ SINGLEQUOTE: ('','#F8BAFE',''),
+ SINGLEQUOTE_R: ('','#F8BAFE',''),
+ SINGLEQUOTE_U: ('','#F8BAFE',''),
+ DOUBLEQUOTE: ('','#FF80C0',''),
+ DOUBLEQUOTE_R: ('','#FF80C0',''),
+ DOUBLEQUOTE_U: ('','#FF80C0',''),
+ TRIPLESINGLEQUOTE: ('','#FF9595',''),
+ TRIPLESINGLEQUOTE_R: ('','#FF9595',''),
+ TRIPLESINGLEQUOTE_U: ('','#FF9595',''),
+ TRIPLEDOUBLEQUOTE: ('','#B3FFFF',''),
+ TRIPLEDOUBLEQUOTE_R: ('','#B3FFFF',''),
+ TRIPLEDOUBLEQUOTE_U: ('','#B3FFFF',''),
+ TEXT: ('','#FFFFFF',''),
+ LINENUMBER: ('>mi#555555','#bbccbb','#333333'),
+ PAGEBACKGROUND: '#000000'
+ }
+
+dark2 = {
+ ERRORTOKEN: ('','#FF0000',''),
+ DECORATOR_NAME: ('b','#FFBBAA',''),
+ DECORATOR: ('b','#CC5511',''),
+ ARGS: ('b','#DDDDDD',''),
+ NAME: ('','#C0C0C0',''),
+ NUMBER: ('b','#00FF00',''),
+ OPERATOR: ('b','#FF090F',''),
+ MATH_OPERATOR: ('b','#EE7020',''),
+ BRACKETS: ('b','#FFB90F',''),
+ COMMENT: ('i','#D0D000','#522000'),#'#88AA88','#11111F'),
+ DOUBLECOMMENT: ('i','#D0D000','#522000'),#'#77BB77','#11111F'),
+ CLASS_NAME: ('b','#DD4080',''),
+ DEF_NAME: ('b','#FF8040',''),
+ KEYWORD: ('b','#4726d1',''),
+ SINGLEQUOTE: ('','#8080C0',''),
+ SINGLEQUOTE_R: ('','#8080C0',''),
+ SINGLEQUOTE_U: ('','#8080C0',''),
+ DOUBLEQUOTE: ('','#ADB9F1',''),
+ DOUBLEQUOTE_R: ('','#ADB9F1',''),
+ DOUBLEQUOTE_U: ('','#ADB9F1',''),
+ TRIPLESINGLEQUOTE: ('','#00C1C1',''),#A050C0
+ TRIPLESINGLEQUOTE_R: ('','#00C1C1',''),#A050C0
+ TRIPLESINGLEQUOTE_U: ('','#00C1C1',''),#A050C0
+ TRIPLEDOUBLEQUOTE: ('','#33E3E3',''),#B090E0
+ TRIPLEDOUBLEQUOTE_R: ('','#33E3E3',''),#B090E0
+ TRIPLEDOUBLEQUOTE_U: ('','#33E3E3',''),#B090E0
+ TEXT: ('','#C0C0C0',''),
+ LINENUMBER: ('>mi#555555','#bbccbb','#333333'),
+ PAGEBACKGROUND: '#000000'
+ }
+
+lite = {
+ ERRORTOKEN: ('s#FF0000','#FF8080',''),
+ DECORATOR_NAME: ('b','#BB4422',''),
+ DECORATOR: ('b','#3333AF',''),
+ ARGS: ('b','#000000',''),
+ NAME: ('','#333333',''),
+ NUMBER: ('b','#DD2200',''),
+ OPERATOR: ('b','#000000',''),
+ MATH_OPERATOR: ('b','#000000',''),
+ BRACKETS: ('b','#000000',''),
+ COMMENT: ('','#007F00',''),
+ DOUBLECOMMENT: ('','#608060',''),
+ CLASS_NAME: ('b','#0000DF',''),
+ DEF_NAME: ('b','#9C7A00',''),#f09030
+ KEYWORD: ('b','#0000AF',''),
+ SINGLEQUOTE: ('','#600080',''),
+ SINGLEQUOTE_R: ('','#600080',''),
+ SINGLEQUOTE_U: ('','#600080',''),
+ DOUBLEQUOTE: ('','#A0008A',''),
+ DOUBLEQUOTE_R: ('','#A0008A',''),
+ DOUBLEQUOTE_U: ('','#A0008A',''),
+ TRIPLESINGLEQUOTE: ('','#337799',''),
+ TRIPLESINGLEQUOTE_R: ('','#337799',''),
+ TRIPLESINGLEQUOTE_U: ('','#337799',''),
+ TRIPLEDOUBLEQUOTE: ('','#1166AA',''),
+ TRIPLEDOUBLEQUOTE_R: ('','#1166AA',''),
+ TRIPLEDOUBLEQUOTE_U: ('','#1166AA',''),
+ TEXT: ('','#000000',''),
+ LINENUMBER: ('>ti#555555','#000000',''),
+ PAGEBACKGROUND: '#FFFFFF'
+ }
+
+idle = {
+ ERRORTOKEN: ('s#FF0000','#FF8080',''),
+ DECORATOR_NAME: ('','#900090',''),
+ DECORATOR: ('','#FF7700',''),
+ NAME: ('','#000000',''),
+ NUMBER: ('','#000000',''),
+ OPERATOR: ('','#000000',''),
+ MATH_OPERATOR: ('','#000000',''),
+ BRACKETS: ('','#000000',''),
+ COMMENT: ('','#DD0000',''),
+ DOUBLECOMMENT: ('','#DD0000',''),
+ CLASS_NAME: ('','#0000FF',''),
+ DEF_NAME: ('','#0000FF',''),
+ KEYWORD: ('','#FF7700',''),
+ SINGLEQUOTE: ('','#00AA00',''),
+ SINGLEQUOTE_R: ('','#00AA00',''),
+ SINGLEQUOTE_U: ('','#00AA00',''),
+ DOUBLEQUOTE: ('','#00AA00',''),
+ DOUBLEQUOTE_R: ('','#00AA00',''),
+ DOUBLEQUOTE_U: ('','#00AA00',''),
+ TRIPLESINGLEQUOTE: ('','#00AA00',''),
+ TRIPLESINGLEQUOTE_R: ('','#00AA00',''),
+ TRIPLESINGLEQUOTE_U: ('','#00AA00',''),
+ TRIPLEDOUBLEQUOTE: ('','#00AA00',''),
+ TRIPLEDOUBLEQUOTE_R: ('','#00AA00',''),
+ TRIPLEDOUBLEQUOTE_U: ('','#00AA00',''),
+ TEXT: ('','#000000',''),
+ LINENUMBER: ('>ti#555555','#000000',''),
+ PAGEBACKGROUND: '#FFFFFF'
+ }
+
+pythonwin = {
+ ERRORTOKEN: ('s#FF0000','#FF8080',''),
+ DECORATOR_NAME: ('b','#DD0080',''),
+ DECORATOR: ('b','#000080',''),
+ ARGS: ('','#000000',''),
+ NAME: ('','#303030',''),
+ NUMBER: ('','#008080',''),
+ OPERATOR: ('','#000000',''),
+ MATH_OPERATOR: ('','#000000',''),
+ BRACKETS: ('','#000000',''),
+ COMMENT: ('','#007F00',''),
+ DOUBLECOMMENT: ('','#7F7F7F',''),
+ CLASS_NAME: ('b','#0000FF',''),
+ DEF_NAME: ('b','#007F7F',''),
+ KEYWORD: ('b','#000080',''),
+ SINGLEQUOTE: ('','#808000',''),
+ SINGLEQUOTE_R: ('','#808000',''),
+ SINGLEQUOTE_U: ('','#808000',''),
+ DOUBLEQUOTE: ('','#808000',''),
+ DOUBLEQUOTE_R: ('','#808000',''),
+ DOUBLEQUOTE_U: ('','#808000',''),
+ TRIPLESINGLEQUOTE: ('','#808000',''),
+ TRIPLESINGLEQUOTE_R: ('','#808000',''),
+ TRIPLESINGLEQUOTE_U: ('','#808000',''),
+ TRIPLEDOUBLEQUOTE: ('','#808000',''),
+ TRIPLEDOUBLEQUOTE_R: ('','#808000',''),
+ TRIPLEDOUBLEQUOTE_U: ('','#808000',''),
+ TEXT: ('','#303030',''),
+ LINENUMBER: ('>ti#555555','#000000',''),
+ PAGEBACKGROUND: '#FFFFFF'
+ }
+
+viewcvs = {
+ ERRORTOKEN: ('s#FF0000','#FF8080',''),
+ DECORATOR_NAME: ('','#000000',''),
+ DECORATOR: ('','#000000',''),
+ ARGS: ('','#000000',''),
+ NAME: ('','#000000',''),
+ NUMBER: ('','#000000',''),
+ OPERATOR: ('','#000000',''),
+ MATH_OPERATOR: ('','#000000',''),
+ BRACKETS: ('','#000000',''),
+ COMMENT: ('i','#b22222',''),
+ DOUBLECOMMENT: ('i','#b22222',''),
+ CLASS_NAME: ('','#000000',''),
+ DEF_NAME: ('b','#0000ff',''),
+ KEYWORD: ('b','#a020f0',''),
+ SINGLEQUOTE: ('b','#bc8f8f',''),
+ SINGLEQUOTE_R: ('b','#bc8f8f',''),
+ SINGLEQUOTE_U: ('b','#bc8f8f',''),
+ DOUBLEQUOTE: ('b','#bc8f8f',''),
+ DOUBLEQUOTE_R: ('b','#bc8f8f',''),
+ DOUBLEQUOTE_U: ('b','#bc8f8f',''),
+ TRIPLESINGLEQUOTE: ('b','#bc8f8f',''),
+ TRIPLESINGLEQUOTE_R: ('b','#bc8f8f',''),
+ TRIPLESINGLEQUOTE_U: ('b','#bc8f8f',''),
+ TRIPLEDOUBLEQUOTE: ('b','#bc8f8f',''),
+ TRIPLEDOUBLEQUOTE_R: ('b','#bc8f8f',''),
+ TRIPLEDOUBLEQUOTE_U: ('b','#bc8f8f',''),
+ TEXT: ('','#000000',''),
+ LINENUMBER: ('>ti#555555','#000000',''),
+ PAGEBACKGROUND: '#FFFFFF'
+ }
+
+defaultColors = lite
+
+def Usage():
+ doc = """
+ -----------------------------------------------------------------------------
+ PySourceColor.py ver: %s
+ -----------------------------------------------------------------------------
+ Module summary:
+ This module is designed to colorize python source code.
+ Input--->python source
+ Output-->colorized (html, html4.01/css, xhtml1.0)
+ Standalone:
+ This module will work from the command line with options.
+ This module will work with redirected stdio.
+ Imported:
+ This module can be imported and used directly in your code.
+ -----------------------------------------------------------------------------
+ Command line options:
+ -h, --help
+ Optional-> Display this help message.
+ -t, --test
+ Optional-> Will ignore all others flags but --profile
+ test all schemes and markup combinations
+ -p, --profile
+ Optional-> Works only with --test or -t
+ runs profile.py and makes the test work in quiet mode.
+ -i, --in, --input
+ Optional-> If you give input on stdin.
+ Use any of these for the current dir (.,cwd)
+ Input can be file or dir.
+ Input from stdin use one of the following (-,stdin)
+ If stdin is used as input stdout is output unless specified.
+ -o, --out, --output
+ Optional-> output dir for the colorized source.
+ default: output dir is the input dir.
+ To output html to stdout use one of the following (-,stdout)
+ Stdout can be used without stdin if you give a file as input.
+ -c, --color
+ Optional-> null, mono, dark, dark2, lite, idle, pythonwin, viewcvs
+ default: lite
+ -s, --show
+ Optional-> Show page after creation.
+ default: no show
+ -m, --markup
+ Optional-> html, css, xhtml
+ css, xhtml also support external stylesheets (-e,--external)
+ default: HTML
+ -e, --external
+ Optional-> use with css, xhtml
+ Writes an style sheet instead of embedding it in the page
+ saves it as pystyle.css in the same directory.
+ html markup will silently ignore this flag.
+ -H, --header
+ Optional-> add a page header to the top of the output
+ -H
+ Builtin header (name,date,hrule)
+ --header
+ You must specify a filename.
+ The header file must be valid html
+ and must handle its own font colors.
+ ex. --header c:/tmp/header.txt
+ -F, --footer
+ Optional-> add a page footer to the bottom of the output
+ -F
+ Builtin footer (hrule,name,date)
+ --footer
+ You must specify a filename.
+ The footer file must be valid html
+ and must handle its own font colors.
+ ex. --footer c:/tmp/footer.txt
+ -l, --linenumbers
+ Optional-> default is no linenumbers
+ Adds line numbers to the start of each line in the code.
+ --convertpage
+ Given a webpage that has code embedded in tags it will
+ convert embedded code to colorized html.
+ (see pageconvert for details)
+ -----------------------------------------------------------------------------
+ Option usage:
+ # Test and show pages
+ python PySourceColor.py -t -s
+ # Test and only show profile results
+ python PySourceColor.py -t -p
+ # Colorize all .py,.pyw files in cwdir you can also use: (.,cwd)
+ python PySourceColor.py -i .
+ # Using long options w/ =
+ python PySourceColor.py --in=c:/myDir/my.py --color=lite --show
+ # Using short options w/out =
+ python PySourceColor.py -i c:/myDir/ -c idle -m css -e
+ # Using any mix
+ python PySourceColor.py --in . -o=c:/myDir --show
+ # Place a custom header on your files
+ python PySourceColor.py -i . -o c:/tmp -m xhtml --header c:/header.txt
+ -----------------------------------------------------------------------------
+ Stdio usage:
+ # Stdio using no options
+ python PySourceColor.py < c:/MyFile.py > c:/tmp/MyFile.html
+ # Using stdin alone automatically uses stdout for output: (stdin,-)
+ python PySourceColor.py -i- < c:/MyFile.py > c:/tmp/myfile.html
+ # Stdout can also be written to directly from a file instead of stdin
+ python PySourceColor.py -i c:/MyFile.py -m css -o- > c:/tmp/myfile.html
+ # Stdin can be used as input , but output can still be specified
+ python PySourceColor.py -i- -o c:/pydoc.py.html -s < c:/Python22/my.py
+ _____________________________________________________________________________
+ """
+ print(doc % (__version__))
+ sys.exit(1)
+
+###################################################### Command line interface
+
+def cli():
+ """Handle command line args and redirections"""
+ try:
+ # try to get command line args
+ opts, args = getopt.getopt(sys.argv[1:],
+ "hseqtplHFi:o:c:m:h:f:",["help", "show", "quiet",
+ "test", "external", "linenumbers", "convertpage", "profile",
+ "input=", "output=", "color=", "markup=","header=", "footer="])
+ except getopt.GetoptError:
+ # on error print help information and exit:
+ Usage()
+ # init some names
+ input = None
+ output = None
+ colorscheme = None
+ markup = 'html'
+ header = None
+ footer = None
+ linenumbers = 0
+ show = 0
+ quiet = 0
+ test = 0
+ profile = 0
+ convertpage = 0
+ form = None
+ # if we have args then process them
+ for o, a in opts:
+ if o in ["-h", "--help"]:
+ Usage()
+ sys.exit()
+ if o in ["-o", "--output", "--out"]:
+ output = a
+ if o in ["-i", "--input", "--in"]:
+ input = a
+ if input in [".", "cwd"]:
+ input = os.getcwd()
+ if o in ["-s", "--show"]:
+ show = 1
+ if o in ["-q", "--quiet"]:
+ quiet = 1
+ if o in ["-t", "--test"]:
+ test = 1
+ if o in ["--convertpage"]:
+ convertpage = 1
+ if o in ["-p", "--profile"]:
+ profile = 1
+ if o in ["-e", "--external"]:
+ form = 'external'
+ if o in ["-m", "--markup"]:
+ markup = str(a)
+ if o in ["-l", "--linenumbers"]:
+ linenumbers = 1
+ if o in ["--header"]:
+ header = str(a)
+ elif o == "-H":
+ header = ''
+ if o in ["--footer"]:
+ footer = str(a)
+ elif o == "-F":
+ footer = ''
+ if o in ["-c", "--color"]:
+ try:
+ colorscheme = globals().get(a.lower())
+ except:
+ traceback.print_exc()
+ Usage()
+ if test:
+ if profile:
+ import profile
+ profile.run('_test(show=%s, quiet=%s)'%(show,quiet))
+ else:
+ # Parse this script in every possible colorscheme and markup
+ _test(show,quiet)
+ elif input in [None, "-", "stdin"] or output in ["-", "stdout"]:
+ # determine if we are going to use stdio
+ if input not in [None, "-", "stdin"]:
+ if os.path.isfile(input) :
+ path2stdout(input, colors=colorscheme, markup=markup,
+ linenumbers=linenumbers, header=header,
+ footer=footer, form=form)
+ else:
+ raise PathError('File does not exists!')
+ else:
+ try:
+ if sys.stdin.isatty():
+ raise InputError('Please check input!')
+ else:
+ if output in [None,"-","stdout"]:
+ str2stdout(sys.stdin.read(), colors=colorscheme,
+ markup=markup, header=header,
+ footer=footer, linenumbers=linenumbers,
+ form=form)
+ else:
+ str2file(sys.stdin.read(), outfile=output, show=show,
+ markup=markup, header=header, footer=footer,
+ linenumbers=linenumbers, form=form)
+ except:
+ traceback.print_exc()
+ Usage()
+ else:
+ if os.path.exists(input):
+ if convertpage:
+ # if there was at least an input given we can proceed
+ pageconvert(input, out=output, colors=colorscheme,
+ show=show, markup=markup,linenumbers=linenumbers)
+ else:
+ # if there was at least an input given we can proceed
+ convert(source=input, outdir=output, colors=colorscheme,
+ show=show, markup=markup, quiet=quiet, header=header,
+ footer=footer, linenumbers=linenumbers, form=form)
+ else:
+ raise PathError('File does not exists!')
+ Usage()
+
+######################################################### Simple markup tests
+
+def _test(show=0, quiet=0):
+ """Test the parser and most of the functions.
+
+ There are 19 tests total (eight colorschemes in three different markups,
+ and a str2file test. Most functions are tested by this.
+ """
+ fi = sys.argv[0]
+ if not fi.endswith('.exe'):# Do not test if frozen as an archive
+ # this is a collection of test, most things are covered.
+ path2file(fi, '/tmp/null.html', null, show=show, quiet=quiet)
+ path2file(fi, '/tmp/null_css.html', null, show=show,
+ markup='css', quiet=quiet)
+ path2file(fi, '/tmp/mono.html', mono, show=show, quiet=quiet)
+ path2file(fi, '/tmp/mono_css.html', mono, show=show,
+ markup='css', quiet=quiet)
+ path2file(fi, '/tmp/lite.html', lite, show=show, quiet=quiet)
+ path2file(fi, '/tmp/lite_css.html', lite, show=show,
+ markup='css', quiet=quiet, header='', footer='',
+ linenumbers=1)
+ path2file(fi, '/tmp/lite_xhtml.html', lite, show=show,
+ markup='xhtml', quiet=quiet)
+ path2file(fi, '/tmp/dark.html', dark, show=show, quiet=quiet)
+ path2file(fi, '/tmp/dark_css.html', dark, show=show,
+ markup='css', quiet=quiet, linenumbers=1)
+ path2file(fi, '/tmp/dark2.html', dark2, show=show, quiet=quiet)
+ path2file(fi, '/tmp/dark2_css.html', dark2, show=show,
+ markup='css', quiet=quiet)
+ path2file(fi, '/tmp/dark2_xhtml.html', dark2, show=show,
+ markup='xhtml', quiet=quiet, header='', footer='',
+ linenumbers=1, form='external')
+ path2file(fi, '/tmp/idle.html', idle, show=show, quiet=quiet)
+ path2file(fi, '/tmp/idle_css.html', idle, show=show,
+ markup='css', quiet=quiet)
+ path2file(fi, '/tmp/viewcvs.html', viewcvs, show=show,
+ quiet=quiet, linenumbers=1)
+ path2file(fi, '/tmp/viewcvs_css.html', viewcvs, show=show,
+ markup='css', linenumbers=1, quiet=quiet)
+ path2file(fi, '/tmp/pythonwin.html', pythonwin, show=show,
+ quiet=quiet)
+ path2file(fi, '/tmp/pythonwin_css.html', pythonwin, show=show,
+ markup='css', quiet=quiet)
+ teststr=r'''"""This is a test of decorators and other things"""
+# This should be line 421...
+@whatever(arg,arg2)
+@A @B(arghh) @C
+def LlamaSaysNi(arg='Ni!',arg2="RALPH"):
+ """This docstring is deeply disturbed by all the llama references"""
+ print('%s The Wonder Llama says %s'% (arg2,arg))
+# So I was like duh!, and he was like ya know?!,
+# and so we were both like huh...wtf!? RTFM!! LOL!!;)
+@staticmethod## Double comments are KewL.
+def LlamasRLumpy():
+ """This docstring is too sexy to be here.
+ """
+ u"""
+=============================
+A Mse once bit my sister...
+=============================
+ """
+ ## Relax, this won't hurt a bit, just a simple, painless procedure,
+ ## hold still while I get the anesthetizing hammer.
+ m = {'three':'1','won':'2','too':'3'}
+ o = r'fishy\fishy\fishy/fish\oh/where/is\my/little\..'
+ python = uR"""
+ No realli! She was Karving her initials n the mse with the sharpened end
+ of an interspace tthbrush given her by Svenge - her brother-in-law -an Oslo
+ dentist and star of many Norwegian mvies: "The Ht Hands of an Oslo
+ Dentist", "Fillings of Passion", "The Huge Mlars of Horst Nordfink"..."""
+ RU"""142 MEXICAN WHOOPING LLAMAS"""#<-Can you fit 142 llamas in a red box?
+ n = u' HERMSGERVRDENBRTBRDA ' + """ YUTTE """
+ t = """SAMALLNIATNUOMNAIRODAUCE"""+"DENIARTYLLAICEPS04"
+ ## We apologise for the fault in the
+ ## comments. Those responsible have been
+ ## sacked.
+ y = '14 NORTH CHILEAN GUANACOS \
+(CLOSELY RELATED TO THE LLAMA)'
+ rules = [0,1,2,3,4,5]
+ print y'''
+ htmlPath = os.path.abspath('/tmp/strtest_lines.html')
+ str2file(teststr, htmlPath, colors=dark, markup='xhtml',
+ linenumbers=420, show=show)
+ _printinfo(" wrote %s" % htmlPath, quiet)
+ htmlPath = os.path.abspath('/tmp/strtest_nolines.html')
+ str2file(teststr, htmlPath, colors=dark, markup='xhtml',
+ show=show)
+ _printinfo(" wrote %s" % htmlPath, quiet)
+ else:
+ Usage()
+ return
+
+# emacs wants this: '
+
+####################################################### User functions
+
+def str2stdout(sourcestring, colors=None, title='', markup='html',
+ header=None, footer=None,
+ linenumbers=0, form=None):
+ """Converts a code(string) to colorized HTML. Writes to stdout.
+
+ form='code',or'snip' (for "<pre>yourcode</pre>" only)
+ colors=null,mono,lite,dark,dark2,idle,or pythonwin
+ """
+ Parser(sourcestring, colors=colors, title=title, markup=markup,
+ header=header, footer=footer,
+ linenumbers=linenumbers).format(form)
+
+def path2stdout(sourcepath, title='', colors=None, markup='html',
+ header=None, footer=None,
+ linenumbers=0, form=None):
+ """Converts code(file) to colorized HTML. Writes to stdout.
+
+ form='code',or'snip' (for "<pre>yourcode</pre>" only)
+ colors=null,mono,lite,dark,dark2,idle,or pythonwin
+ """
+ sourcestring = open(sourcepath).read()
+ Parser(sourcestring, colors=colors, title=sourcepath,
+ markup=markup, header=header, footer=footer,
+ linenumbers=linenumbers).format(form)
+
+def str2html(sourcestring, colors=None, title='',
+ markup='html', header=None, footer=None,
+ linenumbers=0, form=None):
+ """Converts a code(string) to colorized HTML. Returns an HTML string.
+
+ form='code',or'snip' (for "<pre>yourcode</pre>" only)
+ colors=null,mono,lite,dark,dark2,idle,or pythonwin
+ """
+ stringIO = StringIO.StringIO()
+ Parser(sourcestring, colors=colors, title=title, out=stringIO,
+ markup=markup, header=header, footer=footer,
+ linenumbers=linenumbers).format(form)
+ stringIO.seek(0)
+ return stringIO.read()
+
+def str2css(sourcestring, colors=None, title='',
+ markup='css', header=None, footer=None,
+ linenumbers=0, form=None):
+ """Converts a code string to colorized CSS/HTML. Returns CSS/HTML string
+
+ If form != None then this will return (stylesheet_str, code_str)
+ colors=null,mono,lite,dark,dark2,idle,or pythonwin
+ """
+ if markup.lower() not in ['css' ,'xhtml']:
+ markup = 'css'
+ stringIO = StringIO.StringIO()
+ parse = Parser(sourcestring, colors=colors, title=title,
+ out=stringIO, markup=markup,
+ header=header, footer=footer,
+ linenumbers=linenumbers)
+ parse.format(form)
+ stringIO.seek(0)
+ if form != None:
+ return parse._sendCSSStyle(external=1), stringIO.read()
+ else:
+ return None, stringIO.read()
+
def str2markup(sourcestring, colors=None, title='',
               markup='xhtml', header=None, footer=None,
               linenumbers=0, form=None):
    """Convert a code string into ([stylesheet or None], colorized string)."""
    if markup.lower() == 'html':
        # Plain html carries its styling inline, so there is no sheet.
        page = str2html(sourcestring, colors=colors, title=title,
                        header=header, footer=footer, markup=markup,
                        linenumbers=linenumbers, form=form)
        return None, page
    # css/xhtml markup: str2css already returns the (sheet, page) pair.
    return str2css(sourcestring, colors=colors, title=title,
                   header=header, footer=footer, markup=markup,
                   linenumbers=linenumbers, form=form)
+
def str2file(sourcestring, outfile, colors=None, title='',
             markup='html', header=None, footer=None,
             linenumbers=0, show=0, dosheet=1, form=None):
    """Converts a code string to a file.

    Writes the colorized page to ``outfile`` and, for css/xhtml markup
    with ``dosheet`` true, a ``pystyle.css`` next to it.
    Makes no attempt at correcting bad pathnames.
    """
    # Bug fix: 'title' used to be hard-coded to '' in this call, silently
    # discarding the caller's title argument.
    css, html = str2markup(sourcestring, colors=colors, title=title,
                           markup=markup, header=header, footer=footer,
                           linenumbers=linenumbers, form=form)
    # write html (close the handle even if the write fails)
    f = open(outfile, 'wt')
    try:
        f.write(html)
    finally:
        f.close()
    # write css
    if css is not None and dosheet:
        dir = os.path.dirname(outfile)
        outcss = os.path.join(dir, 'pystyle.css')
        f = open(outcss, 'wt')
        try:
            f.write(css)
        finally:
            f.close()
    if show:
        showpage(outfile)
+
def path2html(sourcepath, colors=None, markup='html',
              header=None, footer=None,
              linenumbers=0, form=None):
    """Converts code(file) to colorized HTML. Returns an HTML string.

    form='code',or'snip' (for "<pre>yourcode</pre>" only)
    colors=null,mono,lite,dark,dark2,idle,or pythonwin
    """
    stringIO = StringIO.StringIO()
    # Bug fix: close the source file instead of leaking the handle
    # (the original relied on garbage collection to close it).
    sourcefile = open(sourcepath)
    try:
        sourcestring = sourcefile.read()
    finally:
        sourcefile.close()
    Parser(sourcestring, colors, title=sourcepath, out=stringIO,
           markup=markup, header=header, footer=footer,
           linenumbers=linenumbers).format(form)
    stringIO.seek(0)
    return stringIO.read()
+
def convert(source, outdir=None, colors=None,
            show=0, markup='html', quiet=0,
            header=None, footer=None, linenumbers=0, form=None):
    """Takes a file or dir as input and places the html in the outdir.

    If outdir is None it defaults to the input dir.
    Raises PathError when 'source' names neither a file nor a directory.
    """
    if not os.path.isdir(source):
        # Single-file input: colorize it directly.
        if not os.path.isfile(source):
            raise PathError('File does not exist!')
        path2file(source, outdir, colors, show, markup,
                  quiet, form, header, footer, linenumbers, 1)
        return
    # Directory input: find the python files and colorize each one.
    fileList = walkdir(source)
    if fileList is None:
        _printinfo("No files to convert in dir.", quiet)
        return
    # make sure outdir is a dir, not a file name
    if outdir is not None and os.path.splitext(outdir)[1] != '':
        outdir = os.path.split(outdir)[0]
    total = 0
    for total, item in enumerate(fileList, 1):
        path2file(item, outdir, colors, show, markup,
                  quiet, form, header, footer, linenumbers, total)
    _printinfo('Completed colorizing %s files.' % str(total), quiet)
+
def path2file(sourcePath, out=None, colors=None, show=0,
              markup='html', quiet=0, form=None,
              header=None, footer=None, linenumbers=0, count=1):
    """Converts python source to an html file; returns the html path."""
    # Work out where the html output should go.
    if out is None:  # this is a guess
        htmlPath = sourcePath + '.html'
    elif os.path.splitext(out)[1] == '':
        # 'out' is a directory; create it if it does not exist.
        if not os.path.isdir(out):
            os.makedirs(out)
        sourceName = os.path.basename(sourcePath)
        htmlPath = os.path.join(out, sourceName) + '.html'
    else:
        # 'out' is a file name; create its directory if needed.
        outdir = os.path.split(out)[0]
        if not os.path.isdir(outdir):
            os.makedirs(outdir)
        htmlPath = out
    htmlPath = os.path.abspath(htmlPath)
    # Open the text and do the parsing.
    # Bug fix: both the source and output files used to be left open;
    # close them deterministically.
    sourcefile = open(sourcePath)
    try:
        source = sourcefile.read()
    finally:
        sourcefile.close()
    htmlfile = open(htmlPath, 'wt')
    try:
        parse = Parser(source, colors, sourcePath, htmlfile,
                       markup, header, footer, linenumbers)
        parse.format(form)
    finally:
        htmlfile.close()
    _printinfo(" wrote %s" % htmlPath, quiet)
    # html markup will ignore the external flag, but
    # we need to stop the blank file from being written.
    if form == 'external' and count == 1 and markup != 'html':
        cssSheet = parse._sendCSSStyle(external=1)
        cssPath = os.path.join(os.path.dirname(htmlPath), 'pystyle.css')
        css = open(cssPath, 'wt')
        try:
            css.write(cssSheet)
        finally:
            css.close()
        _printinfo(" wrote %s" % cssPath, quiet)
    if show:
        # load HTML page into the default web browser.
        showpage(htmlPath)
    return htmlPath
+
def tagreplace(sourcestr, colors=lite, markup='xhtml',
               linenumbers=0, dosheet=1, tagstart='<PY>'.lower(),
               tagend='</PY>'.lower(), stylesheet='pystyle.css'):
    """This is a helper function for pageconvert. Returns css, page."""
    # Bug fix: initialize css so the markup='html' path cannot hit an
    # UnboundLocalError at the final return when no tags are present.
    css = None
    if markup.lower() != 'html':
        link = '<link rel="stylesheet" href="%s" type="text/css"/></head>'
        css = link % stylesheet
        if sourcestr.find(css) == -1:
            sourcestr = sourcestr.replace('</head>', css, 1)
    starttags = sourcestr.count(tagstart)
    endtags = sourcestr.count(tagend)
    if starttags:
        if starttags != endtags:
            # Bug fix: the format arguments were not parenthesized
            # ("...%s,%s..."%tagstart,tagend), so this line used to raise
            # TypeError instead of the intended InputError.
            raise InputError('Tag mismatch!\nCheck %s,%s tags'
                             % (tagstart, tagend))
        for _ in range(starttags):
            # Replace each tagged region with its colorized rendering.
            datastart = sourcestr.find(tagstart)
            dataend = sourcestr.find(tagend)
            data = sourcestr[datastart + len(tagstart):dataend]
            data = unescape(data)
            css, data = str2markup(data, colors=colors,
                                   linenumbers=linenumbers,
                                   markup=markup, form='embed')
            start = sourcestr[:datastart]
            end = sourcestr[dataend + len(tagend):]
            sourcestr = ''.join([start, data, end])
    if not dosheet:
        css = None
    return css, sourcestr
+
def pageconvert(path, out=None, colors=lite, markup='xhtml', linenumbers=0,
                dosheet=1, tagstart='<PY>'.lower(), tagend='</PY>'.lower(),
                stylesheet='pystyle', show=1, returnstr=0):
    """This function can colorize Python source

    that is written in a webpage enclosed in tags.
    With returnstr true, returns (css, page) as strings; otherwise writes
    the converted page (and stylesheet, if any) under out/tmp/ and returns
    the new page's path.
    """
    if out == None:
        out = os.path.dirname(path)
    # NOTE(review): the input file handle is never closed explicitly;
    # CPython's refcounting closes it, but other interpreters may not.
    infile = open(path, 'r').read()
    css,page = tagreplace(sourcestr=infile,colors=colors,
                          markup=markup, linenumbers=linenumbers, dosheet=dosheet,
                          tagstart=tagstart, tagend=tagend, stylesheet=stylesheet)
    if not returnstr:
        # Write the converted page into a 'tmp' subdirectory of 'out'.
        newpath = os.path.abspath(os.path.join(
            out,'tmp', os.path.basename(path)))
        if not os.path.exists(newpath):
            try:
                os.makedirs(os.path.dirname(newpath))
            except:
                # best effort: the directory may already exist
                pass#traceback.print_exc()
                #Usage()
        y = open(newpath, 'w')
        y.write(page)
        y.close()
        if css:
            # Write the companion stylesheet next to the page.
            csspath = os.path.abspath(os.path.join(
                out,'tmp','%s.css'%stylesheet))
            x = open(csspath,'w')
            x.write(css)
            x.close()
        if show:
            # NOTE(review): os.startfile exists only on Windows; other
            # platforms fall into the except and just print a traceback.
            try:
                os.startfile(newpath)
            except:
                traceback.print_exc()
        return newpath
    else:
        return css, page
+
+##################################################################### helpers
+
def walkdir(dir):
    """Return a list of .py and .pyw files from a given directory.

    Returns None (not an empty list) when nothing matches, which is what
    the callers test for.
    """
    # Get candidates matching *.py* (this also catches .pyc/.pyo etc.).
    pathlist = glob.glob(os.path.join(dir, "*.[p][y]*"))
    # Keep only real python source files.
    filterlist = [path for path in pathlist
                  if path.endswith(('.py', '.pyw'))]
    # Empty result is reported as None.
    return filterlist or None
+
def showpage(path):
    """Helper function to open webpages in the default browser.

    Best effort: any failure is printed, never raised.
    """
    try:
        import webbrowser
        webbrowser.open_new(os.path.abspath(path))
    except:
        traceback.print_exc()
+
+def _printinfo(message, quiet):
+ """Helper to print messages"""
+ if not quiet:
+ print(message)
+
def escape(text):
    """Escape &, < and > for html (similar to cgi.escape)."""
    # '&' must be translated first so entity ampersands are not re-escaped.
    for plain, entity in (("&", "&amp;"), ("<", "&lt;"), (">", "&gt;")):
        text = text.replace(plain, entity)
    return text
+
def unescape(text):
    """Reverse escape(): convert html entities back to plain characters."""
    # '&amp;' must be translated last so it cannot create new entities
    # that would then be double-unescaped.
    for entity, plain in (("&quot;", '"'), ("&gt;", ">"),
                          ("&lt;", "<"), ("&amp;", "&")):
        text = text.replace(entity, plain)
    return text
+
+########################################################### Custom Exceptions
+
class PySourceColorError(Exception):
    """Base class for PySourceColor errors.

    str()/repr() of an instance return the bare message text.
    """
    # Base for custom errors
    def __init__(self, msg=''):
        self._msg = msg
        Exception.__init__(self, msg)
    def __repr__(self):
        return self._msg
    __str__ = __repr__
+
class PathError(PySourceColorError):
    """Raised when an input path does not exist or cannot be handled."""
    def __init__(self, msg):
        PySourceColorError.__init__(self,
            'Path error! : %s'% msg)
+
class InputError(PySourceColorError):
    """Raised when the input text is malformed (e.g. mismatched tags)."""
    def __init__(self, msg):
        PySourceColorError.__init__(self,
            'Input error! : %s'% msg)
+
+########################################################## Python code parser
+
class Parser(object):

    """MoinMoin python parser heavily chopped :)

    Tokenizes python source (self.raw) and writes colorized output to
    self.out in one of three markups: HTML, CSS (html + stylesheet) or
    XHTML.  The instance itself is the tokenize callback (__call__).
    """

    def __init__(self, raw, colors=None, title='', out=sys.stdout,
                 markup='html', header=None, footer=None, linenumbers=0):
        """Store the source text & set some flags"""
        if colors is None:
            colors = defaultColors
        self.raw = raw.expandtabs().rstrip()
        self.title = os.path.basename(title)
        self.out = out
        self.line = ''
        self.lasttext = ''
        self.argFlag = 0
        self.classFlag = 0
        self.defFlag = 0
        self.decoratorFlag = 0
        self.external = 0
        self.markup = markup.upper()
        self.colors = colors
        self.header = header
        self.footer = footer
        self.doArgs = 1           # overrides the new tokens
        self.doNames = 1          # overrides the new tokens
        self.doMathOps = 1        # overrides the new tokens
        self.doBrackets = 1       # overrides the new tokens
        self.doURL = 1            # override url conversion
        self.LINENUMHOLDER = "___line___".upper()
        self.LINESTART = "___start___".upper()
        self.skip = 0
        # Robustness: textFlag is normally set while handling '#$#'/'#%#'/
        # '#@#' comment escapes; give it a default so _sendCSSText cannot
        # see an undefined attribute.
        self.textFlag = ''
        # add space left side of code for padding.Override in color dict.
        self.extraspace = self.colors.get(EXTRASPACE, '')
        # Linenumbers less than zero also have numberlinks
        self.dolinenums = self.linenum = abs(linenumbers)
        if linenumbers < 0:
            self.numberlinks = 1
        else:
            self.numberlinks = 0

    def format(self, form=None):
        """Parse and send the colorized source"""
        if form in ('snip','code'):
            self.addEnds = 0
        elif form == 'embed':
            self.addEnds = 0
            self.external = 1
        else:
            if form == 'external':
                self.external = 1
            self.addEnds = 1

        # Store line offsets in self.lines
        self.lines = [0, 0]
        pos = 0

        # Add linenumbers
        if self.dolinenums:
            start = self.LINENUMHOLDER+' '+self.extraspace
        else:
            start = ''+self.extraspace
        newlines = []
        lines = self.raw.splitlines(0)
        for l in lines:
            # span and div escape for customizing and embedding raw text
            if (l.startswith('#$#')
                    or l.startswith('#%#')
                    or l.startswith('#@#')):
                newlines.append(l)
            else:
                # kludge for line spans in css,xhtml
                if self.markup in ['XHTML','CSS']:
                    newlines.append(self.LINESTART+' '+start+l)
                else:
                    newlines.append(start+l)
        self.raw = "\n".join(newlines)+'\n'# plus an extra newline at the end

        # Gather lines
        while 1:
            pos = self.raw.find('\n', pos) + 1
            if not pos: break
            self.lines.append(pos)
        self.lines.append(len(self.raw))

        # Wrap text in a filelike object
        self.pos = 0
        text = StringIO.StringIO(self.raw)

        # Markup start
        if self.addEnds:
            self._doPageStart()
        else:
            self._doSnippetStart()

        ## Tokenize calls the __call__
        ## function for each token till done.
        # Parse the source and write out the results.
        try:
            tokenize.tokenize(text.readline, self)
        except tokenize.TokenError as ex:
            # Bug fix: exceptions are not indexable on Python 3
            # (ex[0]/ex[1] raised TypeError); read the message and the
            # (row, col) location from ex.args instead.
            msg = ex.args[0]
            line = ex.args[1][0]
            self.out.write("<h3>ERROR: %s</h3>%s\n"%
                           (msg, self.raw[self.lines[line]:]))
            #traceback.print_exc()

        # Markup end
        if self.addEnds:
            self._doPageEnd()
        else:
            self._doSnippetEnd()

    def __call__(self, toktype, toktext, srow_col, erow_col, line):
        """Token handler. Order is important do not rearrange."""
        self.line = line
        srow, scol = srow_col
        erow, ecol = erow_col
        # Calculate new positions
        oldpos = self.pos
        newpos = self.lines[srow] + scol
        self.pos = newpos + len(toktext)
        # Handle newlines
        if toktype in (token.NEWLINE, tokenize.NL):
            self.decoratorFlag = self.argFlag = 0
            # kludge for line spans in css,xhtml
            if self.markup in ['XHTML','CSS']:
                self.out.write('</span>')
            self.out.write('\n')
            return

        # Send the original whitespace, and tokenize backslashes if present.
        # Tokenizer.py just sends continued line backslashes with whitespace.
        # This is a hack to tokenize continued line slashes as operators.
        # Should continued line backslashes be treated as operators
        # or some other token?

        if newpos > oldpos:
            if self.raw[oldpos:newpos].isspace():
                # consume a single space after linestarts and linenumbers
                # had to have them so tokenizer could separate them.
                # multiline strings are handled by do_Text functions
                if self.lasttext != self.LINESTART \
                        and self.lasttext != self.LINENUMHOLDER:
                    self.out.write(self.raw[oldpos:newpos])
                else:
                    self.out.write(self.raw[oldpos+1:newpos])
            else:
                slash = self.raw[oldpos:newpos].find('\\')+oldpos
                self.out.write(self.raw[oldpos:slash])
                getattr(self, '_send%sText'%(self.markup))(OPERATOR, '\\')
                self.linenum += 1
                # kludge for line spans in css,xhtml
                if self.markup in ['XHTML','CSS']:
                    self.out.write('</span>')
                self.out.write(self.raw[slash+1:newpos])

        # Skip indenting tokens
        if toktype in (token.INDENT, token.DEDENT):
            self.pos = newpos
            return

        # Look for operators
        if token.LPAR <= toktype and toktype <= token.OP:
            # Trap decorators py2.4 >
            if toktext == '@':
                toktype = DECORATOR
                # Set a flag if this was the decorator start so
                # the decorator name and arguments can be identified
                self.decoratorFlag = self.argFlag = 1
            else:
                if self.doArgs:
                    # Find the start for arguments
                    if toktext == '(' and self.argFlag:
                        self.argFlag = 2
                    # Find the end for arguments
                    elif toktext == ':':
                        self.argFlag = 0
                ## Separate the different operator types
                # Brackets
                if self.doBrackets and toktext in ['[',']','(',')','{','}']:
                    toktype = BRACKETS
                # Math operators
                elif self.doMathOps and toktext in ['*=','**=','-=','+=','|=',
                                                    '%=','>>=','<<=','=','^=',
                                                    '/=', '+','-','**','*','/','%']:
                    toktype = MATH_OPERATOR
                # Operator
                else:
                    toktype = OPERATOR
                # example how flags should work.
                # def fun(arg=argvalue,arg2=argvalue2):
                # 0   1  2  A  1    N    2  A 1    N   0
                if toktext == "=" and self.argFlag == 2:
                    self.argFlag = 1
                elif toktext == "," and self.argFlag == 1:
                    self.argFlag = 2
        # Look for keywords
        elif toktype == NAME and keyword.iskeyword(toktext):
            toktype = KEYWORD
            # Set a flag if this was the class / def start so
            # the class / def name and arguments can be identified
            if toktext in ['class', 'def']:
                if toktext == 'class' and \
                        not line[:line.find('class')].endswith('.'):
                    self.classFlag = self.argFlag = 1
                elif toktext == 'def' and \
                        not line[:line.find('def')].endswith('.'):
                    self.defFlag = self.argFlag = 1
                else:
                    # must have used a keyword as a name i.e. self.class
                    toktype = ERRORTOKEN

        # Look for class, def, decorator name
        elif (self.classFlag or self.defFlag or self.decoratorFlag) \
                and self.doNames:
            if self.classFlag:
                self.classFlag = 0
                toktype = CLASS_NAME
            elif self.defFlag:
                self.defFlag = 0
                toktype = DEF_NAME
            elif self.decoratorFlag:
                self.decoratorFlag = 0
                toktype = DECORATOR_NAME

        # Look for strings
        # Order of evaluation is important do not change.
        elif toktype == token.STRING:
            text = toktext.lower()
            # TRIPLE DOUBLE QUOTE's
            if (text[:3] == '"""'):
                toktype = TRIPLEDOUBLEQUOTE
            elif (text[:4] == 'r"""'):
                toktype = TRIPLEDOUBLEQUOTE_R
            elif (text[:4] == 'u"""' or
                    text[:5] == 'ur"""'):
                toktype = TRIPLEDOUBLEQUOTE_U
            # DOUBLE QUOTE's
            elif (text[:1] == '"'):
                toktype = DOUBLEQUOTE
            elif (text[:2] == 'r"'):
                toktype = DOUBLEQUOTE_R
            elif (text[:2] == 'u"' or
                    text[:3] == 'ur"'):
                toktype = DOUBLEQUOTE_U
            # TRIPLE SINGLE QUOTE's
            elif (text[:3] == "'''"):
                toktype = TRIPLESINGLEQUOTE
            elif (text[:4] == "r'''"):
                toktype = TRIPLESINGLEQUOTE_R
            elif (text[:4] == "u'''" or
                    text[:5] == "ur'''"):
                toktype = TRIPLESINGLEQUOTE_U
            # SINGLE QUOTE's
            elif (text[:1] == "'"):
                toktype = SINGLEQUOTE
            elif (text[:2] == "r'"):
                toktype = SINGLEQUOTE_R
            elif (text[:2] == "u'" or
                    text[:3] == "ur'"):
                toktype = SINGLEQUOTE_U

            # test for invalid string declaration
            if self.lasttext.lower() == 'ru':
                toktype = ERRORTOKEN

        # Look for comments
        elif toktype == COMMENT:
            if toktext[:2] == "##":
                toktype = DOUBLECOMMENT
            elif toktext[:3] == '#$#':
                toktype = TEXT
                self.textFlag = 'SPAN'
                toktext = toktext[3:]
            elif toktext[:3] == '#%#':
                toktype = TEXT
                self.textFlag = 'DIV'
                toktext = toktext[3:]
            elif toktext[:3] == '#@#':
                toktype = TEXT
                self.textFlag = 'RAW'
                toktext = toktext[3:]
            if self.doURL:
                # this is a 'fake helper function'
                # url(URI,Alias_name) or url(URI)
                url_pos = toktext.find('url(')
                if url_pos != -1:
                    before = toktext[:url_pos]
                    url = toktext[url_pos+4:]
                    splitpoint = url.find(',')
                    endpoint = url.find(')')
                    after = url[endpoint+1:]
                    url = url[:endpoint]
                    if splitpoint != -1:
                        urlparts = url.split(',',1)
                        toktext = '%s<a href="%s">%s</a>%s'%(
                            before,urlparts[0],urlparts[1].lstrip(),after)
                    else:
                        toktext = '%s<a href="%s">%s</a>%s'%(before,url,url,after)

        # Separate errors from decorators
        elif toktype == ERRORTOKEN:
            # Bug fix for < py2.4
            # space between decorators
            if self.argFlag and toktext.isspace():
                #toktype = NAME
                self.out.write(toktext)
                return
            # Bug fix for py2.2 linenumbers with decorators
            elif toktext.isspace():
                # What if we have a decorator after a >>> or ...
                #p = line.find('@')
                #if p >= 0 and not line[:p].isspace():
                    #self.out.write(toktext)
                    #return
                if self.skip:
                    self.skip = 0
                    return
                else:
                    self.out.write(toktext)
                    return
            # trap decorators < py2.4
            elif toktext == '@':
                toktype = DECORATOR
                # Set a flag if this was the decorator start so
                # the decorator name and arguments can be identified
                self.decoratorFlag = self.argFlag = 1

        # Separate args from names
        elif (self.argFlag == 2 and
                toktype == NAME and
                toktext != 'None' and
                self.doArgs):
            toktype = ARGS

        # Look for line numbers
        # The conversion code for them is in the send_text functions.
        if toktext in [self.LINENUMHOLDER,self.LINESTART]:
            toktype = LINENUMBER
            # if we don't have linenumbers set flag
            # to skip the trailing space from linestart
            if toktext == self.LINESTART and not self.dolinenums \
                    or toktext == self.LINENUMHOLDER:
                self.skip = 1

        # Skip blank token that made it thru
        ## bugfix for the last empty tag.
        if toktext == '':
            return

        # Last token text history
        self.lasttext = toktext

        # escape all but the urls in the comments
        if toktype in (DOUBLECOMMENT, COMMENT):
            if toktext.find('<a href=') == -1:
                toktext = escape(toktext)
            else:
                pass
        elif toktype == TEXT:
            pass
        else:
            toktext = escape(toktext)

        # Send text for any markup
        getattr(self, '_send%sText'%(self.markup))(toktype, toktext)
        return

    ################################################################# Helpers

    def _doSnippetStart(self):
        """Open the bare <pre> wrapper used for snippet/embed forms."""
        if self.markup == 'HTML':
            # Start of html snippet
            self.out.write('<pre>\n')
        else:
            # Start of css/xhtml snippet
            self.out.write(self.colors.get(CODESTART,'<pre class="py">\n'))

    def _doSnippetEnd(self):
        """Close the snippet wrapper."""
        # End of html snippet
        self.out.write(self.colors.get(CODEEND,'</pre>\n'))

    ######################################################## markup selectors

    def _getFile(self, filepath):
        """Read a file's contents; return '' (and print) on any failure."""
        try:
            _file = open(filepath,'r')
            content = _file.read()
            _file.close()
        except:
            traceback.print_exc()
            content = ''
        return content

    def _doPageStart(self):
        getattr(self, '_do%sStart'%(self.markup))()

    def _doPageHeader(self):
        if self.header != None:
            # Bug fix: the second test used to check '#$#' twice and never
            # '#@#'; it now matches the three raw-text escapes handled in
            # format()/__call__ and mirrors _doPageFooter.
            if self.header.find('#$#') != -1 or \
                    self.header.find('#@#') != -1 or \
                    self.header.find('#%#') != -1:
                self.out.write(self.header[3:])
            else:
                if self.header != '':
                    self.header = self._getFile(self.header)
                getattr(self, '_do%sHeader'%(self.markup))()

    def _doPageFooter(self):
        if self.footer != None:
            if self.footer.find('#$#') != -1 or \
                    self.footer.find('#@#') != -1 or \
                    self.footer.find('#%#') != -1:
                self.out.write(self.footer[3:])
            else:
                if self.footer != '':
                    self.footer = self._getFile(self.footer)
                getattr(self, '_do%sFooter'%(self.markup))()

    def _doPageEnd(self):
        getattr(self, '_do%sEnd'%(self.markup))()

    ################################################### color/style retrieval
    ## Some of these are not used anymore but are kept for documentation

    def _getLineNumber(self):
        """Return the next line number, right-justified in 5 columns."""
        num = self.linenum
        self.linenum += 1
        return str(num).rjust(5)+" "

    def _getTags(self, key):
        # style tags
        return self.colors.get(key, self.colors[NAME])[0]

    def _getForeColor(self, key):
        # get text foreground color, if not set to black
        color = self.colors.get(key, self.colors[NAME])[1]
        if color[:1] != '#':
            color = '#000000'
        return color

    def _getBackColor(self, key):
        # get text background color
        return self.colors.get(key, self.colors[NAME])[2]

    def _getPageColor(self):
        # get page background color
        return self.colors.get(PAGEBACKGROUND, '#FFFFFF')

    def _getStyle(self, key):
        # get the token style from the color dictionary
        return self.colors.get(key, self.colors[NAME])

    def _getMarkupClass(self, key):
        # get the markup class name from the markup dictionary
        return MARKUPDICT.get(key, MARKUPDICT[NAME])

    def _getDocumentCreatedBy(self):
        return '<!--This document created by %s ver.%s on: %s-->\n'%(
            __title__,__version__,time.ctime())

    ################################################### HTML markup functions

    def _doHTMLStart(self):
        # Start of html page
        self.out.write('<!DOCTYPE html PUBLIC \
"-//W3C//DTD HTML 4.01//EN">\n')
        self.out.write('<html><head><title>%s</title>\n'%(self.title))
        self.out.write(self._getDocumentCreatedBy())
        self.out.write('<meta http-equiv="Content-Type" \
content="text/html;charset=iso-8859-1">\n')
        # Get background
        self.out.write('</head><body bgcolor="%s">\n'%self._getPageColor())
        self._doPageHeader()
        self.out.write('<pre>')

    def _getHTMLStyles(self, toktype, toktext):
        """Return (starttags, endtags, color) for a token type."""
        # Get styles
        tags, color = self.colors.get(toktype, self.colors[NAME])[:2]#
        tagstart = []
        tagend = []
        # check for styles and set them if needed.
        if 'b' in tags:#Bold
            tagstart.append('<b>')
            tagend.append('</b>')
        if 'i' in tags:#Italics
            tagstart.append('<i>')
            tagend.append('</i>')
        if 'u' in tags:#Underline
            tagstart.append('<u>')
            tagend.append('</u>')
        # HTML tags should be paired like so : <b><i><u>Doh!</u></i></b>
        tagend.reverse()
        starttags = "".join(tagstart)
        endtags = "".join(tagend)
        return starttags,endtags,color

    def _sendHTMLText(self, toktype, toktext):
        """Write one token as html (font tags), expanding line numbers."""
        numberlinks = self.numberlinks

        # If it is an error, set a red box around the bad tokens
        # older browsers should ignore it
        if toktype == ERRORTOKEN:
            style = ' style="border: solid 1.5pt #FF0000;"'
        else:
            style = ''
        # Get styles
        starttag, endtag, color = self._getHTMLStyles(toktype, toktext)
        # This is a hack to 'fix' multi-line strings.
        # Multi-line strings are treated as only one token
        # even though they can be several physical lines.
        # That makes it hard to spot the start of a line,
        # because at this level all we know about are tokens.

        if toktext.count(self.LINENUMHOLDER):
            # rip apart the string and separate it by line.
            # count lines and change all linenum token to line numbers.
            # embedded all the new font tags inside the current one.
            # Do this by ending the tag first then writing our new tags,
            # then starting another font tag exactly like the first one.
            if toktype == LINENUMBER:
                splittext = toktext.split(self.LINENUMHOLDER)
            else:
                splittext = toktext.split(self.LINENUMHOLDER+' ')
            store = []
            store.append(splittext.pop(0))
            lstarttag, lendtag, lcolor = self._getHTMLStyles(LINENUMBER, toktext)
            count = len(splittext)
            for item in splittext:
                num = self._getLineNumber()
                if numberlinks:
                    numstrip = num.strip()
                    content = '<a name="%s" href="#%s">%s</a>' \
                              %(numstrip,numstrip,num)
                else:
                    content = num
                if count <= 1:
                    endtag,starttag = '',''
                linenumber = ''.join([endtag,'<font color=', lcolor, '>',
                                      lstarttag, content, lendtag, '</font>' ,starttag])
                store.append(linenumber+item)
            toktext = ''.join(store)
        # send text
        ## Output optimization
        # skip font tag if black text, but styles will still be sent. (b,u,i)
        if color != '#000000':
            startfont = '<font color="%s"%s>'%(color, style)
            endfont = '</font>'
        else:
            startfont, endfont = ('','')
        if toktype != LINENUMBER:
            self.out.write(''.join([startfont,starttag,
                                    toktext,endtag,endfont]))
        else:
            self.out.write(toktext)
        return

    def _doHTMLHeader(self):
        # Optional
        if self.header != '':
            self.out.write('%s\n'%self.header)
        else:
            color = self._getForeColor(NAME)
            self.out.write('<b><font color="%s"># %s \
 <br># %s</font></b><hr>\n'%
                           (color, self.title, time.ctime()))

    def _doHTMLFooter(self):
        # Optional
        if self.footer != '':
            self.out.write('%s\n'%self.footer)
        else:
            color = self._getForeColor(NAME)
            self.out.write('<b><font color="%s"> \
 <hr># %s<br># %s</font></b>\n'%
                           (color, self.title, time.ctime()))

    def _doHTMLEnd(self):
        # End of html page
        self.out.write('</pre>\n')
        # Write a little info at the bottom
        self._doPageFooter()
        self.out.write('</body></html>\n')

    #################################################### CSS markup functions

    def _getCSSStyle(self, key):
        """Return (css_class_name, css_declarations) for a token type."""
        # Get the tags and colors from the dictionary
        tags, forecolor, backcolor = self._getStyle(key)
        style = []
        border = None
        bordercolor = None
        tags = tags.lower()
        if tags:
            # get the border color if specified
            # the border color will be appended to
            # the list after we define a border
            if '#' in tags:# border color
                start = tags.find('#')
                end = start + 7
                bordercolor = tags[start:end]
                # Bug fix: str.replace returns a new string; the result was
                # previously discarded, leaving the hex color code inside
                # 'tags' where its letters could be misread as style flags.
                tags = tags.replace(bordercolor,'',1)
            # text styles
            if 'b' in tags:# Bold
                style.append('font-weight:bold;')
            else:
                style.append('font-weight:normal;')
            if 'i' in tags:# Italic
                style.append('font-style:italic;')
            if 'u' in tags:# Underline
                style.append('text-decoration:underline;')
            # border size
            if 'l' in tags:# thick border
                size = 'thick'
            elif 'm' in tags:# medium border
                size = 'medium'
            elif 't' in tags:# thin border
                size = 'thin'
            else:# default
                size = 'medium'
            # border styles
            if 'n' in tags:# inset border
                border = 'inset'
            elif 'o' in tags:# outset border
                border = 'outset'
            elif 'r' in tags:# ridge border
                border = 'ridge'
            elif 'g' in tags:# groove border
                border = 'groove'
            elif '=' in tags:# double border
                border = 'double'
            elif '.' in tags:# dotted border
                border = 'dotted'
            elif '-' in tags:# dashed border
                border = 'dashed'
            elif 's' in tags:# solid border
                border = 'solid'
            # border type check
            seperate_sides = 0
            for side in ['<','>','^','v']:
                if side in tags:
                    seperate_sides += 1
            # border box or separate sides
            if seperate_sides == 0 and border:
                style.append('border: %s %s;'%(border,size))
            else:
                if border == None:
                    border = 'solid'
                if 'v' in tags:# bottom border
                    style.append('border-bottom:%s %s;'%(border,size))
                if '<' in tags:# left border
                    style.append('border-left:%s %s;'%(border,size))
                if '>' in tags:# right border
                    style.append('border-right:%s %s;'%(border,size))
                if '^' in tags:# top border
                    style.append('border-top:%s %s;'%(border,size))
        else:
            style.append('font-weight:normal;')# css inherited style fix
        # we have to define our borders before we set colors
        if bordercolor:
            style.append('border-color:%s;'%bordercolor)
        # text forecolor
        style.append('color:%s;'% forecolor)
        # text backcolor
        if backcolor:
            style.append('background-color:%s;'%backcolor)
        return (self._getMarkupClass(key),' '.join(style))

    def _sendCSSStyle(self, external=0):
        """ create external and internal style sheets"""
        styles = []
        external += self.external
        if not external:
            styles.append('<style type="text/css">\n<!--\n')
        # Get page background color and write styles ignore any we don't know
        styles.append('body { background:%s; }\n'%self._getPageColor())
        # write out the various css styles
        for key in MARKUPDICT:
            styles.append('.%s { %s }\n'%self._getCSSStyle(key))
        # If you want to style the pre tag you must modify the color dict.
        # Example:
        # lite[PY] = .py {border: solid thin #000000;background:#555555}\n'''
        styles.append(self.colors.get(PY, '.py { }\n'))
        # Extra css can be added here
        # add CSSHOOK to the color dict if you need it.
        # Example:
        #lite[CSSHOOK] = """.mytag { border: solid thin #000000; } \n
        #                   .myothertag { font-weight:bold; )\n"""
        styles.append(self.colors.get(CSSHOOK,''))
        if not self.external:
            styles.append('--></style>\n')
        return ''.join(styles)

    def _doCSSStart(self):
        # Start of css/html 4.01 page
        self.out.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN">\n')
        self.out.write('<html><head><title>%s</title>\n'%(self.title))
        self.out.write(self._getDocumentCreatedBy())
        self.out.write('<meta http-equiv="Content-Type" \
content="text/html;charset=iso-8859-1">\n')
        self._doCSSStyleSheet()
        self.out.write('</head>\n<body>\n')
        # Write a little info at the top.
        self._doPageHeader()
        self.out.write(self.colors.get(CODESTART,'<pre class="py">\n'))
        return

    def _doCSSStyleSheet(self):
        if not self.external:
            # write an embedded style sheet
            self.out.write(self._sendCSSStyle())
        else:
            # write a link to an external style sheet
            self.out.write('<link rel="stylesheet" \
href="pystyle.css" type="text/css">')
        return

    def _sendCSSText(self, toktype, toktext):
        """Write one token as css/xhtml spans, expanding line markers."""
        # This is a hack to 'fix' multi-line strings.
        # Multi-line strings are treated as only one token
        # even though they can be several physical lines.
        # That makes it hard to spot the start of a line,
        # because at this level all we know about are tokens.
        markupclass = MARKUPDICT.get(toktype, MARKUPDICT[NAME])
        # if it is a LINENUMBER type then we can skip the rest
        if toktext == self.LINESTART and toktype == LINENUMBER:
            self.out.write('<span class="py_line">')
            return
        if toktext.count(self.LINENUMHOLDER):
            # rip apart the string and separate it by line
            # count lines and change all linenum token to line numbers
            # also convert linestart and lineend tokens
            # <linestart> <lnumstart> lnum <lnumend> text <lineend>
            #################################################
            newmarkup = MARKUPDICT.get(LINENUMBER, MARKUPDICT[NAME])
            lstartspan = '<span class="%s">'%(newmarkup)
            if toktype == LINENUMBER:
                splittext = toktext.split(self.LINENUMHOLDER)
            else:
                splittext = toktext.split(self.LINENUMHOLDER+' ')
            store = []
            # we have already seen the first linenumber token
            # so we can skip the first one
            store.append(splittext.pop(0))
            for item in splittext:
                num = self._getLineNumber()
                if self.numberlinks:
                    numstrip = num.strip()
                    content = '<a name="%s" href="#%s">%s</a>' \
                              %(numstrip,numstrip,num)
                else:
                    content = num
                linenumber = ''.join([lstartspan,content,'</span>'])
                store.append(linenumber+item)
            toktext = ''.join(store)
        if toktext.count(self.LINESTART):
            # wraps the textline in a line span
            # this adds a lot of kludges, is it really worth it?
            store = []
            parts = toktext.split(self.LINESTART+' ')
            # handle the first part differently
            # the whole token gets wrapped in a span later on
            first = parts.pop(0)
            # place spans before the newline
            pos = first.rfind('\n')
            if pos != -1:
                first = first[:pos]+'</span></span>'+first[pos:]
            store.append(first)
            #process the rest of the string
            for item in parts:
                #handle line numbers if present
                if self.dolinenums:
                    item = item.replace('</span>',
                                        '</span><span class="%s">'%(markupclass))
                else:
                    item = '<span class="%s">%s'%(markupclass,item)
                # add endings for line and string tokens
                pos = item.rfind('\n')
                if pos != -1:
                    item = item[:pos]+'</span></span>\n'
                store.append(item)
            # add start tags for lines
            toktext = '<span class="py_line">'.join(store)
        # Send text
        if toktype != LINENUMBER:
            if toktype == TEXT and self.textFlag == 'DIV':
                startspan = '<div class="%s">'%(markupclass)
                endspan = '</div>'
            elif toktype == TEXT and self.textFlag == 'RAW':
                startspan,endspan = ('','')
            else:
                startspan = '<span class="%s">'%(markupclass)
                endspan = '</span>'
            self.out.write(''.join([startspan, toktext, endspan]))
        else:
            self.out.write(toktext)
        return

    def _doCSSHeader(self):
        if self.header != '':
            self.out.write('%s\n'%self.header)
        else:
            name = MARKUPDICT.get(NAME)
            self.out.write('<div class="%s"># %s <br> \
# %s</div><hr>\n'%(name, self.title, time.ctime()))

    def _doCSSFooter(self):
        # Optional
        if self.footer != '':
            self.out.write('%s\n'%self.footer)
        else:
            self.out.write('<hr><div class="%s"># %s <br> \
# %s</div>\n'%(MARKUPDICT.get(NAME),self.title, time.ctime()))

    def _doCSSEnd(self):
        # End of css/html page
        self.out.write(self.colors.get(CODEEND,'</pre>\n'))
        # Write a little info at the bottom
        self._doPageFooter()
        self.out.write('</body></html>\n')
        return

    ################################################## XHTML markup functions

    def _doXHTMLStart(self):
        # XHTML is really just XML + HTML 4.01.
        # We only need to change the page headers,
        # and a few tags to get valid XHTML.
        # Start of xhtml page
        self.out.write('<?xml version="1.0"?>\n \
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"\n \
 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n \
<html xmlns="http://www.w3.org/1999/xhtml">\n')
        self.out.write('<head><title>%s</title>\n'%(self.title))
        self.out.write(self._getDocumentCreatedBy())
        self.out.write('<meta http-equiv="Content-Type" \
content="text/html;charset=iso-8859-1"/>\n')
        self._doXHTMLStyleSheet()
        self.out.write('</head>\n<body>\n')
        # Write a little info at the top.
        self._doPageHeader()
        self.out.write(self.colors.get(CODESTART,'<pre class="py">\n'))
        return

    def _doXHTMLStyleSheet(self):
        if not self.external:
            # write an embedded style sheet
            self.out.write(self._sendCSSStyle())
        else:
            # write a link to an external style sheet
            self.out.write('<link rel="stylesheet" \
href="pystyle.css" type="text/css"/>\n')
        return

    def _sendXHTMLText(self, toktype, toktext):
        self._sendCSSText(toktype, toktext)

    def _doXHTMLHeader(self):
        # Optional
        if self.header:
            self.out.write('%s\n'%self.header)
        else:
            name = MARKUPDICT.get(NAME)
            self.out.write('<div class="%s"># %s <br/> \
# %s</div><hr/>\n '%(
                name, self.title, time.ctime()))

    def _doXHTMLFooter(self):
        # Optional
        if self.footer:
            self.out.write('%s\n'%self.footer)
        else:
            self.out.write('<hr/><div class="%s"># %s <br/> \
# %s</div>\n'%(MARKUPDICT.get(NAME), self.title, time.ctime()))

    def _doXHTMLEnd(self):
        self._doCSSEnd()
+
+#############################################################################
+
if __name__ == '__main__':
    # Run the command-line interface when executed as a script.
    cli()
+
+#############################################################################
+# PySourceColor.py
+# 2004, 2005 M.E.Farmer Jr.
+# Python license
diff --git a/paste/util/__init__.py b/paste/util/__init__.py
new file mode 100644
index 0000000..ea4ff1e
--- /dev/null
+++ b/paste/util/__init__.py
@@ -0,0 +1,4 @@
+"""
+Package for miscellaneous routines that do not depend on other parts
+of Paste
+"""
diff --git a/paste/util/classinit.py b/paste/util/classinit.py
new file mode 100644
index 0000000..e4e6b28
--- /dev/null
+++ b/paste/util/classinit.py
@@ -0,0 +1,42 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
class ClassInitMeta(type):
    """Metaclass that calls ``__classinit__(cls, new_attrs)`` on every
    class created with it, including subclasses that merely inherit
    ``__classinit__``.

    ``__classinit__`` is implicitly converted to a ``staticmethod`` so it
    can be written as a plain function in the class body.
    """

    def __new__(meta, class_name, bases, new_attrs):
        cls = type.__new__(meta, class_name, bases, new_attrs)
        classinit = new_attrs.get('__classinit__')
        if classinit is not None and not isinstance(classinit, staticmethod):
            # Wrap the raw function so attribute access on the class does
            # not turn it into a (bound/unbound) method.  The original
            # used dict.has_key() and im_func, which do not exist on
            # Python 3 (and im_func also broke pre-wrapped staticmethods).
            setattr(cls, '__classinit__', staticmethod(classinit))
        if hasattr(cls, '__classinit__'):
            # Fires for this class and for subclasses inheriting the hook.
            cls.__classinit__(cls, new_attrs)
        return cls
+
def build_properties(cls, new_attrs):
    """
    Given a class and a new set of attributes (as passed in by
    __classinit__), create or modify properties based on functions
    with special names ending in __get, __set, and __del.
    """
    for name, value in new_attrs.items():
        if not name.endswith(('__get', '__set', '__del')):
            continue
        base = name[:-5]
        if hasattr(cls, base):
            existing = getattr(cls, base)
            if not isinstance(existing, property):
                raise ValueError(
                    "Attribute %s is a %s, not a property; function %s is named like a property"
                    % (base, type(existing), name))
            # Start from the accessors the existing property already has.
            accessors = {'fget': existing.fget,
                         'fset': existing.fset,
                         'fdel': existing.fdel,
                         'doc': existing.__doc__}
        else:
            accessors = {}
        # name[-3:] is 'get', 'set' or 'del' -> fget/fset/fdel.
        accessors['f' + name[-3:]] = value
        if name.endswith('__get') and value.__doc__:
            accessors['doc'] = value.__doc__
        setattr(cls, base, property(**accessors))
diff --git a/paste/util/classinstance.py b/paste/util/classinstance.py
new file mode 100644
index 0000000..6436a44
--- /dev/null
+++ b/paste/util/classinstance.py
@@ -0,0 +1,38 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
class classinstancemethod(object):
    """
    Descriptor that behaves like a class method when looked up on the
    class and like an instance method when looked up on an instance.
    The wrapped function must accept ``self`` and ``cls`` as its first
    two arguments; whichever does not apply for a given call is None.
    """

    def __init__(self, func):
        self.func = func
        self.__doc__ = func.__doc__

    def __get__(self, obj, type=None):
        # Bind whatever we know (instance and/or class) into a wrapper.
        return _methodwrapper(self.func, obj=obj, type=type)
+
class _methodwrapper(object):
    """Callable produced by ``classinstancemethod.__get__``; prepends the
    bound instance (or None) and class to the wrapped function's args."""

    def __init__(self, func, obj, type):
        self.func = func
        self.obj = obj
        self.type = type

    def __call__(self, *args, **kw):
        assert 'self' not in kw and 'cls' not in kw, (
            "You cannot use 'self' or 'cls' arguments to a "
            "classinstancemethod")
        return self.func(*((self.obj, self.type) + args), **kw)

    def __repr__(self):
        # func.func_name is Python 2 only; __name__ exists on both 2 and
        # 3, so repr() no longer raises AttributeError under Python 3.
        if self.obj is None:
            return ('<bound class method %s.%s>'
                    % (self.type.__name__, self.func.__name__))
        else:
            return ('<bound method %s.%s of %r>'
                    % (self.type.__name__, self.func.__name__, self.obj))
diff --git a/paste/util/converters.py b/paste/util/converters.py
new file mode 100644
index 0000000..11451bc
--- /dev/null
+++ b/paste/util/converters.py
@@ -0,0 +1,30 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+import six
+
+
def asbool(obj):
    """Convert *obj* to a bool.

    Strings and byte strings are matched case-insensitively against
    true/yes/on/y/t/1 and false/no/off/n/f/0; any other string raises
    ValueError.  Non-string values fall back to ``bool(obj)``.
    """
    if isinstance(obj, (six.binary_type, six.text_type)):
        if isinstance(obj, six.binary_type):
            # On Python 3 a bytes value never compares equal to the str
            # literals below, so every bytes input used to fall through
            # to the ValueError; decode first.
            obj = obj.decode('latin1')
        obj = obj.strip().lower()
        if obj in ['true', 'yes', 'on', 'y', 't', '1']:
            return True
        elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
            return False
        else:
            raise ValueError(
                "String is not true/false: %r" % obj)
    return bool(obj)
+
def aslist(obj, sep=None, strip=True):
    """Coerce *obj* to a list.

    Strings are split on *sep* (whitespace runs when None), optionally
    stripping each piece; lists and tuples pass through unchanged; None
    becomes []; anything else is wrapped in a single-element list.
    """
    if isinstance(obj, (six.binary_type, six.text_type)):
        pieces = obj.split(sep)
        if strip:
            pieces = [piece.strip() for piece in pieces]
        return pieces
    if isinstance(obj, (list, tuple)):
        return obj
    if obj is None:
        return []
    return [obj]
diff --git a/paste/util/dateinterval.py b/paste/util/dateinterval.py
new file mode 100644
index 0000000..023bce4
--- /dev/null
+++ b/paste/util/dateinterval.py
@@ -0,0 +1,104 @@
+"""
+DateInterval.py
+
+Convert interval strings (in the form of 1w2d, etc) to
+seconds, and back again. Is not exactly about months or
+years (leap years in particular).
+
+Accepts (y)ear, (b)month, (w)eek, (d)ay, (h)our, (m)inute, (s)econd.
+
Exports only the interval_encode and interval_decode functions.
+"""
+
+import re
+
+__all__ = ['interval_decode', 'interval_encode']
+
second = 1
minute = second*60
hour = minute*60
day = hour*24
week = day*7
month = day*30
year = day*365
# Unit suffix -> length in seconds ('b' is used for month since 'm' is
# taken by minute).
timeValues = {
    'y': year,
    'b': month,
    'w': week,
    'd': day,
    'h': hour,
    'm': minute,
    's': second,
    }
# (suffix, seconds) pairs ordered from the largest unit down.
timeOrdered = sorted(timeValues.items(), key=lambda pair: pair[1],
                     reverse=True)


def interval_encode(seconds, include_sign=False):
    """Encodes a number of seconds (representing a time interval)
    into a form like 1h2d3s.

    >>> interval_encode(10)
    '10s'
    >>> interval_encode(493939)
    '5d17h12m19s'
    """
    s = ''
    orig = seconds
    seconds = abs(seconds)
    # Greedily take as many of each unit as fits, largest unit first.
    for char, amount in timeOrdered:
        if seconds >= amount:
            i, seconds = divmod(seconds, amount)
            s += '%i%s' % (i, char)
    if orig < 0:
        s = '-' + s
    elif not orig:
        return '0'
    elif include_sign:
        s = '+' + s
    return s

# One run of digits followed by a single unit letter, e.g. "12h".
_timeRE = re.compile(r'[0-9]+[a-zA-Z]')
def interval_decode(s):
    """Decodes a number in the format 1h4d3m (1 hour, 3 days, 3 minutes)
    into a number of seconds

    >>> interval_decode('40s')
    40
    >>> interval_decode('10000s')
    10000
    >>> interval_decode('3d1w45s')
    864045
    """
    time = 0
    sign = 1
    s = s.strip()
    if s.startswith('-'):
        s = s[1:]
        sign = -1
    elif s.startswith('+'):
        s = s[1:]
    for match in allMatches(s, _timeRE):
        char = match.group(0)[-1].lower()
        if char not in timeValues:
            # @@: should signal error
            continue
        time += int(match.group(0)[:-1]) * timeValues[char]
    # Bug fix: the parsed leading '-' used to be ignored, so
    # interval_decode('-1m') returned 60 instead of -60.
    return sign * time

# @@-sgd 2002-12-23 - this function does not belong in this module, find a better place.
def allMatches(source, regex):
    """Return a list of all non-overlapping matches for regex in source.

    Uses re.finditer, which (unlike the old manual search loop) also
    terminates on zero-width matches.
    """
    return list(regex.finditer(source))
+
if __name__ == '__main__':
    # Run the doctests embedded in this module's functions.
    import doctest
    doctest.testmod()
diff --git a/paste/util/datetimeutil.py b/paste/util/datetimeutil.py
new file mode 100644
index 0000000..3c6d7d9
--- /dev/null
+++ b/paste/util/datetimeutil.py
@@ -0,0 +1,359 @@
+# (c) 2005 Clark C. Evans and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# Some of this code was funded by: http://prometheusresearch.com
+"""
+Date, Time, and Timespan Parsing Utilities
+
+This module contains parsing support to create "human friendly"
+``datetime`` object parsing. The explicit goal of these routines is
+to provide a multi-format date/time support not unlike that found in
+Microsoft Excel. In most approaches, the input is very "strict" to
+prevent errors -- however, this approach is much more liberal since we
+are assuming the user-interface is parroting back the normalized value
+and thus the user has immediate feedback if the data is not typed in
+correctly.
+
+ ``parse_date`` and ``normalize_date``
+
+ These functions take a value like '9 jan 2007' and returns either an
+ ``date`` object, or an ISO 8601 formatted date value such
+ as '2007-01-09'. There is an option to provide an Oracle database
+ style output as well, ``09 JAN 2007``, but this is not the default.
+
+ This module always treats '/' delimiters as using US date order
+ (since the author's clients are US based), hence '1/9/2007' is
+ January 9th. Since this module treats the '-' as following
+ European order this supports both modes of data-entry; together
+ with immediate parroting back the result to the screen, the author
    has found this approach to work well in practice.
+
+ ``parse_time`` and ``normalize_time``
+
+ These functions take a value like '1 pm' and returns either an
+ ``time`` object, or an ISO 8601 formatted 24h clock time
+ such as '13:00'. There is an option to provide for US style time
+ values, '1:00 PM', however this is not the default.
+
+ ``parse_datetime`` and ``normalize_datetime``
+
+ These functions take a value like '9 jan 2007 at 1 pm' and returns
+ either an ``datetime`` object, or an ISO 8601 formatted
+ return (without the T) such as '2007-01-09 13:00'. There is an
+ option to provide for Oracle / US style, '09 JAN 2007 @ 1:00 PM',
+ however this is not the default.
+
+ ``parse_delta`` and ``normalize_delta``
+
+ These functions take a value like '1h 15m' and returns either an
+ ``timedelta`` object, or an 2-decimal fixed-point
+ numerical value in hours, such as '1.25'. The rationale is to
+ support meeting or time-billing lengths, not to be an accurate
    representation in milliseconds. As such not all valid
+ ``timedelta`` values will have a normalized representation.
+
+"""
+from datetime import timedelta, time, date
+from time import localtime
+
+__all__ = ['parse_timedelta', 'normalize_timedelta',
+ 'parse_time', 'normalize_time',
+ 'parse_date', 'normalize_date']
+
def _number(val):
    """Return *val* as an int, or None if it cannot be converted."""
    try:
        return int(val)
    except (TypeError, ValueError):
        # Narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit.
        return None
+
+#
+# timedelta
+#
def parse_timedelta(val):
    """
    Returns a ``timedelta`` parsed from a human-friendly string such as
    '1h 15m', '90m', '2:15' or '1.5h'; returns None for empty input.
    Fractional values are interpreted as hours.
    """
    if not val:
        return None
    val = val.lower()
    if "." in val:
        # Strip unit noise before converting: float('1.5h') used to
        # raise ValueError.
        for noise in "minu:teshour() ":
            val = val.replace(noise, '')
        val = float(val)
        return timedelta(hours=int(val), minutes=60*(val % 1.0))
    fHour = ("h" in val or ":" in val)
    fMin = ("m" in val or ":" in val)
    for noise in "minu:teshour()":
        val = val.replace(noise, ' ')
    val = val.strip()
    val = val.split()
    hr = 0.0
    mi = 0
    val.reverse()
    # Guard the pops: inputs like ':30' used to raise IndexError.
    if fHour and val:
        hr = int(val.pop())
    if fMin and val:
        mi = int(val.pop())
    if len(val) > 0 and not hr:
        # A leftover bare number with no unit is taken as hours.
        hr = int(val.pop())
    return timedelta(hours=hr, minutes=mi)
+
def normalize_timedelta(val):
    """
    produces a normalized string value of the timedelta

    This module returns a normalized time span value consisting of the
    number of hours in fractional form. For example '1h 15min' is
    formatted as 01.25.
    """
    if type(val) == str:
        val = parse_timedelta(val)
    if not val:
        return ''
    # Include .days: spans over 24h used to report only the leftover
    # seconds (timedelta(hours=25) came out as '1.00').  Integer '//'
    # keeps Python 2 division semantics under Python 3.
    total = val.days * 86400 + val.seconds
    hr = total // 3600
    mn = (total % 3600) // 60
    return "%d.%02d" % (hr, mn * 100 // 60)
+
+#
+# time
+#
def parse_time(val):
    """Parse a "human friendly" time string into a ``datetime.time``.

    Accepts forms like '1 pm', '13:00', 'noon', 'now', '130' (1:30) and
    '0130'; returns None for empty input.  A bare hour below 7 with no
    am/pm marker is assumed to be afternoon (so '5' means 17:00).
    """
    if not val:
        return None
    hr = mi = 0
    val = val.lower()
    amflag = (-1 != val.find('a'))  # set if AM is found
    pmflag = (-1 != val.find('p'))  # set if PM is found
    # Replace separators and am/pm markers with spaces so the string
    # splits cleanly below.
    for noise in ":amp.":
        val = val.replace(noise, ' ')
    val = val.split()
    if len(val) > 1:
        # Two tokens: hour and minute.
        hr = int(val[0])
        mi = int(val[1])
    else:
        val = val[0]
        if len(val) < 1:
            pass
        elif 'now' == val:
            tm = localtime()
            hr = tm[3]
            mi = tm[4]
        elif 'noon' == val:
            hr = 12
        elif len(val) < 3:
            # Bare hour, e.g. '5'.
            hr = int(val)
            if not amflag and not pmflag and hr < 7:
                # No am/pm given: assume small hours mean afternoon.
                hr += 12
        elif len(val) < 5:
            # 3-4 digits: trailing two digits are minutes ('130', '0130').
            hr = int(val[:-2])
            mi = int(val[-2:])
        else:
            # NOTE(review): only the first digit is used for longer digit
            # strings -- looks suspicious but is preserved as-is.
            hr = int(val[:1])
    # Apply the am/pm markers to produce a 24h clock value.
    if amflag and hr >= 12:
        hr = hr - 12
    if pmflag and hr < 12:
        hr = hr + 12
    return time(hr, mi)
+
def normalize_time(value, ampm):
    """Format a time (or parseable time string) as 24h 'HH:MM', or as
    'HH:MM AM/PM' on a 12-hour clock when *ampm* is true."""
    if not value:
        return ''
    if type(value) == str:
        value = parse_time(value)
    if not ampm:
        return "%02d:%02d" % (value.hour, value.minute)
    hr, suffix = value.hour, "AM"
    if not 1 <= hr <= 23:
        # Midnight (hour 0) displays as 12 AM.
        hr = 12
    elif hr >= 12:
        suffix = "PM"
        if hr > 12:
            hr -= 12
    return "%02d:%02d %s" % (hr, value.minute, suffix)
+
+#
+# Date Processing
+#
+
_one_day = timedelta(days=1)

# Lower-case month-name abbreviations -> month number, used by _month().
_str2num = {'jan':1, 'feb':2, 'mar':3, 'apr':4, 'may':5, 'jun':6,
            'jul':7, 'aug':8, 'sep':9, 'oct':10, 'nov':11, 'dec':12 }

def _month(val):
    """Return the month number whose abbreviation occurs inside *val*,
    raising TypeError when no known abbreviation is found."""
    for abbrev, number in _str2num.items():
        if abbrev in val:
            return number
    raise TypeError("unknown month '%s'" % val)

# Days per month in a non-leap year.
_days_in_month = {1: 31, 2: 28, 3: 31, 4: 30, 5: 31, 6: 30,
                  7: 31, 8: 31, 9: 30, 10: 31, 11: 30, 12: 31,
                  }
_num2str = {1: 'Jan', 2: 'Feb', 3: 'Mar', 4: 'Apr', 5: 'May', 6: 'Jun',
            7: 'Jul', 8: 'Aug', 9: 'Sep', 10: 'Oct', 11: 'Nov', 12: 'Dec',
            }
_wkdy = ("mon", "tue", "wed", "thu", "fri", "sat", "sun")
+
def parse_date(val):
    """Parse a "human friendly" date string into a ``datetime.date``.

    Handles ISO 'YYYY-MM-DD', 'now'/'today'/weekday abbreviations with
    optional +N/-N day offsets, and many '23 Jun 1999'-style variants;
    unspecified components default to today's values.  Returns None for
    empty input.  Raises TypeError for unknown months or years that are
    not four digits.
    """
    if not(val):
        return None
    val = val.lower()
    now = None

    # optimized check for YYYY-MM-DD
    strict = val.split("-")
    if len(strict) == 3:
        (y, m, d) = strict
        # Drop a '+N' offset or trailing text from the day component.
        if "+" in d:
            d = d.split("+")[0]
        if " " in d:
            d = d.split(" ")[0]
        try:
            now = date(int(y), int(m), int(d))
            # Mark the date as consumed but keep any trailing offset text.
            val = "xxx" + val[10:]
        except ValueError:
            pass

    # allow for 'now', 'mon', 'tue', etc.
    if not now:
        chk = val[:3]
        if chk in ('now','tod'):
            now = date.today()
        elif chk in _wkdy:
            # Advance to the next day with the matching weekday
            # (possibly today).
            now = date.today()
            idx = list(_wkdy).index(chk) + 1
            while now.isoweekday() != idx:
                now += _one_day

    # allow dates to be modified via + or - /w number of days, so
    # that now+3 is three days from now
    if now:
        tail = val[3:].strip()
        tail = tail.replace("+"," +").replace("-"," -")
        for item in tail.split():
            try:
                days = int(item)
            except ValueError:
                pass
            else:
                now += timedelta(days=days)
        return now

    # ok, standard parsing
    yr = mo = dy = None
    for noise in ('/', '-', ',', '*'):
        val = val.replace(noise, ' ')
    for noise in _wkdy:
        val = val.replace(noise, ' ')
    out = []
    last = False
    ldig = False
    # Split runs of digits from runs of letters, e.g. '23jun' -> '23 jun'.
    for ch in val:
        if ch.isdigit():
            if last and not ldig:
                out.append(' ')
            last = ldig = True
        else:
            if ldig:
                out.append(' ')
                ldig = False
            last = True
        out.append(ch)
    val = "".join(out).split()
    if 3 == len(val):
        a = _number(val[0])
        b = _number(val[1])
        c = _number(val[2])
        if len(val[0]) == 4:
            yr = a
            if b: # 1999 6 23
                mo = b
                dy = c
            else: # 1999 Jun 23
                mo = _month(val[1])
                dy = c
        elif a is not None and a > 0:
            yr = c
            if len(val[2]) < 4:
                raise TypeError("four digit year required")
            if b: # 6 23 1999
                dy = b
                mo = a
            else: # 23 Jun 1999
                dy = a
                mo = _month(val[1])
        else: # Jun 23, 2000
            dy = b
            yr = c
            if len(val[2]) < 4:
                raise TypeError("four digit year required")
            mo = _month(val[0])
    elif 2 == len(val):
        a = _number(val[0])
        b = _number(val[1])
        if a is not None and a > 999:
            yr = a
            dy = 1
            if b is not None and b > 0: # 1999 6
                mo = b
            else: # 1999 Jun
                mo = _month(val[1])
        elif a is not None and a > 0:
            if b is not None and b > 999: # 6 1999
                mo = a
                yr = b
                dy = 1
            elif b is not None and b > 0: # 6 23
                mo = a
                dy = b
            else: # 23 Jun
                dy = a
                mo = _month(val[1])
        else:
            if b > 999: # Jun 2001
                yr = b
                dy = 1
            else: # Jun 23
                dy = b
                mo = _month(val[0])
    elif 1 == len(val):
        val = val[0]
        if not val.isdigit():
            mo = _month(val)
            if mo is not None:
                dy = 1
        else:
            v = _number(val)
            val = str(v)
            if 8 == len(val): # 20010623
                yr = _number(val[:4])
                mo = _number(val[4:6])
                dy = _number(val[6:])
            elif len(val) in (3,4):
                if v is not None and v > 1300: # 2004
                    yr = v
                    mo = 1
                    dy = 1
                else: # 1202
                    mo = _number(val[:-2])
                    dy = _number(val[-2:])
            elif v < 32:
                dy = v
            else:
                raise TypeError("four digit year required")
    # Fill any unspecified components from today's date.
    tm = localtime()
    if mo is None:
        mo = tm[1]
    if dy is None:
        dy = tm[2]
    if yr is None:
        yr = tm[0]
    return date(yr, mo, dy)
+
def normalize_date(val, iso8601=True):
    """Format a date (or parseable date string) as ISO 'YYYY-MM-DD', or
    as Oracle-style 'DD Mon YYYY' when *iso8601* is false."""
    if not val:
        return ''
    if type(val) == str:
        val = parse_date(val)
    if not iso8601:
        return "%02d %s %4d" % (val.day, _num2str[val.month], val.year)
    return "%4d-%02d-%02d" % (val.year, val.month, val.day)
diff --git a/paste/util/filemixin.py b/paste/util/filemixin.py
new file mode 100644
index 0000000..b06b039
--- /dev/null
+++ b/paste/util/filemixin.py
@@ -0,0 +1,53 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
class FileMixin(object):

    """
    Used to provide auxiliary methods to objects simulating files.
    Objects must implement write, and read if they are input files.
    Also they should implement close.

    Other methods you may wish to override:
    * flush()
    * seek(offset[, whence])
    * tell()
    * truncate([size])

    Attributes you may wish to provide:
    * closed
    * encoding (you should also respect that in write())
    * mode
    * newlines (hard to support)
    * softspace
    """

    def flush(self):
        # No buffering by default, so nothing to do.
        pass

    def next(self):
        # Iterator protocol.  Bug fix: this used to return '' forever at
        # EOF, which made ``for line in f`` loop endlessly.
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    # Python 3 spells the iterator step __next__.
    __next__ = next

    def __iter__(self):
        # xreadlines() already returned self, so the object is meant to
        # be its own iterator.
        return self

    def readline(self, size=None):
        # @@: This is a lame implementation; but a buffer would probably
        # be necessary for a better implementation
        output = []
        while 1:
            ch = self.read(1)
            if not ch:
                # EOF: return whatever was accumulated (possibly '').
                return ''.join(output)
            output.append(ch)
            if size and size > 0 and len(output) >= size:
                return ''.join(output)
            if ch == '\n':
                # @@: also \r?
                return ''.join(output)

    def xreadlines(self):
        return self

    def writelines(self, lines):
        for line in lines:
            self.write(line)
+
+
diff --git a/paste/util/finddata.py b/paste/util/finddata.py
new file mode 100644
index 0000000..bb7c760
--- /dev/null
+++ b/paste/util/finddata.py
@@ -0,0 +1,98 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+# Note: you may want to copy this into your setup.py file verbatim, as
+# you can't import this from another package, when you don't know if
+# that package is installed yet.
+
+from __future__ import print_function
+import os
+import sys
+from fnmatch import fnmatchcase
+from distutils.util import convert_path
+
# Provided as an attribute, so you can append to these instead
# of replicating them:
standard_exclude = ('*.py', '*.pyc', '*$py.class', '*~', '.*', '*.bak')
standard_exclude_directories = ('.*', 'CVS', '_darcs', './build',
                                './dist', 'EGG-INFO', '*.egg-info')

def _is_excluded(name, path, patterns, show_ignored, kind):
    """Return True when *name*/*path* hits one of *patterns*, optionally
    reporting the reason on stderr."""
    for pattern in patterns:
        if fnmatchcase(name, pattern) or path.lower() == pattern.lower():
            if show_ignored:
                print("%s %s ignored by pattern %s"
                      % (kind, path, pattern), file=sys.stderr)
            return True
    return False

def find_package_data(
    where='.', package='',
    exclude=standard_exclude,
    exclude_directories=standard_exclude_directories,
    only_in_packages=True,
    show_ignored=False):
    """
    Return a dictionary suitable for use in ``package_data``
    in a distutils ``setup.py`` file.

    The dictionary looks like::

        {'package': [files]}

    Where ``files`` is a list of all the files in that package that
    don't match anything in ``exclude``.

    If ``only_in_packages`` is true, then top-level directories that
    are not packages won't be included (but directories under packages
    will).

    Directories matching any pattern in ``exclude_directories`` will
    be ignored; by default directories with leading ``.``, ``CVS``,
    and ``_darcs`` will be ignored.

    If ``show_ignored`` is true, then all the files that aren't
    included in package data are shown on stderr (for debugging
    purposes).

    Note patterns use wildcards, or can be exact paths (including
    leading ``./``), and all searching is case-insensitive.
    """
    out = {}
    # Breadth-first walk; each queue entry restates the keyword state
    # for the directory it describes.
    queue = [(convert_path(where), '', package, only_in_packages)]
    while queue:
        where, prefix, package, only_in_packages = queue.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if os.path.isdir(fn):
                if _is_excluded(name, fn, exclude_directories,
                                show_ignored, "Directory"):
                    continue
                if (not prefix
                        and os.path.isfile(os.path.join(fn, '__init__.py'))):
                    # A sub-package: restart the relative prefix under it.
                    sub_package = name if not package else package + '.' + name
                    queue.append((fn, '', sub_package, False))
                else:
                    # Plain data directory: keep accumulating the prefix.
                    queue.append((fn, prefix + name + '/', package,
                                  only_in_packages))
            elif package or not only_in_packages:
                # A data file inside (or allowed outside) a package.
                if _is_excluded(name, fn, exclude, show_ignored, "File"):
                    continue
                out.setdefault(package, []).append(prefix + name)
    return out
+
if __name__ == '__main__':
    # Debug aid: dump what would be collected from the current directory.
    import pprint
    pprint.pprint(
        find_package_data(show_ignored=True))
diff --git a/paste/util/findpackage.py b/paste/util/findpackage.py
new file mode 100644
index 0000000..9d653e5
--- /dev/null
+++ b/paste/util/findpackage.py
@@ -0,0 +1,26 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+import sys
+import os
+
def find_package(dir):
    """
    Given a directory, finds the equivalent package name.  If it
    is directly in sys.path, returns ''.

    Raises ValueError when *dir* is not under any sys.path entry.
    """
    dir = os.path.abspath(dir)
    orig_dir = dir
    # Materialize the list: on Python 3 map() returns a one-shot
    # iterator, so the repeated 'dir in path' membership tests below
    # exhausted it and the function always raised ValueError.
    path = [os.path.abspath(p) for p in sys.path]
    packages = []
    last_dir = None
    while 1:
        if dir in path:
            return '.'.join(packages)
        # Climb one level, accumulating package components front-first.
        packages.insert(0, os.path.basename(dir))
        dir = os.path.dirname(dir)
        if last_dir == dir:
            # Reached the filesystem root without hitting sys.path.
            raise ValueError(
                "%s is not under any path found in sys.path" % orig_dir)
        last_dir = dir
diff --git a/paste/util/import_string.py b/paste/util/import_string.py
new file mode 100644
index 0000000..a10db18
--- /dev/null
+++ b/paste/util/import_string.py
@@ -0,0 +1,95 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+'imports' a string -- converts a string to a Python object, importing
+any necessary modules and evaluating the expression. Everything
+before the : in an import expression is the module path; everything
+after is an expression to be evaluated in the namespace of that
+module.
+
+Alternately, if no : is present, then import the modules and get the
+attributes as necessary. Arbitrary expressions are not allowed in
+that case.
+"""
+
def eval_import(s):
    """
    Import a module, or import an object from a module.

    A module name like ``foo.bar:baz()`` can be used, where
    ``foo.bar`` is the module, and ``baz()`` is an expression
    evaluated in the context of that module.  Note this is not safe on
    arbitrary strings because of the eval.
    """
    if ':' not in s:
        return simple_import(s)
    module_name, expr = s.split(':', 1)
    module = import_module(module_name)
    # Security note: evaluates a caller-supplied expression -- only use
    # with trusted configuration strings.
    obj = eval(expr, module.__dict__)
    return obj

def simple_import(s):
    """
    Import a module, or import an object from a module.

    A name like ``foo.bar.baz`` can be a module ``foo.bar.baz`` or a
    module ``foo.bar`` with an object ``baz`` in it, or a module
    ``foo`` with an object ``bar`` with an attribute ``baz``.
    """
    parts = s.split('.')
    module = import_module(parts[0])
    name = parts[0]
    parts = parts[1:]
    last_import_error = None
    # First, import as deep a module path as possible...
    while parts:
        name += '.' + parts[0]
        try:
            module = import_module(name)
            parts = parts[1:]
        except ImportError as e:
            last_import_error = e
            break
    # ...then resolve the remaining parts as attributes.
    obj = module
    while parts:
        try:
            # Bug fix: this used getattr(module, ...) at every step, so
            # attribute paths deeper than one level were resolved against
            # the module instead of the previously fetched attribute.
            obj = getattr(obj, parts[0])
        except AttributeError:
            raise ImportError(
                "Cannot find %s in module %r (stopped importing modules with error %s)" % (parts[0], module, last_import_error))
        parts = parts[1:]
    return obj

def import_module(s):
    """
    Import a module by dotted name, returning the module itself (not
    the top-level package, as plain ``__import__`` would).
    """
    mod = __import__(s)
    parts = s.split('.')
    for part in parts[1:]:
        mod = getattr(mod, part)
    return mod

def try_import_module(module_name):
    """
    Imports a module, but catches import errors.  Only catches errors
    when that module doesn't exist; if that module itself has an
    import error it will still get raised.  Returns None if the module
    doesn't exist.
    """
    try:
        return import_module(module_name)
    except ImportError as e:
        if not getattr(e, 'args', None):
            raise
        desc = e.args[0]
        if not desc.startswith('No module named '):
            raise
        desc = desc[len('No module named '):]
        # Python 3 quotes the missing name ("No module named 'foo'") and
        # may report only the missing prefix of a dotted path; the old
        # suffix-only comparison therefore always re-raised on Python 3.
        desc = desc.strip("'\"")
        # The missing module may be any dotted prefix (Python 3 style)
        # or suffix (Python 2 style) of the requested name:
        parts = module_name.split('.')
        candidates = set()
        for i in range(len(parts)):
            candidates.add('.'.join(parts[i:]))
            candidates.add('.'.join(parts[:i+1]))
        if desc in candidates:
            return None
        raise
diff --git a/paste/util/intset.py b/paste/util/intset.py
new file mode 100644
index 0000000..3e026e2
--- /dev/null
+++ b/paste/util/intset.py
@@ -0,0 +1,515 @@
+# -*- coding: iso-8859-15 -*-
+"""Immutable integer set type.
+
+Integer set class.
+
+Copyright (C) 2006, Heiko Wundram.
+Released under the MIT license.
+"""
+import six
+
+# Version information
+# -------------------
+
+__author__ = "Heiko Wundram <me@modelnine.org>"
+__version__ = "0.2"
+__revision__ = "6"
+__date__ = "2006-01-20"
+
+
+# Utility classes
+# ---------------
+
class _Infinity(object):
    """Internal type used to represent infinity values.

    An instance is either minus infinity (``neg=True``) or plus infinity
    (``neg=False``); comparisons are defined against plain integers and
    other _Infinity instances only.
    """

    __slots__ = ["_neg"]

    def __init__(self,neg):
        # neg: True for -infinity, False for +infinity.
        self._neg = neg

    def __lt__(self,value):
        if not isinstance(value, _VALID_TYPES):
            return NotImplemented
        # -inf is less than everything except -inf itself.
        return ( self._neg and
                 not ( isinstance(value,_Infinity) and value._neg ) )

    def __le__(self,value):
        if not isinstance(value, _VALID_TYPES):
            return NotImplemented
        # NOTE(review): +inf <= +inf evaluates False here, which looks
        # inconsistent with __eq__ -- preserved as-is.
        return self._neg

    def __gt__(self,value):
        if not isinstance(value, _VALID_TYPES):
            return NotImplemented
        # +inf is greater than everything except +inf itself.
        return not ( self._neg or
                     ( isinstance(value,_Infinity) and not value._neg ) )

    def __ge__(self,value):
        if not isinstance(value, _VALID_TYPES):
            return NotImplemented
        # NOTE(review): -inf >= -inf evaluates False here; mirrors __le__.
        return not self._neg

    def __eq__(self,value):
        if not isinstance(value, _VALID_TYPES):
            return NotImplemented
        # Equal only to the same-signed infinity, never to an integer.
        return isinstance(value,_Infinity) and self._neg == value._neg

    def __ne__(self,value):
        if not isinstance(value, _VALID_TYPES):
            return NotImplemented
        return not isinstance(value,_Infinity) or self._neg != value._neg

    def __repr__(self):
        # Infinities render as "None", matching the constructor API where
        # None stands for an open bound.
        return "None"
+
+_VALID_TYPES = six.integer_types + (_Infinity,)
+
+
+
+# Constants
+# ---------
+
# Shared singletons for the minus/plus infinity bounds.
_MININF = _Infinity(True)
_MAXINF = _Infinity(False)
+
+
+# Integer set class
+# -----------------
+
+class IntSet(object):
+ """Integer set class with efficient storage in a RLE format of ranges.
+ Supports minus and plus infinity in the range."""
+
+ __slots__ = ["_ranges","_min","_max","_hash"]
+
    def __init__(self,*args,**kwargs):
        """Initialize an integer set. The constructor accepts an unlimited
        number of arguments that may either be tuples in the form of
        (start,stop) where either start or stop may be a number or None to
        represent maximum/minimum in that direction. The range specified by
        (start,stop) is always inclusive (differing from the builtin range
        operator).

        Keyword arguments that can be passed to an integer set are min and
        max, which specify the minimum and maximum number in the set,
        respectively. You can also pass None here to represent minus or plus
        infinity, which is also the default.
        """

        # Special case copy constructor.
        if len(args) == 1 and isinstance(args[0],IntSet):
            if kwargs:
                raise ValueError("No keyword arguments for copy constructor.")
            self._min = args[0]._min
            self._max = args[0]._max
            self._ranges = args[0]._ranges
            self._hash = args[0]._hash
            return

        # Initialize set.  _ranges holds half-open (start, stop) pairs.
        self._ranges = []

        # Process keyword arguments; None means an open (infinite) bound.
        self._min = kwargs.pop("min",_MININF)
        self._max = kwargs.pop("max",_MAXINF)
        if self._min is None:
            self._min = _MININF
        if self._max is None:
            self._max = _MAXINF

        # Check keyword arguments.
        if kwargs:
            raise ValueError("Invalid keyword argument.")
        if not ( isinstance(self._min, six.integer_types) or self._min is _MININF ):
            raise TypeError("Invalid type of min argument.")
        if not ( isinstance(self._max, six.integer_types) or self._max is _MAXINF ):
            raise TypeError("Invalid type of max argument.")
        if ( self._min is not _MININF and self._max is not _MAXINF and
             self._min > self._max ):
            raise ValueError("Minimum is not smaller than maximum.")
        # Convert the inclusive max to an exclusive stop bound.
        if isinstance(self._max, six.integer_types):
            self._max += 1

        # Process arguments.
        for arg in args:
            if isinstance(arg, six.integer_types):
                # A single integer is the one-element range [arg, arg].
                start, stop = arg, arg+1
            elif isinstance(arg,tuple):
                if len(arg) != 2:
                    raise ValueError("Invalid tuple, must be (start,stop).")

                # Process argument; None endpoints inherit the set bounds.
                start, stop = arg
                if start is None:
                    start = self._min
                if stop is None:
                    stop = self._max

                # Check arguments.
                if not ( isinstance(start, six.integer_types) or start is _MININF ):
                    raise TypeError("Invalid type of tuple start.")
                if not ( isinstance(stop, six.integer_types) or stop is _MAXINF ):
                    raise TypeError("Invalid type of tuple stop.")
                if ( start is not _MININF and stop is not _MAXINF and
                     start > stop ):
                    # Empty range; ignore it.
                    continue
                # Convert the inclusive stop to an exclusive bound.
                if isinstance(stop, six.integer_types):
                    stop += 1
            else:
                raise TypeError("Invalid argument.")

            # Clip the range to the set's min/max bounds.
            if start > self._max:
                continue
            elif start < self._min:
                start = self._min
            if stop < self._min:
                continue
            elif stop > self._max:
                stop = self._max
            self._ranges.append((start,stop))

        # Normalize set.
        self._normalize()
+
+ # Utility functions for set operations
+ # ------------------------------------
+
    def _iterranges(self,r1,r2,minval=_MININF,maxval=_MAXINF):
        # Sweep two sorted range tuples in parallel, yielding
        # (states, (lo, hi)) spans where states says whether the span is
        # inside r1 and/or r2.  Index trick: i>>1 selects the range pair,
        # i&1 selects its start (0) or stop (1); hitting a start toggles
        # the corresponding state on, hitting a stop toggles it off.
        # NOTE: the same dict object is yielded every time; consumers
        # must read it immediately and not store it.
        curval = minval
        curstates = {"r1":False,"r2":False}
        imax, jmax = 2*len(r1), 2*len(r2)
        i, j = 0, 0
        while i < imax or j < jmax:
            # Pick the next smallest endpoint from either range list.
            if i < imax and ( ( j < jmax and
                                r1[i>>1][i&1] < r2[j>>1][j&1] ) or
                              j == jmax ):
                cur_r, newname, newstate = r1[i>>1][i&1], "r1", not (i&1)
                i += 1
            else:
                cur_r, newname, newstate = r2[j>>1][j&1], "r2", not (j&1)
                j += 1
            if curval < cur_r:
                if cur_r > maxval:
                    break
                yield curstates, (curval,cur_r)
                curval = cur_r
            curstates[newname] = newstate
        # Tail span up to maxval (states unchanged past the last endpoint).
        if curval < maxval:
            yield curstates, (curval,maxval)
+
+ def _normalize(self):
+ self._ranges.sort()
+ i = 1
+ while i < len(self._ranges):
+ if self._ranges[i][0] < self._ranges[i-1][1]:
+ self._ranges[i-1] = (self._ranges[i-1][0],
+ max(self._ranges[i-1][1],
+ self._ranges[i][1]))
+ del self._ranges[i]
+ else:
+ i += 1
+ self._ranges = tuple(self._ranges)
+ self._hash = hash(self._ranges)
+
    def __coerce__(self,other):
        # Called explicitly by the generated set operators (not via the
        # Python 2 numeric-coercion machinery) to turn integers, tuples
        # and lists into IntSet operands.
        if isinstance(other,IntSet):
            return self, other
        elif isinstance(other, six.integer_types + (tuple,)):
            try:
                return self, self.__class__(other)
            except TypeError:
                # Catch a type error, in that case the structure specified by
                # other is something we can't coerce, return NotImplemented.
                # ValueErrors are not caught, they signal that the data was
                # invalid for the constructor. This is appropriate to signal
                # as a ValueError to the caller.
                return NotImplemented
        elif isinstance(other,list):
            try:
                # A list is unpacked as multiple range arguments.
                return self, self.__class__(*other)
            except TypeError:
                # See above.
                return NotImplemented
        return NotImplemented
+
+ # Set function definitions
+ # ------------------------
+
+ def _make_function(name,type,doc,pall,pany=None):
+ """Makes a function to match two ranges. Accepts two types: either
+ 'set', which defines a function which returns a set with all ranges
+ matching pall (pany is ignored), or 'bool', which returns True if pall
+ matches for all ranges and pany matches for any one range. doc is the
+ dostring to give this function. pany may be none to ignore the any
+ match.
+
+ The predicates get a dict with two keys, 'r1', 'r2', which denote
+ whether the current range is present in range1 (self) and/or range2
+ (other) or none of the two, respectively."""
+
+ if type == "set":
+ def f(self,other):
+ coerced = self.__coerce__(other)
+ if coerced is NotImplemented:
+ return NotImplemented
+ other = coerced[1]
+ newset = self.__class__.__new__(self.__class__)
+ newset._min = min(self._min,other._min)
+ newset._max = max(self._max,other._max)
+ newset._ranges = []
+ for states, (start,stop) in \
+ self._iterranges(self._ranges,other._ranges,
+ newset._min,newset._max):
+ if pall(states):
+ if newset._ranges and newset._ranges[-1][1] == start:
+ newset._ranges[-1] = (newset._ranges[-1][0],stop)
+ else:
+ newset._ranges.append((start,stop))
+ newset._ranges = tuple(newset._ranges)
+ newset._hash = hash(self._ranges)
+ return newset
+ elif type == "bool":
+ def f(self,other):
+ coerced = self.__coerce__(other)
+ if coerced is NotImplemented:
+ return NotImplemented
+ other = coerced[1]
+ _min = min(self._min,other._min)
+ _max = max(self._max,other._max)
+ found = not pany
+ for states, (start,stop) in \
+ self._iterranges(self._ranges,other._ranges,_min,_max):
+ if not pall(states):
+ return False
+ found = found or pany(states)
+ return found
+ else:
+ raise ValueError("Invalid type of function to create.")
+ try:
+ f.func_name = name
+ except TypeError:
+ pass
+ f.func_doc = doc
+ return f
+
    # The following class attributes are generated by _make_function above:
    # each is a method whose behaviour is fully described by its range
    # predicates ('r1' = value in self, 'r2' = value in other).
    # _make_function itself is deleted from the namespace afterwards.

    # Intersection.
    __and__ = _make_function("__and__","set",
                             "Intersection of two sets as a new set.",
                             lambda s: s["r1"] and s["r2"])
    __rand__ = _make_function("__rand__","set",
                              "Intersection of two sets as a new set.",
                              lambda s: s["r1"] and s["r2"])
    intersection = _make_function("intersection","set",
                                  "Intersection of two sets as a new set.",
                                  lambda s: s["r1"] and s["r2"])

    # Union.
    __or__ = _make_function("__or__","set",
                            "Union of two sets as a new set.",
                            lambda s: s["r1"] or s["r2"])
    __ror__ = _make_function("__ror__","set",
                             "Union of two sets as a new set.",
                             lambda s: s["r1"] or s["r2"])
    union = _make_function("union","set",
                           "Union of two sets as a new set.",
                           lambda s: s["r1"] or s["r2"])

    # Difference.
    __sub__ = _make_function("__sub__","set",
                             "Difference of two sets as a new set.",
                             lambda s: s["r1"] and not s["r2"])
    # Note the swapped predicate: reflected difference is other - self.
    __rsub__ = _make_function("__rsub__","set",
                              "Difference of two sets as a new set.",
                              lambda s: s["r2"] and not s["r1"])
    difference = _make_function("difference","set",
                                "Difference of two sets as a new set.",
                                lambda s: s["r1"] and not s["r2"])

    # Symmetric difference.
    __xor__ = _make_function("__xor__","set",
                             "Symmetric difference of two sets as a new set.",
                             lambda s: s["r1"] ^ s["r2"])
    __rxor__ = _make_function("__rxor__","set",
                              "Symmetric difference of two sets as a new set.",
                              lambda s: s["r1"] ^ s["r2"])
    symmetric_difference = _make_function("symmetric_difference","set",
                                          "Symmetric difference of two sets as a new set.",
                                          lambda s: s["r1"] ^ s["r2"])

    # Containership testing.  'bool' functions must satisfy pall for every
    # range and (when given) pany for at least one range.
    __contains__ = _make_function("__contains__","bool",
                                  "Returns true if self is superset of other.",
                                  lambda s: s["r1"] or not s["r2"])
    issubset = _make_function("issubset","bool",
                              "Returns true if self is subset of other.",
                              lambda s: s["r2"] or not s["r1"])
    istruesubset = _make_function("istruesubset","bool",
                                  "Returns true if self is true subset of other.",
                                  lambda s: s["r2"] or not s["r1"],
                                  lambda s: s["r2"] and not s["r1"])
    issuperset = _make_function("issuperset","bool",
                                "Returns true if self is superset of other.",
                                lambda s: s["r1"] or not s["r2"])
    istruesuperset = _make_function("istruesuperset","bool",
                                    "Returns true if self is true superset of other.",
                                    lambda s: s["r1"] or not s["r2"],
                                    lambda s: s["r1"] and not s["r2"])
    overlaps = _make_function("overlaps","bool",
                              "Returns true if self overlaps with other.",
                              lambda s: True,
                              lambda s: s["r1"] and s["r2"])

    # Comparison.
    __eq__ = _make_function("__eq__","bool",
                            "Returns true if self is equal to other.",
                            lambda s: not ( s["r1"] ^ s["r2"] ))
    __ne__ = _make_function("__ne__","bool",
                            "Returns true if self is different to other.",
                            lambda s: True,
                            lambda s: s["r1"] ^ s["r2"])

    # Clean up namespace.
    del _make_function
+
+ # Define other functions.
+ def inverse(self):
+ """Inverse of set as a new set."""
+
+ newset = self.__class__.__new__(self.__class__)
+ newset._min = self._min
+ newset._max = self._max
+ newset._ranges = []
+ laststop = self._min
+ for r in self._ranges:
+ if laststop < r[0]:
+ newset._ranges.append((laststop,r[0]))
+ laststop = r[1]
+ if laststop < self._max:
+ newset._ranges.append((laststop,self._max))
+ return newset
+
+ __invert__ = inverse
+
+ # Hashing
+ # -------
+
    def __hash__(self):
        """Returns a hash value representing this integer set. As the set is
        always stored normalized, the hash value is guaranteed to match for
        matching ranges.

        The value is precomputed (stored in self._hash) whenever _ranges is
        assigned, so this lookup is O(1)."""

        return self._hash
+
+ # Iterating
+ # ---------
+
+ def __len__(self):
+ """Get length of this integer set. In case the length is larger than
+ 2**31 (including infinitely sized integer sets), it raises an
+ OverflowError. This is due to len() restricting the size to
+ 0 <= len < 2**31."""
+
+ if not self._ranges:
+ return 0
+ if self._ranges[0][0] is _MININF or self._ranges[-1][1] is _MAXINF:
+ raise OverflowError("Infinitely sized integer set.")
+ rlen = 0
+ for r in self._ranges:
+ rlen += r[1]-r[0]
+ if rlen >= 2**31:
+ raise OverflowError("Integer set bigger than 2**31.")
+ return rlen
+
+ def len(self):
+ """Returns the length of this integer set as an integer. In case the
+ length is infinite, returns -1. This function exists because of a
+ limitation of the builtin len() function which expects values in
+ the range 0 <= len < 2**31. Use this function in case your integer
+ set might be larger."""
+
+ if not self._ranges:
+ return 0
+ if self._ranges[0][0] is _MININF or self._ranges[-1][1] is _MAXINF:
+ return -1
+ rlen = 0
+ for r in self._ranges:
+ rlen += r[1]-r[0]
+ return rlen
+
+ def __nonzero__(self):
+ """Returns true if this integer set contains at least one item."""
+
+ return bool(self._ranges)
+
+ def __iter__(self):
+ """Iterate over all values in this integer set. Iteration always starts
+ by iterating from lowest to highest over the ranges that are bounded.
+ After processing these, all ranges that are unbounded (maximum 2) are
+ yielded intermixed."""
+
+ ubranges = []
+ for r in self._ranges:
+ if r[0] is _MININF:
+ if r[1] is _MAXINF:
+ ubranges.extend(([0,1],[-1,-1]))
+ else:
+ ubranges.append([r[1]-1,-1])
+ elif r[1] is _MAXINF:
+ ubranges.append([r[0],1])
+ else:
+ for val in xrange(r[0],r[1]):
+ yield val
+ if ubranges:
+ while True:
+ for ubrange in ubranges:
+ yield ubrange[0]
+ ubrange[0] += ubrange[1]
+
+ # Printing
+ # --------
+
+ def __repr__(self):
+ """Return a representation of this integer set. The representation is
+ executable to get an equal integer set."""
+
+ rv = []
+ for start, stop in self._ranges:
+ if ( isinstance(start, six.integer_types) and isinstance(stop, six.integer_types)
+ and stop-start == 1 ):
+ rv.append("%r" % start)
+ elif isinstance(stop, six.integer_types):
+ rv.append("(%r,%r)" % (start,stop-1))
+ else:
+ rv.append("(%r,%r)" % (start,stop))
+ if self._min is not _MININF:
+ rv.append("min=%r" % self._min)
+ if self._max is not _MAXINF:
+ rv.append("max=%r" % self._max)
+ return "%s(%s)" % (self.__class__.__name__,",".join(rv))
+
if __name__ == "__main__":
    # Little test script demonstrating functionality.
    x = IntSet((10,20),30)
    y = IntSet((10,20))
    z = IntSet((10,20),30,(15,19),min=0,max=40)
    print(x)
    # Set operations coerce plain ints and tuples to IntSets.
    print(x&110)
    print(x|110)
    print(x^(15,25))
    print(x-12)
    print(12 in x)
    # Subset/superset predicates.
    print(x.issubset(x))
    print(y.issubset(x))
    print(x.istruesubset(x))
    print(y.istruesubset(x))
    for val in x:
        print(val)
    print(x.inverse())
    # x and z should compare equal: overlapping ranges are normalized.
    print(x == z)
    print(x == y)
    print(x != y)
    print(hash(x))
    print(hash(z))
    print(len(x))
    print(x.len())
diff --git a/paste/util/ip4.py b/paste/util/ip4.py
new file mode 100644
index 0000000..9ce17b8
--- /dev/null
+++ b/paste/util/ip4.py
@@ -0,0 +1,274 @@
+# -*- coding: iso-8859-15 -*-
+"""IP4 address range set implementation.
+
+Implements an IPv4-range type.
+
+Copyright (C) 2006, Heiko Wundram.
+Released under the MIT-license.
+"""
+
+# Version information
+# -------------------
+
+__author__ = "Heiko Wundram <me@modelnine.org>"
+__version__ = "0.2"
+__revision__ = "3"
+__date__ = "2006-01-20"
+
+
+# Imports
+# -------
+
+from paste.util import intset
+import socket
+import six
+
+
+# IP4Range class
+# --------------
+
class IP4Range(intset.IntSet):
    """IP4 address range class with efficient storage of address ranges.
    Supports all set operations."""

    _MINIP4 = 0
    _MAXIP4 = (1<<32) - 1
    _UNITYTRANS = "".join([chr(n) for n in range(256)])
    _IPREMOVE = "0123456789."

    def __init__(self,*args):
        """Initialize an ip4range class. The constructor accepts an unlimited
        number of arguments that may either be tuples in the form (start,stop),
        integers, longs or strings, where start and stop in a tuple may
        also be of the form integer, long or string.

        Passing an integer or long means passing an IPv4-address that's already
        been converted to integer notation, whereas passing a string specifies
        an address where this conversion still has to be done. A string
        address may be in the following formats:

        - 1.2.3.4 - a plain address, interpreted as a single address
        - 1.2.3 - a set of addresses, interpreted as 1.2.3.0-1.2.3.255
        - localhost - hostname to look up, interpreted as single address
        - 1.2.3<->5 - a set of addresses, interpreted as 1.2.3.0-1.2.5.255
        - 1.2.0.0/16 - a set of addresses, interpreted as 1.2.0.0-1.2.255.255

        Only the first three notations are valid if you use a string address in
        a tuple, whereby notation 2 is interpreted as 1.2.3.0 if specified as
        lower bound and 1.2.3.255 if specified as upper bound, not as a range
        of addresses.

        Specifying a range is done with the <-> operator. This is necessary
        because '-' might be present in a hostname. '<->' shouldn't be, ever.
        """

        # Special case copy constructor.
        if len(args) == 1 and isinstance(args[0],IP4Range):
            super(IP4Range,self).__init__(args[0])
            return

        # Convert arguments to tuple syntax.
        args = list(args)
        for i in range(len(args)):
            argval = args[i]
            if isinstance(argval,str):
                if "<->" in argval:
                    # Type 4 address.
                    args[i] = self._parseRange(*argval.split("<->",1))
                    continue
                elif "/" in argval:
                    # Type 5 address.
                    args[i] = self._parseMask(*argval.split("/",1))
                else:
                    # Type 1, 2 or 3.
                    args[i] = self._parseAddrRange(argval)
            elif isinstance(argval,tuple):
                # Bug fix: this previously read len(tuple) - calling len()
                # on the builtin tuple *type* - which raised TypeError for
                # every tuple argument instead of validating its length.
                if len(argval) != 2:
                    raise ValueError("Tuple is of invalid length.")
                addr1, addr2 = argval
                if isinstance(addr1,str):
                    # Lower bound: a short address maps to its lowest value.
                    addr1 = self._parseAddrRange(addr1)[0]
                elif not isinstance(addr1, six.integer_types):
                    raise TypeError("Invalid argument.")
                if isinstance(addr2,str):
                    # Upper bound: a short address maps to its highest value.
                    addr2 = self._parseAddrRange(addr2)[1]
                elif not isinstance(addr2, six.integer_types):
                    raise TypeError("Invalid argument.")
                args[i] = (addr1,addr2)
            elif not isinstance(argval, six.integer_types):
                raise TypeError("Invalid argument.")

        # Initialize the integer set.
        super(IP4Range,self).__init__(min=self._MINIP4,max=self._MAXIP4,*args)

    # Parsing functions
    # -----------------

    def _parseRange(self,addr1,addr2):
        """Parse a 'lower<->upper' pair into an inclusive (start,stop) pair
        of 32-bit integers. A shorter upper bound inherits its missing
        leading parts from the lower bound (e.g. '1.2.3<->5' covers
        1.2.3.0 through 1.2.5.255)."""
        naddr1, naddr1len = _parseAddr(addr1)
        naddr2, naddr2len = _parseAddr(addr2)
        if naddr2len < naddr1len:
            # Copy the high-order parts the upper bound is missing.
            naddr2 += naddr1&(((1<<((naddr1len-naddr2len)*8))-1)<<
                              (naddr2len*8))
            naddr2len = naddr1len
        elif naddr2len > naddr1len:
            raise ValueError("Range has more dots than address.")
        # Align to 32 bits and widen the upper bound to the end of its block.
        naddr1 <<= (4-naddr1len)*8
        naddr2 <<= (4-naddr2len)*8
        naddr2 += (1<<((4-naddr2len)*8))-1
        return (naddr1,naddr2)

    def _parseMask(self,addr,mask):
        """Parse an 'addr/mask' pair into an inclusive (start,stop) pair.
        mask may be a prefix length (0..32) or a dotted host mask such as
        255.255.255.0."""
        naddr, naddrlen = _parseAddr(addr)
        naddr <<= (4-naddrlen)*8
        try:
            if not mask:
                masklen = 0
            else:
                masklen = int(mask)
            if not 0 <= masklen <= 32:
                raise ValueError
        except ValueError:
            # Not a prefix length; try a dotted host mask.
            try:
                # Bug fix: _parseAddr returns an (address, partcount) pair.
                # The original bound the whole tuple to mask, so the bit
                # arithmetic below failed with a TypeError.
                mask, maskparts = _parseAddr(mask,False)
            except ValueError:
                raise ValueError("Mask isn't parseable.")
            # Align a partial dotted mask (e.g. "255.255") to 32 bits, the
            # same way addresses are aligned above.
            mask <<= (4-maskparts)*8
            remaining = 0
            masklen = 0
            if not mask:
                masklen = 0
            else:
                # Count trailing zero bits, then the run of one bits.
                # Bug fix: the original never shifted mask in the first
                # loop, hanging forever on any mask with a trailing zero.
                while not (mask&1):
                    remaining += 1
                    mask >>= 1
                while (mask&1):
                    mask >>= 1
                    masklen += 1
                if remaining+masklen != 32:
                    raise ValueError("Mask isn't a proper host mask.")
        naddr1 = naddr & (((1<<masklen)-1)<<(32-masklen))
        naddr2 = naddr1 + (1<<(32-masklen)) - 1
        return (naddr1,naddr2)

    def _parseAddrRange(self,addr):
        """Parse a plain (possibly partial) address into the inclusive
        (start,stop) pair of the block it denotes."""
        naddr, naddrlen = _parseAddr(addr)
        naddr1 = naddr<<((4-naddrlen)*8)
        naddr2 = ( (naddr<<((4-naddrlen)*8)) +
                   (1<<((4-naddrlen)*8)) - 1 )
        return (naddr1,naddr2)

    # Utility functions
    # -----------------

    def _int2ip(self,num):
        """Convert a 32-bit integer into dotted-quad string notation."""
        rv = []
        for i in range(4):
            rv.append(str(num&255))
            num >>= 8
        return ".".join(reversed(rv))

    # Iterating
    # ---------

    def iteraddresses(self):
        """Returns an iterator which iterates over ips in this iprange. An
        IP is returned in string form (e.g. '1.2.3.4')."""

        for v in super(IP4Range,self).__iter__():
            yield self._int2ip(v)

    def iterranges(self):
        """Returns an iterator which iterates over ip-ip ranges which build
        this iprange if combined. An ip-ip pair is returned in string form
        (e.g. '1.2.3.4-2.3.4.5')."""

        for r in self._ranges:
            if r[1]-r[0] == 1:
                yield self._int2ip(r[0])
            else:
                # Stored ranges are half-open; display inclusive bounds.
                yield '%s-%s' % (self._int2ip(r[0]),self._int2ip(r[1]-1))

    def itermasks(self):
        """Returns an iterator which iterates over ip/mask pairs which build
        this iprange if combined. An IP/Mask pair is returned in string form
        (e.g. '1.2.3.0/24')."""

        for r in self._ranges:
            for v in self._itermasks(r):
                yield v

    def _itermasks(self,r):
        """Split one half-open range into maximal CIDR-aligned blocks."""
        ranges = [r]
        while ranges:
            cur = ranges.pop()
            # Find the widest mask (smallest prefix) whose aligned block
            # fits entirely inside the current range.
            curmask = 0
            while True:
                curmasklen = 1<<(32-curmask)
                start = (cur[0]+curmasklen-1)&(((1<<curmask)-1)<<(32-curmask))
                if start >= cur[0] and start+curmasklen <= cur[1]:
                    break
                else:
                    curmask += 1
            yield "%s/%s" % (self._int2ip(start),curmask)
            # Recurse (iteratively) into the leftovers on either side.
            if cur[0] < start:
                ranges.append((cur[0],start))
            if cur[1] > start+curmasklen:
                ranges.append((start+curmasklen,cur[1]))

    __iter__ = iteraddresses

    # Printing
    # --------

    def __repr__(self):
        """Returns a string which can be used to reconstruct this iprange."""

        rv = []
        for start, stop in self._ranges:
            if stop-start == 1:
                rv.append("%r" % (self._int2ip(start),))
            else:
                rv.append("(%r,%r)" % (self._int2ip(start),
                                       self._int2ip(stop-1)))
        return "%s(%s)" % (self.__class__.__name__,",".join(rv))
+
+def _parseAddr(addr,lookup=True):
+ if lookup and any(ch not in IP4Range._IPREMOVE for ch in addr):
+ try:
+ addr = socket.gethostbyname(addr)
+ except socket.error:
+ raise ValueError("Invalid Hostname as argument.")
+ naddr = 0
+ for naddrpos, part in enumerate(addr.split(".")):
+ if naddrpos >= 4:
+ raise ValueError("Address contains more than four parts.")
+ try:
+ if not part:
+ part = 0
+ else:
+ part = int(part)
+ if not 0 <= part < 256:
+ raise ValueError
+ except ValueError:
+ raise ValueError("Address part out of range.")
+ naddr <<= 8
+ naddr += part
+ return naddr, naddrpos+1
+
def ip2int(addr, lookup=True):
    """Convert a dotted-quad address (or, when lookup is true, a hostname)
    to its 32-bit integer value."""
    value, _parts = _parseAddr(addr, lookup=lookup)
    return value
+
if __name__ == "__main__":
    # Little test script.
    x = IP4Range("172.22.162.250/24")
    y = IP4Range("172.22.162.250","172.22.163.250","172.22.163.253<->255")
    print(x)
    # CIDR decomposition of single ranges and of combined set operations.
    for val in x.itermasks():
        print(val)
    for val in y.itermasks():
        print(val)
    for val in (x|y).itermasks():
        print(val)
    for val in (x^y).iterranges():
        print(val)
    # Default iteration yields individual addresses as strings.
    for val in x:
        print(val)
diff --git a/paste/util/killthread.py b/paste/util/killthread.py
new file mode 100644
index 0000000..4df4f42
--- /dev/null
+++ b/paste/util/killthread.py
@@ -0,0 +1,30 @@
+"""
+Kill a thread, from http://sebulba.wikispaces.com/recipe+thread2
+"""
+import six
+try:
+ import ctypes
+except ImportError:
+ raise ImportError(
+ "You cannot use paste.util.killthread without ctypes installed")
+if not hasattr(ctypes, 'pythonapi'):
+ raise ImportError(
+ "You cannot use paste.util.killthread without ctypes.pythonapi")
+
def async_raise(tid, exctype):
    """Raise exctype asynchronously in the thread identified by tid.

    tid is the value given by thread.get_ident() (an integer).  Raise
    SystemExit to kill a thread.

    Raises TypeError when exctype is not a class or tid is not an integer,
    ValueError when tid does not name a live thread, and SystemError when
    the interpreter reports an unexpected result (after reverting the
    effect as the CPython docs recommend).
    """
    # inspect.isclass performs exactly the check six.class_types+type did
    # on both Python 2 (old- and new-style classes) and Python 3, without
    # needing the third-party six module at call time.
    import inspect
    if not inspect.isclass(exctype):
        raise TypeError("Only types can be raised (not instances)")
    if not isinstance(tid, int):
        raise TypeError("tid must be an integer")
    res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
        ctypes.c_long(tid), ctypes.py_object(exctype))
    if res == 0:
        raise ValueError("invalid thread id")
    elif res != 1:
        # """if it returns a number greater than one, you're in trouble,
        # and you should call it again with exc=NULL to revert the effect"""
        ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(tid), 0)
        raise SystemError("PyThreadState_SetAsyncExc failed")
diff --git a/paste/util/looper.py b/paste/util/looper.py
new file mode 100644
index 0000000..b56358a
--- /dev/null
+++ b/paste/util/looper.py
@@ -0,0 +1,156 @@
+"""
Helper for looping over sequences, particularly in templates.
+
+Often in a loop in a template it's handy to know what's next up,
+previously up, if this is the first or last item in the sequence, etc.
+These can be awkward to manage in a normal Python loop, but using the
+looper you can get a better sense of the context. Use like::
+
+ >>> for loop, item in looper(['a', 'b', 'c']):
+ ... print("%s %s" % (loop.number, item))
+ ... if not loop.last:
+ ... print('---')
+ 1 a
+ ---
+ 2 b
+ ---
+ 3 c
+
+"""
+
+__all__ = ['looper']
+
+import six
+
+
class looper(object):
    """
    Helper for looping (particularly in templates)

    Wraps a sequence; iterating yields ``(loop, item)`` pairs where
    ``loop`` exposes positional context (first, last, number, ...)::

        for loop, item in looper(seq):
            if loop.first:
                ...
    """

    def __init__(self, seq):
        self.seq = seq

    def __iter__(self):
        # Each iteration gets its own stateful cursor.
        return looper_iter(self.seq)

    def __repr__(self):
        return '<%s for %r>' % (self.__class__.__name__, self.seq)
+
class looper_iter(object):
    """Iterator companion of looper: yields (loop_pos, item) pairs."""

    def __init__(self, seq):
        # Materialize so that len() and random access work for loop_pos.
        self.seq = list(seq)
        self.pos = 0

    def __iter__(self):
        return self

    def __next__(self):
        if self.pos >= len(self.seq):
            raise StopIteration
        pair = loop_pos(self.seq, self.pos), self.seq[self.pos]
        self.pos += 1
        return pair

    # Python 2 spells the iterator protocol method "next".
    next = __next__
+
class loop_pos(object):
    """Positional context for one item of a looper iteration."""

    def __init__(self, seq, pos):
        self.seq = seq
        self.pos = pos

    def __repr__(self):
        return '<loop pos=%r at %r>' % (self.seq[self.pos], self.pos)

    @property
    def index(self):
        """Zero-based position."""
        return self.pos

    @property
    def number(self):
        """One-based position."""
        return self.pos + 1

    @property
    def item(self):
        return self.seq[self.pos]

    @property
    def next(self):
        """The following item, or None at the end."""
        try:
            return self.seq[self.pos + 1]
        except IndexError:
            return None

    @property
    def previous(self):
        """The preceding item, or None at the start."""
        if self.pos == 0:
            return None
        return self.seq[self.pos - 1]

    @property
    def odd(self):
        # Odd/even refer to the one-based number, hence the inversion.
        return not self.pos % 2

    @property
    def even(self):
        return self.pos % 2

    @property
    def first(self):
        return self.pos == 0

    @property
    def last(self):
        return self.pos == len(self.seq) - 1

    @property
    def length(self):
        return len(self.seq)

    def first_group(self, getter=None):
        """
        Returns true if this item is the start of a new group,
        where groups mean that some attribute has changed. The getter
        can be None (the item itself changes), an attribute name like
        ``'.attr'``, a function, or a dict key or list index.
        """
        if self.first:
            return True
        return self._compare_group(self.item, self.previous, getter)

    def last_group(self, getter=None):
        """
        Returns true if this item is the end of its group,
        where groups mean that some attribute has changed. The getter
        can be None (the item itself changes), an attribute name like
        ``'.attr'``, a function, or a dict key or list index.
        """
        if self.last:
            return True
        return self._compare_group(self.item, self.next, getter)

    def _compare_group(self, item, other, getter):
        # getter may be None (compare the items directly), a '.attr' or
        # '.meth()' string, a callable, or a dict key / list index.
        if getter is None:
            return item != other
        elif (isinstance(getter, (six.binary_type, six.text_type))
              and getter.startswith('.')):
            getter = getter[1:]
            if getter.endswith('()'):
                getter = getter[:-2]
                return getattr(item, getter)() != getattr(other, getter)()
            else:
                return getattr(item, getter) != getattr(other, getter)
        elif callable(getter):
            return getter(item) != getter(other)
        else:
            return item[getter] != other[getter]
+
diff --git a/paste/util/mimeparse.py b/paste/util/mimeparse.py
new file mode 100644
index 0000000..b796c8b
--- /dev/null
+++ b/paste/util/mimeparse.py
@@ -0,0 +1,160 @@
+"""MIME-Type Parser
+
+This module provides basic functions for handling mime-types. It can handle
+matching mime-types against a list of media-ranges. See section 14.1 of
+the HTTP specification [RFC 2616] for a complete explanation.
+
+ http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
+
+Based on mimeparse 0.1.2 by Joe Gregorio:
+
+ http://code.google.com/p/mimeparse/
+
+Contents:
+ - parse_mime_type(): Parses a mime-type into its component parts.
+ - parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q' quality parameter.
+ - quality(): Determines the quality ('q') of a mime-type when compared against a list of media-ranges.
+ - quality_parsed(): Just like quality() except the second parameter must be pre-parsed.
+ - best_match(): Choose the mime-type with the highest quality ('q') from a list of candidates.
+ - desired_matches(): Filter against a list of desired mime-types in the order the server prefers.
+
+"""
+
+
def parse_mime_type(mime_type):
    """Carves up a mime-type and returns a tuple of the
    (type, subtype, params) where 'params' is a dictionary
    of all the parameters for the media range.
    For example, the media range 'application/xhtml;q=0.5' would
    get parsed into:

    ('application', 'xhtml', {'q': '0.5'})

    Missing or empty type/subtype components default to '*'; parameters
    with an empty key or value are dropped.
    """
    fields = mime_type.split(';')
    full_type, plist = fields[0], fields[1:]
    try:
        major, minor = full_type.split('/', 1)
    except ValueError:
        # No '/' present: treat the whole string as the type.
        major, minor = full_type.strip() or '*', '*'
    else:
        major = major.strip() or '*'
        minor = minor.strip() or '*'
    params = {}
    for entry in plist:
        pair = entry.split('=', 1)
        if len(pair) == 2:
            key, value = pair[0].strip(), pair[1].strip()
            if key and value:
                params[key] = value
    return major, minor, params
+
def parse_media_range(range):
    """Carves up a media range and returns a tuple of the
    (type, subtype, params) where 'params' is a dictionary
    of all the parameters for the media range.
    For example, the media range 'application/*;q=0.5' would
    get parsed into:

    ('application', '*', {'q': '0.5'})

    In addition this function also guarantees that there
    is a value for 'q' in the params dictionary, filling it
    in with a proper default ('1') when it is missing, malformed, or
    outside the range 0..1.
    """
    type, subtype, params = parse_mime_type(range)
    q = params.get('q')
    try:
        q_valid = q is not None and 0 <= float(q) <= 1
    except ValueError:
        q_valid = False
    if not q_valid:
        params['q'] = '1'
    return type, subtype, params
+
def fitness_and_quality_parsed(mime_type, parsed_ranges):
    """Find the best match for a given mime-type against
    a list of media_ranges that have already been
    parsed by parse_media_range(). Returns a tuple of
    the fitness value and the value of the 'q' quality
    parameter of the best match, or (-1, 0) if no match
    was found. Just as for quality_parsed(), 'parsed_ranges'
    must be a list of parsed media ranges.

    Fitness scores an exact type match at 100, an exact subtype match at
    10, and each matching non-q parameter at 1; the first range with the
    highest fitness wins.
    """
    best_fitness, best_fit_q = -1, 0
    target_type, target_subtype, target_params = parse_media_range(mime_type)
    for type, subtype, params in parsed_ranges:
        type_matches = (type == target_type
                        or type == '*' or target_type == '*')
        subtype_matches = (subtype == target_subtype
                           or subtype == '*' or target_subtype == '*')
        if not (type_matches and subtype_matches):
            continue
        fitness = 0
        if type == target_type:
            fitness += 100
        if subtype == target_subtype:
            fitness += 10
        fitness += sum(1 for key in target_params
                       if key != 'q' and key in params
                       and params[key] == target_params[key])
        if fitness > best_fitness:
            best_fitness = fitness
            best_fit_q = params['q']
    return best_fitness, float(best_fit_q)
+
def quality_parsed(mime_type, parsed_ranges):
    """Find the best match for a given mime-type against
    a list of media_ranges that have already been
    parsed by parse_media_range(). Returns the
    'q' quality parameter of the best match, 0 if no
    match was found. This function behaves the same as quality()
    except that 'parsed_ranges' must be a list of
    parsed media ranges."""
    _fitness, q = fitness_and_quality_parsed(mime_type, parsed_ranges)
    return q
+
def quality(mime_type, ranges):
    """Returns the quality 'q' of a mime-type when compared
    against the media-ranges in ranges. For example:

    >>> quality('text/html','text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
    0.7

    """
    parsed = [parse_media_range(r) for r in ranges.split(',')]
    return quality_parsed(mime_type, parsed)
+
def best_match(supported, header):
    """Takes a list of supported mime-types and finds the best
    match for all the media-ranges listed in header. In case of
    ambiguity, whatever comes first in the list will be chosen.
    The value of header must be a string that conforms to the format
    of the HTTP Accept: header. The value of 'supported' is a list
    of mime-types. Returns '' when supported is empty or nothing
    matches with a quality above 0.

    >>> best_match(['application/xbel+xml', 'text/xml'], 'text/*;q=0.5,*/*; q=0.1')
    'text/xml'
    """
    if not supported:
        return ''
    parsed_header = [parse_media_range(r) for r in header.split(',')]
    # Score each candidate; the negated index breaks ties in favour of
    # the earliest entry in 'supported'.
    scored = [(fitness_and_quality_parsed(mime_type, parsed_header), -n)
              for n, mime_type in enumerate(supported)]
    (best_fitness, best_q), neg_index = max(scored)
    if not best_q:
        return ''
    return supported[-neg_index]
+
def desired_matches(desired, header):
    """Takes a list of desired mime-types in the order the server prefers to
    send them regardless of the browsers preference.

    Browsers (such as Firefox) technically want XML over HTML depending on how
    one reads the specification. This function is provided for a server to
    declare a set of desired mime-types it supports, and returns a subset of
    the desired list in the same order should each one be Accepted by the
    browser.

    >>> desired_matches(['text/html', 'application/xml'], \
    ... 'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png')
    ['text/html', 'application/xml']
    >>> desired_matches(['text/html', 'application/xml'], 'application/xml,application/json')
    ['application/xml']
    """
    parsed = [parse_media_range(r) for r in header.split(',')]
    matches = []
    for mimetype in desired:
        # A non-zero quality means the browser accepts this type.
        if quality_parsed(mimetype, parsed):
            matches.append(mimetype)
    return matches
+
diff --git a/paste/util/multidict.py b/paste/util/multidict.py
new file mode 100644
index 0000000..701d1ac
--- /dev/null
+++ b/paste/util/multidict.py
@@ -0,0 +1,429 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+import cgi
+import copy
+import six
+import sys
+
+try:
+ # Python 3
+ from collections import MutableMapping as DictMixin
+except ImportError:
+ # Python 2
+ from UserDict import DictMixin
+
class MultiDict(DictMixin):

    """
    An ordered dictionary that can have multiple values for each key.
    Adds the methods getall, getone, mixed, and add to the normal
    dictionary interface.

    Items are stored internally as an ordered list of (key, value) pairs;
    most operations are therefore O(n) in the number of items.
    """

    def __init__(self, *args, **kw):
        # Accepts at most one positional argument: a mapping (anything with
        # iteritems()/items()) or an iterable of (key, value) pairs.
        # Keyword arguments are appended after the positional items.
        if len(args) > 1:
            raise TypeError(
                "MultiDict can only be called with one positional argument")
        if args:
            if hasattr(args[0], 'iteritems'):
                items = args[0].iteritems()
            elif hasattr(args[0], 'items'):
                items = args[0].items()
            else:
                items = args[0]
            self._items = list(items)
        else:
            self._items = []
        self._items.extend(six.iteritems(kw))

    def __getitem__(self, key):
        """Return the first value stored for key; raise KeyError if absent.

        NOTE(review): unlike getall/__contains__/pop, this does not apply
        the type(k) == type(key) check, so e.g. str/unicode keys comparing
        equal are interchangeable here -- confirm before unifying.
        """
        for k, v in self._items:
            if k == key:
                return v
        raise KeyError(repr(key))

    def __setitem__(self, key, value):
        # Replaces *all* existing values for key with the single new value.
        try:
            del self[key]
        except KeyError:
            pass
        self._items.append((key, value))

    def add(self, key, value):
        """
        Add the key and value, not overwriting any previous value.
        """
        self._items.append((key, value))

    def getall(self, key):
        """
        Return a list of all values matching the key (may be an empty list)
        """
        result = []
        for k, v in self._items:
            # type check keeps e.g. str and unicode (or str and bytes)
            # keys distinct even when they compare equal.
            if type(key) == type(k) and key == k:
                result.append(v)
        return result

    def getone(self, key):
        """
        Get one value matching the key, raising a KeyError if multiple
        values were found.
        """
        v = self.getall(key)
        if not v:
            raise KeyError('Key not found: %r' % key)
        if len(v) > 1:
            raise KeyError('Multiple values match %r: %r' % (key, v))
        return v[0]

    def mixed(self):
        """
        Returns a dictionary where the values are either single
        values, or a list of values when a key/value appears more than
        once in this dictionary. This is similar to the kind of
        dictionary often used to represent the variables in a web
        request.
        """
        result = {}
        multi = {}
        for key, value in self._items:
            if key in result:
                # We do this to not clobber any lists that are
                # *actual* values in this dictionary:
                if key in multi:
                    result[key].append(value)
                else:
                    result[key] = [result[key], value]
                    multi[key] = None
            else:
                result[key] = value
        return result

    def dict_of_lists(self):
        """
        Returns a dictionary where each key is associated with a
        list of values.
        """
        result = {}
        for key, value in self._items:
            if key in result:
                result[key].append(value)
            else:
                result[key] = [value]
        return result

    def __delitem__(self, key):
        """Delete every item whose key matches (type-sensitively)."""
        items = self._items
        found = False
        # Iterate backwards so deletions do not shift unvisited indices.
        for i in range(len(items)-1, -1, -1):
            if type(items[i][0]) == type(key) and items[i][0] == key:
                del items[i]
                found = True
        if not found:
            raise KeyError(repr(key))

    def __contains__(self, key):
        for k, v in self._items:
            if type(k) == type(key) and k == key:
                return True
        return False

    has_key = __contains__

    def clear(self):
        """Remove all items."""
        self._items = []

    def copy(self):
        """Return a shallow copy as a new MultiDict."""
        return MultiDict(self)

    def setdefault(self, key, default=None):
        """Return the first value for key, appending (key, default) first
        if the key is absent.

        NOTE(review): no type(k) check here either -- see __getitem__.
        """
        for k, v in self._items:
            if key == k:
                return v
        self._items.append((key, default))
        return default

    def pop(self, key, *args):
        """Remove and return the first value for key; with a second
        argument, return it instead of raising KeyError when absent."""
        if len(args) > 1:
            raise TypeError("pop expected at most 2 arguments, got "
                            + repr(1 + len(args)))
        for i in range(len(self._items)):
            if type(self._items[i][0]) == type(key) and self._items[i][0] == key:
                v = self._items[i][1]
                del self._items[i]
                return v
        if args:
            return args[0]
        else:
            raise KeyError(repr(key))

    def popitem(self):
        """Remove and return the most recently added (key, value) pair."""
        return self._items.pop()

    def update(self, other=None, **kwargs):
        # Unlike dict.update, this *appends* items rather than replacing
        # existing keys.
        if other is None:
            pass
        elif hasattr(other, 'items'):
            self._items.extend(other.items())
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self._items.append((k, other[k]))
        else:
            for k, v in other:
                self._items.append((k, v))
        if kwargs:
            self.update(kwargs)

    def __repr__(self):
        items = ', '.join(['(%r, %r)' % v for v in self._items])
        return '%s([%s])' % (self.__class__.__name__, items)

    def __len__(self):
        return len(self._items)

    ##
    ## All the iteration:
    ##

    def keys(self):
        # May contain duplicates, one per stored item.
        return [k for k, v in self._items]

    def iterkeys(self):
        for k, v in self._items:
            yield k

    __iter__ = iterkeys

    def items(self):
        # Copy, so callers cannot mutate internal state through it.
        return self._items[:]

    def iteritems(self):
        return iter(self._items)

    def values(self):
        return [v for k, v in self._items]

    def itervalues(self):
        for k, v in self._items:
            yield v
+
class UnicodeMultiDict(DictMixin):
    """
    A MultiDict wrapper that decodes returned values to unicode on the
    fly. Decoding is not applied to assigned values.

    The key/value contents are assumed to be ``str``/``strs`` or
    ``str``/``FieldStorages`` (as is returned by the ``paste.request.parse_``
    functions).

    Can optionally also decode keys when the ``decode_keys`` argument is
    True.

    ``FieldStorage`` instances are cloned, and the clone's ``filename``
    variable is decoded. Its ``name`` variable is decoded when ``decode_keys``
    is enabled.

    """
    def __init__(self, multi=None, encoding=None, errors='strict',
                 decode_keys=False):
        # multi: the wrapped MultiDict -- all storage lives there.
        self.multi = multi
        if encoding is None:
            encoding = sys.getdefaultencoding()
        self.encoding = encoding
        # errors: codec error-handler name ('strict', 'replace', ...).
        self.errors = errors
        self.decode_keys = decode_keys
        if self.decode_keys:
            # Normalize keys already stored in the wrapped dict through
            # _encode_key so that later lookups (which also pass through
            # _encode_key) match.  NOTE(review): this mutates the wrapped
            # MultiDict's internal item list in place.
            items = self.multi._items
            for index, item in enumerate(items):
                key, value = item
                key = self._encode_key(key)
                items[index] = (key, value)

    def _encode_key(self, key):
        # Text key -> bytes for storage/lookup; objects without .encode
        # (already bytes) pass through unchanged.
        if self.decode_keys:
            try:
                key = key.encode(self.encoding, self.errors)
            except AttributeError:
                pass
        return key

    def _decode_key(self, key):
        # Inverse of _encode_key: bytes -> text for presentation.
        if self.decode_keys:
            try:
                key = key.decode(self.encoding, self.errors)
            except AttributeError:
                pass
        return key

    def _decode_value(self, value):
        """
        Decode the specified value to unicode. Assumes value is a ``str`` or
        `FieldStorage`` object.

        ``FieldStorage`` objects are specially handled.
        """
        if isinstance(value, cgi.FieldStorage):
            # decode FieldStorage's field name and filename; work on a
            # clone so the caller's object is untouched.
            value = copy.copy(value)
            if self.decode_keys and isinstance(value.name, six.binary_type):
                value.name = value.name.decode(self.encoding, self.errors)
            if six.PY2:
                # On Python 3 FieldStorage filenames are already text.
                value.filename = value.filename.decode(self.encoding, self.errors)
        else:
            try:
                value = value.decode(self.encoding, self.errors)
            except AttributeError:
                # Already text (no .decode): return unchanged.
                pass
        return value

    def __getitem__(self, key):
        key = self._encode_key(key)
        return self._decode_value(self.multi.__getitem__(key))

    def __setitem__(self, key, value):
        # Assigned values are stored as given -- no decoding on the way in.
        key = self._encode_key(key)
        self.multi.__setitem__(key, value)

    def add(self, key, value):
        """
        Add the key and value, not overwriting any previous value.
        """
        key = self._encode_key(key)
        self.multi.add(key, value)

    def getall(self, key):
        """
        Return a list of all values matching the key (may be an empty list)
        """
        key = self._encode_key(key)
        return [self._decode_value(v) for v in self.multi.getall(key)]

    def getone(self, key):
        """
        Get one value matching the key, raising a KeyError if multiple
        values were found.
        """
        key = self._encode_key(key)
        return self._decode_value(self.multi.getone(key))

    def mixed(self):
        """
        Returns a dictionary where the values are either single
        values, or a list of values when a key/value appears more than
        once in this dictionary. This is similar to the kind of
        dictionary often used to represent the variables in a web
        request.
        """
        unicode_mixed = {}
        for key, value in six.iteritems(self.multi.mixed()):
            if isinstance(value, list):
                value = [self._decode_value(value) for value in value]
            else:
                value = self._decode_value(value)
            unicode_mixed[self._decode_key(key)] = value
        return unicode_mixed

    def dict_of_lists(self):
        """
        Returns a dictionary where each key is associated with a
        list of values.
        """
        unicode_dict = {}
        for key, value in six.iteritems(self.multi.dict_of_lists()):
            value = [self._decode_value(value) for value in value]
            unicode_dict[self._decode_key(key)] = value
        return unicode_dict

    def __delitem__(self, key):
        key = self._encode_key(key)
        self.multi.__delitem__(key)

    def __contains__(self, key):
        key = self._encode_key(key)
        return self.multi.__contains__(key)

    has_key = __contains__

    def clear(self):
        self.multi.clear()

    def copy(self):
        # Shallow copy: the wrapped MultiDict is copied; encoding settings
        # are carried over.
        return UnicodeMultiDict(self.multi.copy(), self.encoding, self.errors,
                                decode_keys=self.decode_keys)

    def setdefault(self, key, default=None):
        key = self._encode_key(key)
        return self._decode_value(self.multi.setdefault(key, default))

    def pop(self, key, *args):
        key = self._encode_key(key)
        return self._decode_value(self.multi.pop(key, *args))

    def popitem(self):
        k, v = self.multi.popitem()
        return (self._decode_key(k), self._decode_value(v))

    def __repr__(self):
        # Uses self.items() so keys/values appear decoded.
        items = ', '.join(['(%r, %r)' % v for v in self.items()])
        return '%s([%s])' % (self.__class__.__name__, items)

    def __len__(self):
        return self.multi.__len__()

    ##
    ## All the iteration:
    ##

    def keys(self):
        return [self._decode_key(k) for k in self.multi.iterkeys()]

    def iterkeys(self):
        for k in self.multi.iterkeys():
            yield self._decode_key(k)

    __iter__ = iterkeys

    def items(self):
        return [(self._decode_key(k), self._decode_value(v)) for \
                    k, v in six.iteritems(self.multi)]

    def iteritems(self):
        for k, v in six.iteritems(self.multi):
            yield (self._decode_key(k), self._decode_value(v))

    def values(self):
        return [self._decode_value(v) for v in self.multi.itervalues()]

    def itervalues(self):
        for v in self.multi.itervalues():
            yield self._decode_value(v)
+
# Doctests collected by doctest.testmod() below; they document the basic
# MultiDict contract: multiple values per key, insertion order preserved,
# __setitem__ replacing all previous values for a key.
__test__ = {
    'general': """
    >>> d = MultiDict(a=1, b=2)
    >>> d['a']
    1
    >>> d.getall('c')
    []
    >>> d.add('a', 2)
    >>> d['a']
    1
    >>> d.getall('a')
    [1, 2]
    >>> d['b'] = 4
    >>> d.getall('b')
    [4]
    >>> d.keys()
    ['a', 'a', 'b']
    >>> d.items()
    [('a', 1), ('a', 2), ('b', 4)]
    >>> d.mixed()
    {'a': [1, 2], 'b': 4}
    >>> MultiDict([('a', 'b')], c=2)
    MultiDict([('a', 'b'), ('c', 2)])
    """}

if __name__ == '__main__':
    # Run the doctests above when executed directly.
    import doctest
    doctest.testmod()
diff --git a/paste/util/quoting.py b/paste/util/quoting.py
new file mode 100644
index 0000000..df0d9da
--- /dev/null
+++ b/paste/util/quoting.py
@@ -0,0 +1,85 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+import cgi
+import six
+import re
+from six.moves import html_entities
+from six.moves.urllib.parse import quote, unquote
+
+
+__all__ = ['html_quote', 'html_unquote', 'url_quote', 'url_unquote',
+ 'strip_html']
+
+default_encoding = 'UTF-8'
+
def html_quote(v, encoding=None):
    r"""
    Quote the value (turned to a string) as HTML. This quotes <, >,
    and quotes:

    ``None`` is rendered as the empty string.  Non-string values are
    converted with ``six.text_type`` first; on Python 2 text is encoded
    to ``encoding`` (default: module ``default_encoding``) before quoting.

    NOTE(review): ``cgi.escape`` was removed in Python 3.8 (``html.escape``
    is the replacement, with slightly different quoting) -- confirm the
    supported interpreter range before relying on this module.
    """
    encoding = encoding or default_encoding
    if v is None:
        return ''
    elif isinstance(v, six.binary_type):
        return cgi.escape(v, 1)
    elif isinstance(v, six.text_type):
        if six.PY3:
            return cgi.escape(v, 1)
        else:
            # Python 2: escape the encoded byte string.
            return cgi.escape(v.encode(encoding), 1)
    else:
        if six.PY3:
            return cgi.escape(six.text_type(v), 1)
        else:
            return cgi.escape(six.text_type(v).encode(encoding), 1)
+
# Matches a named HTML entity such as "&amp;" or "&lt;".
_unquote_re = re.compile(r'&([a-zA-Z]+);')

def _entity_subber(match, name2c=html_entities.name2codepoint):
    """re.sub callback: replace a named entity with its character.

    Unknown entity names are left untouched (the whole match is returned).
    """
    codepoint = name2c.get(match.group(1))
    if not codepoint:
        return match.group(0)
    return six.unichr(codepoint)
+
def html_unquote(s, encoding=None):
    r"""Expand named HTML entities in ``s`` to their characters.

    Byte strings are first decoded using ``encoding`` (or the module's
    ``default_encoding``); the result is always a text string.
    """
    text = s
    if isinstance(text, six.binary_type):
        text = text.decode(encoding or default_encoding)
    return _unquote_re.sub(_entity_subber, text)
+
def strip_html(s):
    """Remove HTML tags from ``s``, then expand named entities."""
    # should this use html_unquote?  (kept from the original author --
    # the entity expansion below effectively does)
    without_tags = re.sub('<.*?>', '', s)
    return html_unquote(without_tags)
+
def no_quote(s):
    """Identity "quoter": return ``s`` unchanged.

    Useful where an API expects a quoting callable but no escaping is
    desired.
    """
    return s
+
# "->" with optional whitespace between the dash and the bracket; this is
# what would let text close an HTML comment early.
_comment_quote_re = re.compile(r'\-\s*\>')
# Everything but \r, \n, \t (control characters); currently unused by
# comment_quote but kept as part of the module's public surface.
_bad_chars_re = re.compile('[\x00-\x08\x0b-\x0c\x0e-\x1f]')

def comment_quote(s):
    """Quote text so it cannot terminate an enclosing HTML comment.

    Every ``->`` sequence (with optional internal whitespace) becomes
    ``-&gt;``.  Non-string input is converted with ``str()`` first.
    """
    text = str(s)
    return _comment_quote_re.sub('-&gt;', text)
+
# Plain passthroughs to urllib's quote/unquote, re-exported under the
# module's naming convention (see __all__).
url_quote = quote
url_unquote = unquote

if __name__ == '__main__':
    # Run this module's doctests when executed directly.
    import doctest
    doctest.testmod()
diff --git a/paste/util/scgiserver.py b/paste/util/scgiserver.py
new file mode 100644
index 0000000..1c86c86
--- /dev/null
+++ b/paste/util/scgiserver.py
@@ -0,0 +1,172 @@
+"""
+SCGI-->WSGI application proxy, "SWAP".
+
+(Originally written by Titus Brown.)
+
+This lets an SCGI front-end like mod_scgi be used to execute WSGI
+application objects. To use it, subclass the SWAP class like so::
+
+ class TestAppHandler(swap.SWAP):
+ def __init__(self, *args, **kwargs):
+ self.prefix = '/canal'
+ self.app_obj = TestAppClass
+ swap.SWAP.__init__(self, *args, **kwargs)
+
+where 'TestAppClass' is the application object from WSGI and '/canal'
+is the prefix for what is served by the SCGI Web-server-side process.
+
+Then execute the SCGI handler "as usual" by doing something like this::
+
+ scgi_server.SCGIServer(TestAppHandler, port=4000).serve()
+
+and point mod_scgi (or whatever your SCGI front end is) at port 4000.
+
+Kudos to the WSGI folk for writing a nice PEP & the Quixote folk for
+writing a nice extensible SCGI server for Python!
+"""
+
+import six
+import sys
+import time
+from scgi import scgi_server
+
def debug(msg):
    """Write ``msg`` to stderr, prefixed with a local-time timestamp."""
    stamp = time.strftime("%Y-%m-%d %H:%M:%S",
                          time.localtime(time.time()))
    sys.stderr.write("[%s] %s\n" % (stamp, msg))
+
class SWAP(scgi_server.SCGIHandler):
    """
    SCGI->WSGI application proxy: let an SCGI server execute WSGI
    application objects.
    """
    # Subclasses must set both of these (see the module docstring).
    app_obj = None   # the WSGI application callable
    prefix = None    # URL prefix served by the SCGI front end

    def __init__(self, *args, **kwargs):
        assert self.app_obj, "must set app_obj"
        assert self.prefix is not None, "must set prefix"
        args = (self,) + args
        scgi_server.SCGIHandler.__init__(*args, **kwargs)

    def handle_connection(self, conn):
        """
        Handle an individual connection.

        Reads the SCGI request from ``conn``, builds a WSGI environ,
        runs the application while buffering its output, then writes
        status, headers and body back and closes the connection.
        """
        input = conn.makefile("r")
        output = conn.makefile("w")

        # Build the WSGI environ from the SCGI request headers.
        environ = self.read_env(input)
        environ['wsgi.input'] = input
        environ['wsgi.errors'] = sys.stderr
        environ['wsgi.version'] = (1, 0)
        environ['wsgi.multithread'] = False
        environ['wsgi.multiprocess'] = True
        environ['wsgi.run_once'] = False

        # dunno how SCGI does HTTPS signalling; can't test it myself... @CTB
        if environ.get('HTTPS','off') in ('on','1'):
            environ['wsgi.url_scheme'] = 'https'
        else:
            environ['wsgi.url_scheme'] = 'http'

        ## SCGI does some weird environ manglement.  We need to set
        ## SCRIPT_NAME from 'prefix' and then set PATH_INFO from
        ## REQUEST_URI.

        prefix = self.prefix
        # Strip the mount prefix and any query string to get PATH_INFO.
        path = environ['REQUEST_URI'][len(prefix):].split('?', 1)[0]

        environ['SCRIPT_NAME'] = prefix
        environ['PATH_INFO'] = path

        headers_set = []
        headers_sent = []
        chunks = []
        def write(data):
            # PEP 333 write() callable: buffer everything; the body is
            # flushed after iteration completes below.
            chunks.append(data)

        def start_response(status, response_headers, exc_info=None):
            if exc_info:
                try:
                    if headers_sent:
                        # Re-raise original exception if headers sent
                        six.reraise(exc_info[0], exc_info[1], exc_info[2])
                finally:
                    exc_info = None # avoid dangling circular ref
            elif headers_set:
                raise AssertionError("Headers already set!")

            headers_set[:] = [status, response_headers]
            return write

        ###

        result = self.app_obj(environ, start_response)
        try:
            for data in result:
                chunks.append(data)

            # Before the first output, send the stored headers
            if not headers_set:
                # Error -- the app never called start_response
                status = '500 Server Error'
                response_headers = [('Content-type', 'text/html')]
                chunks = ["XXX start_response never called"]
            else:
                status, response_headers = headers_sent[:] = headers_set

            output.write('Status: %s\r\n' % status)
            for header in response_headers:
                output.write('%s: %s\r\n' % header)
            output.write('\r\n')

            for data in chunks:
                output.write(data)
        finally:
            # Per PEP 333, always close the result iterable if it can be.
            if hasattr(result,'close'):
                result.close()

        # SCGI backends use connection closing to signal 'fini'.
        try:
            input.close()
            output.close()
            conn.close()
        except IOError as err:
            debug("IOError while closing connection ignored: %s" % err)
+
+
def serve_application(application, prefix, port=None, host=None, max_children=None):
    """
    Serve the specified WSGI application via SCGI proxy.

    ``application``
        The WSGI application to serve.

    ``prefix``
        The prefix for what is served by the SCGI Web-server-side process.

    ``port``
        Optional port to bind the SCGI proxy to. Defaults to SCGIServer's
        default port value.

    ``host``
        Optional host to bind the SCGI proxy to. Defaults to SCGIServer's
        default host value.

    ``max_children``
        Optional maximum number of child processes the SCGIServer will
        spawn. Defaults to SCGIServer's default max_children value.
    """
    # Bind prefix/app into a handler subclass via closure.
    class SCGIAppHandler(SWAP):
        def __init__ (self, *args, **kwargs):
            self.prefix = prefix
            self.app_obj = application
            SWAP.__init__(self, *args, **kwargs)

    kwargs = dict(handler_class=SCGIAppHandler)
    # Forward only explicitly-given options so SCGIServer's own defaults
    # apply otherwise; locals() keeps the three cases uniform.
    for kwarg in ('host', 'port', 'max_children'):
        if locals()[kwarg] is not None:
            kwargs[kwarg] = locals()[kwarg]

    scgi_server.SCGIServer(**kwargs).serve()
diff --git a/paste/util/template.py b/paste/util/template.py
new file mode 100644
index 0000000..5a63664
--- /dev/null
+++ b/paste/util/template.py
@@ -0,0 +1,756 @@
+"""
+A small templating language
+
+This implements a small templating language for use internally in
+Paste and Paste Script. This language implements if/elif/else,
+for/continue/break, expressions, and blocks of Python code. The
+syntax is::
+
+ {{any expression (function calls etc)}}
+ {{any expression | filter}}
+ {{for x in y}}...{{endfor}}
+ {{if x}}x{{elif y}}y{{else}}z{{endif}}
+ {{py:x=1}}
+ {{py:
+ def foo(bar):
+ return 'baz'
+ }}
+ {{default var = default_value}}
+ {{# comment}}
+
+You use this with the ``Template`` class or the ``sub`` shortcut.
+The ``Template`` class takes the template string and the name of
+the template (for errors) and a default namespace. Then (like
+``string.Template``) you can call the ``tmpl.substitute(**kw)``
+method to make a substitution (or ``tmpl.substitute(a_dict)``).
+
+``sub(content, **kw)`` substitutes the template immediately. You
+can use ``__name='tmpl.html'`` to set the name of the template.
+
+If there are syntax errors ``TemplateError`` will be raised.
+"""
+
+import re
+import six
+import sys
+import cgi
+from six.moves.urllib.parse import quote
+from paste.util.looper import looper
+
+__all__ = ['TemplateError', 'Template', 'sub', 'HTMLTemplate',
+ 'sub_html', 'html', 'bunch']
+
+token_re = re.compile(r'\{\{|\}\}')
+in_re = re.compile(r'\s+in\s+')
+var_re = re.compile(r'^[a-z_][a-z0-9_]*$', re.I)
+
class TemplateError(Exception):
    """Exception raised while parsing a template.

    Carries the offending ``(line, column)`` position and, optionally,
    the template name; both appear in the rendered message.
    """

    def __init__(self, message, position, name=None):
        self.message = message
        self.position = position
        self.name = name

    def __str__(self):
        location = 'line %s column %s' % (self.position[0], self.position[1])
        msg = '%s at %s' % (self.message, location)
        if self.name:
            msg = '%s in %s' % (msg, self.name)
        return msg
+
class _TemplateContinue(Exception):
    # Internal control-flow signal raised for {{continue}}; caught by
    # Template._interpret_for.
    pass

class _TemplateBreak(Exception):
    # Internal control-flow signal raised for {{break}}; caught by
    # Template._interpret_for.
    pass
+
class Template(object):
    """A compiled template: parse ``content`` once, render many times.

    Supports ``{{expr}}`` (with ``| filter`` chains), ``{{if}}/{{elif}}/
    {{else}}``, ``{{for}}`` with ``{{continue}}``/``{{break}}``,
    ``{{py:...}}`` code blocks, ``{{default var=value}}`` and
    ``{{# comments}}``.  Call ``substitute`` with a dict or keyword
    arguments to render.
    """

    default_namespace = {
        'start_braces': '{{',
        'end_braces': '}}',
        'looper': looper,
    }

    # Encoding used when coercing between str and unicode in _repr().
    default_encoding = 'utf8'

    def __init__(self, content, name=None, namespace=None):
        """``content``: template source; ``name``: used in error messages;
        ``namespace``: base namespace applied to every substitution."""
        self.content = content
        # Render to unicode iff the source itself is unicode.
        self._unicode = isinstance(content, six.text_type)
        self.name = name
        self._parsed = parse(content, name=name)
        if namespace is None:
            namespace = {}
        self.namespace = namespace

    def from_filename(cls, filename, namespace=None, encoding=None):
        """Alternate constructor: read the template source from a file."""
        # Context manager ensures the handle is closed even if read fails.
        with open(filename, 'rb') as f:
            c = f.read()
        if encoding:
            c = c.decode(encoding)
        return cls(content=c, name=filename, namespace=namespace)

    from_filename = classmethod(from_filename)

    def __repr__(self):
        return '<%s %s name=%r>' % (
            self.__class__.__name__,
            hex(id(self))[2:], self.name)

    def substitute(self, *args, **kw):
        """Render the template; pass a single dict OR keyword arguments."""
        if args:
            if kw:
                raise TypeError(
                    "You can only give positional *or* keyword arguments")
            if len(args) > 1:
                # Fixed message typo ("give on positional").
                raise TypeError(
                    "You can only give one positional argument")
            kw = args[0]
        ns = self.default_namespace.copy()
        ns.update(self.namespace)
        ns.update(kw)
        result = self._interpret(ns)
        return result

    def _interpret(self, ns):
        __traceback_hide__ = True
        parts = []
        self._interpret_codes(self._parsed, ns, out=parts)
        return ''.join(parts)

    def _interpret_codes(self, codes, ns, out):
        # Literal chunks are plain strings; directives are tuples.
        __traceback_hide__ = True
        for item in codes:
            if isinstance(item, six.string_types):
                out.append(item)
            else:
                self._interpret_code(item, ns, out)

    def _interpret_code(self, code, ns, out):
        """Dispatch one parsed directive tuple ``(name, pos, ...)``."""
        __traceback_hide__ = True
        name, pos = code[0], code[1]
        if name == 'py':
            self._exec(code[2], ns, pos)
        elif name == 'continue':
            raise _TemplateContinue()
        elif name == 'break':
            raise _TemplateBreak()
        elif name == 'for':
            vars, expr, content = code[2], code[3], code[4]
            expr = self._eval(expr, ns, pos)
            self._interpret_for(vars, expr, content, ns, out)
        elif name == 'cond':
            parts = code[2:]
            self._interpret_if(parts, ns, out)
        elif name == 'expr':
            # {{expr | f1 | f2}}: evaluate, then pipe through each filter.
            parts = code[2].split('|')
            base = self._eval(parts[0], ns, pos)
            for part in parts[1:]:
                func = self._eval(part, ns, pos)
                base = func(base)
            out.append(self._repr(base, pos))
        elif name == 'default':
            # Only assign if the variable was not supplied by the caller.
            var, expr = code[2], code[3]
            if var not in ns:
                result = self._eval(expr, ns, pos)
                ns[var] = result
        elif name == 'comment':
            return
        else:
            assert 0, "Unknown code: %r" % name

    def _interpret_for(self, vars, expr, content, ns, out):
        __traceback_hide__ = True
        for item in expr:
            if len(vars) == 1:
                ns[vars[0]] = item
            else:
                if len(vars) != len(item):
                    raise ValueError(
                        'Need %i items to unpack (got %i items)'
                        % (len(vars), len(item)))
                for name, value in zip(vars, item):
                    ns[name] = value
            try:
                self._interpret_codes(content, ns, out)
            except _TemplateContinue:
                continue
            except _TemplateBreak:
                break

    def _interpret_if(self, parts, ns, out):
        __traceback_hide__ = True
        # @@: if/else/else gets through
        for part in parts:
            assert not isinstance(part, six.string_types)
            name, pos = part[0], part[1]
            if name == 'else':
                result = True
            else:
                result = self._eval(part[2], ns, pos)
            if result:
                self._interpret_codes(part[3], ns, out)
                break

    def _eval(self, code, ns, pos):
        """eval() ``code`` in ``ns``, annotating errors with line/column."""
        __traceback_hide__ = True
        try:
            value = eval(code, ns)
            return value
        except:
            exc_info = sys.exc_info()
            e = exc_info[1]
            # Fixed: getattr() needs a default here, otherwise a missing
            # .args would itself raise AttributeError.
            if getattr(e, 'args', None):
                arg0 = e.args[0]
            else:
                arg0 = str(e)
            e.args = (self._add_line_info(arg0, pos),)
            six.reraise(exc_info[0], e, exc_info[2])

    def _exec(self, code, ns, pos):
        """exec ``code`` in ``ns``, annotating errors with line/column."""
        __traceback_hide__ = True
        try:
            six.exec_(code, ns)
        except:
            exc_info = sys.exc_info()
            e = exc_info[1]
            # Guard against exceptions with empty .args (consistent with
            # the handling in _eval above).
            if getattr(e, 'args', None):
                e.args = (self._add_line_info(e.args[0], pos),)
            else:
                e.args = (self._add_line_info(str(e), pos),)
            six.reraise(exc_info[0], e, exc_info[2])

    def _repr(self, value, pos):
        """Convert ``value`` to the output string type (str or unicode)."""
        __traceback_hide__ = True
        try:
            if value is None:
                return ''
            if self._unicode:
                try:
                    value = six.text_type(value)
                except UnicodeDecodeError:
                    value = str(value)
            else:
                value = str(value)
        except:
            exc_info = sys.exc_info()
            e = exc_info[1]
            e.args = (self._add_line_info(e.args[0], pos),)
            six.reraise(exc_info[0], e, exc_info[2])
        else:
            # Fixed: this branch previously read self.decode_encoding, an
            # attribute that does not exist (AttributeError at runtime),
            # and raised UnicodeDecodeError/UnicodeEncodeError with a
            # single argument, which is an invalid constructor call
            # (TypeError).  Use default_encoding and construct the
            # exceptions with their required five arguments.
            if self._unicode and isinstance(value, six.binary_type):
                if not self.default_encoding:
                    raise UnicodeDecodeError(
                        'ascii', value, 0, max(len(value), 1),
                        'Cannot decode str value %r into unicode '
                        '(no default_encoding provided)' % value)
                value = value.decode(self.default_encoding)
            elif not self._unicode and isinstance(value, six.text_type):
                if not self.default_encoding:
                    raise UnicodeEncodeError(
                        'ascii', value, 0, max(len(value), 1),
                        'Cannot encode unicode value %r into str '
                        '(no default_encoding provided)' % value)
                value = value.encode(self.default_encoding)
            return value

    def _add_line_info(self, msg, pos):
        """Append "at line L column C [in file NAME]" to ``msg``."""
        msg = "%s at line %s column %s" % (
            msg, pos[0], pos[1])
        if self.name:
            msg += " in file %s" % self.name
        return msg
+
def sub(content, **kw):
    """One-shot substitution: compile ``content`` and render it with ``kw``.

    The special keyword ``__name`` sets the template name used in error
    messages.
    """
    template = Template(content, name=kw.get('__name'))
    return template.substitute(kw)
+
def paste_script_template_renderer(content, vars, filename=None):
    """Render ``content`` with ``vars`` (Paste Script's renderer hook)."""
    return Template(content, name=filename).substitute(vars)
+
class bunch(dict):
    """A dict whose entries are also reachable as attributes.

    If a ``'default'`` key is present, lookups of missing keys return
    that value instead of raising ``KeyError``.
    """

    def __init__(self, **kw):
        for attr_name, attr_value in kw.items():
            setattr(self, attr_name, attr_value)

    def __setattr__(self, name, value):
        # Attribute writes go straight into the dict.
        self[name] = value

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __getitem__(self, key):
        if 'default' not in self:
            return dict.__getitem__(self, key)
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            return dict.__getitem__(self, 'default')

    def __repr__(self):
        pairs = sorted(self.items())
        body = ' '.join('%s=%r' % pair for pair in pairs)
        return '<%s %s>' % (self.__class__.__name__, body)
+
+############################################################
+## HTML Templating
+############################################################
+
class html(object):
    """Marker wrapper for strings that are already HTML-safe.

    ``HTMLTemplate._repr`` inserts ``html`` instances verbatim instead
    of escaping them.
    """

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return self.value

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.value)
+
def html_quote(value):
    """HTML-escape ``value`` (also quoting ``"``); ``None`` becomes ''.

    Non-strings are converted first (preferring ``__unicode__`` on
    Python 2); a Python 2 unicode result is encoded to ASCII with XML
    character references so the output is always a ``str``.

    NOTE(review): relies on ``cgi.escape``, removed in Python 3.8 --
    confirm the supported interpreter range.
    """
    if value is None:
        return ''
    if not isinstance(value, six.string_types):
        if six.PY2 and hasattr(value, '__unicode__'):
            value = unicode(value)
        else:
            value = str(value)
    value = cgi.escape(value, 1)
    if six.PY2 and isinstance(value, unicode):
        value = value.encode('ascii', 'xmlcharrefreplace')
    return value
+
def url(v):
    """URL-quote ``v`` after coercing it to a string.

    On Python 2 unicode values are encoded to UTF-8 before quoting so
    ``quote`` always receives a byte string.
    """
    if not isinstance(v, six.string_types):
        if six.PY2 and hasattr(v, '__unicode__'):
            v = unicode(v)
        else:
            v = str(v)
    if six.PY2 and isinstance(v, unicode):
        v = v.encode('utf8')
    return quote(v)
+
def attr(**kw):
    """Format keyword arguments as a quoted HTML attribute string.

    ``None`` values are skipped; a trailing underscore is stripped from
    names (so ``class_`` renders as ``class``).  Returns an ``html``
    instance so templates will not re-escape the result.
    """
    rendered = []
    for name, value in sorted(kw.items()):
        if value is None:
            continue
        if name.endswith('_'):
            name = name[:-1]
        rendered.append('%s="%s"' % (html_quote(name), html_quote(value)))
    return html(' '.join(rendered))
+
class HTMLTemplate(Template):
    """``Template`` subclass that HTML-escapes substituted values.

    Values wrapped in ``html(...)`` are inserted verbatim; the ``attr``
    and ``url`` helpers are added to the default namespace.
    """

    default_namespace = Template.default_namespace.copy()
    default_namespace.update(dict(
        html=html,
        attr=attr,
        url=url,
        ))

    def _repr(self, value, pos):
        # Render as usual, then quote -- unless the value opted out by
        # being an html() instance.
        plain = Template._repr(self, value, pos)
        if isinstance(value, html):
            return plain
        else:
            return html_quote(plain)
+
def sub_html(content, **kw):
    """One-shot HTML substitution (with auto-quoting); see ``sub``.

    The special keyword ``__name`` sets the template name used in error
    messages.
    """
    template = HTMLTemplate(content, name=kw.get('__name'))
    return template.substitute(kw)
+
+
+############################################################
+## Lexing and Parsing
+############################################################
+
def lex(s, name=None, trim_whitespace=True):
    """
    Lex a string into chunks:

    >>> lex('hey')
    ['hey']
    >>> lex('hey {{you}}')
    ['hey ', ('you', (1, 7))]
    >>> lex('hey {{')
    Traceback (most recent call last):
        ...
    TemplateError: No }} to finish last expression at line 1 column 7
    >>> lex('hey }}')
    Traceback (most recent call last):
        ...
    TemplateError: }} outside expression at line 1 column 7
    >>> lex('hey {{ {{')
    Traceback (most recent call last):
        ...
    TemplateError: {{ inside expression at line 1 column 10

    """
    # Literal text becomes plain strings; expression bodies become
    # (text, (line, column)) tuples.
    in_expr = False
    chunks = []
    last = 0            # index just past the previous token
    last_pos = (1, 1)   # position of the most recently seen token
    for match in token_re.finditer(s):
        expr = match.group(0)
        pos = find_position(s, match.end())
        if expr == '{{' and in_expr:
            raise TemplateError('{{ inside expression', position=pos,
                                name=name)
        elif expr == '}}' and not in_expr:
            raise TemplateError('}} outside expression', position=pos,
                                name=name)
        if expr == '{{':
            # Flush the literal text preceding the expression.
            part = s[last:match.start()]
            if part:
                chunks.append(part)
            in_expr = True
        else:
            # Closing }}: record the expression body with its position.
            chunks.append((s[last:match.start()], last_pos))
            in_expr = False
        last = match.end()
        last_pos = pos
    if in_expr:
        raise TemplateError('No }} to finish last expression',
                            name=name, position=last_pos)
    part = s[last:]
    if part:
        chunks.append(part)
    if trim_whitespace:
        chunks = trim_lex(chunks)
    return chunks
+
# Directives that can stand alone on a line ({{if ...}}, {{for ...}},
# {{py:...}}) or one of the single-word directives below; used by
# trim_lex to decide when surrounding whitespace should be removed.
statement_re = re.compile(r'^(?:if |elif |else |for |py:)')
single_statements = ['endif', 'endfor', 'continue', 'break']
# Whitespace runs adjacent to a newline, trimmed around such directives.
trail_whitespace_re = re.compile(r'\n[\t ]*$')
lead_whitespace_re = re.compile(r'^[\t ]*\n')
+
def trim_lex(tokens):
    r"""
    Takes a lexed set of tokens, and removes whitespace when there is
    a directive on a line by itself:

    >>> tokens = lex('{{if x}}\nx\n{{endif}}\ny', trim_whitespace=False)
    >>> tokens
    [('if x', (1, 3)), '\nx\n', ('endif', (3, 3)), '\ny']
    >>> trim_lex(tokens)
    [('if x', (1, 3)), 'x\n', ('endif', (3, 3)), 'y']

    Note: modifies ``tokens`` in place and returns the same list.
    """
    for i in range(len(tokens)):
        current = tokens[i]
        if isinstance(tokens[i], six.string_types):
            # we don't trim this
            continue
        item = current[0]
        # Only statement directives (if/for/py:/end.../etc.) trigger
        # whitespace trimming; plain expressions are left alone.
        if not statement_re.search(item) and item not in single_statements:
            continue
        if not i:
            prev = ''
        else:
            prev = tokens[i-1]
        if i+1 >= len(tokens):
            next = ''
        else:
            next = tokens[i+1]
        # The directive must be surrounded by literal text (or template
        # start/end), not by other directives.
        if (not isinstance(next, six.string_types)
            or not isinstance(prev, six.string_types)):
            continue
        # The directive sits alone on its line only when the preceding
        # text ends with newline+indent and the following text starts
        # with indent+newline.
        if ((not prev or trail_whitespace_re.search(prev))
            and (not next or lead_whitespace_re.search(next))):
            if prev:
                m = trail_whitespace_re.search(prev)
                # +1 to leave the leading \n on:
                prev = prev[:m.start()+1]
                tokens[i-1] = prev
            if next:
                m = lead_whitespace_re.search(next)
                next = next[m.end():]
                tokens[i+1] = next
    return tokens
+
+
def find_position(string, index):
    """Given a string and index, return 1-based ``(line, column)``."""
    seen = string[:index].splitlines()
    return (len(seen), len(seen[-1]) + 1)
+
def parse(s, name=None):
    r"""
    Parses a string into a kind of AST

    >>> parse('{{x}}')
    [('expr', (1, 3), 'x')]
    >>> parse('foo')
    ['foo']
    >>> parse('{{if x}}test{{endif}}')
    [('cond', (1, 3), ('if', (1, 3), 'x', ['test']))]
    >>> parse('series->{{for x in y}}x={{x}}{{endfor}}')
    ['series->', ('for', (1, 11), ('x',), 'y', ['x=', ('expr', (1, 27), 'x')])]
    >>> parse('{{for x, y in z:}}{{continue}}{{endfor}}')
    [('for', (1, 3), ('x', 'y'), 'z', [('continue', (1, 21))])]
    >>> parse('{{py:x=1}}')
    [('py', (1, 3), 'x=1')]
    >>> parse('{{if x}}a{{elif y}}b{{else}}c{{endif}}')
    [('cond', (1, 3), ('if', (1, 3), 'x', ['a']), ('elif', (1, 12), 'y', ['b']), ('else', (1, 23), None, ['c']))]

    Some exceptions::

    >>> parse('{{continue}}')
    Traceback (most recent call last):
        ...
    TemplateError: continue outside of for loop at line 1 column 3
    >>> parse('{{if x}}foo')
    Traceback (most recent call last):
        ...
    TemplateError: No {{endif}} at line 1 column 3
    >>> parse('{{else}}')
    Traceback (most recent call last):
        ...
    TemplateError: else outside of an if block at line 1 column 3
    >>> parse('{{if x}}{{for x in y}}{{endif}}{{endfor}}')
    Traceback (most recent call last):
        ...
    TemplateError: Unexpected endif at line 1 column 25
    >>> parse('{{if}}{{endif}}')
    Traceback (most recent call last):
        ...
    TemplateError: if with no expression at line 1 column 3
    >>> parse('{{for x y}}{{endfor}}')
    Traceback (most recent call last):
        ...
    TemplateError: Bad for (no "in") in 'x y' at line 1 column 3
    >>> parse('{{py:x=1\ny=2}}')
    Traceback (most recent call last):
        ...
    TemplateError: Multi-line py blocks must start with a newline at line 1 column 3
    """
    tokens = lex(s, name=name)
    result = []
    # Repeatedly consume one top-level node until the token stream is
    # exhausted; nested constructs are consumed inside parse_expr.
    while tokens:
        next, tokens = parse_expr(tokens, name)
        result.append(next)
    return result
+
def parse_expr(tokens, name, context=()):
    """Parse one token into an AST node; return ``(node, rest_of_tokens)``.

    ``context`` is a tuple of enclosing constructs (e.g. ``'for'``,
    ``'if'``) used to validate {{continue}}/{{break}} placement.
    """
    # Literal text passes straight through.
    if isinstance(tokens[0], six.string_types):
        return tokens[0], tokens[1:]
    expr, pos = tokens[0]
    expr = expr.strip()
    if expr.startswith('py:'):
        expr = expr[3:].lstrip(' \t')
        if expr.startswith('\n'):
            expr = expr[1:]
        else:
            # A one-line {{py:...}} may not contain newlines; multi-line
            # blocks must begin with one so indentation is unambiguous.
            if '\n' in expr:
                raise TemplateError(
                    'Multi-line py blocks must start with a newline',
                    position=pos, name=name)
        return ('py', pos, expr), tokens[1:]
    elif expr in ('continue', 'break'):
        if 'for' not in context:
            raise TemplateError(
                'continue outside of for loop',
                position=pos, name=name)
        return (expr, pos), tokens[1:]
    elif expr.startswith('if '):
        return parse_cond(tokens, name, context)
    elif (expr.startswith('elif ')
          or expr == 'else'):
        # elif/else are only legal inside parse_cond's loop.
        raise TemplateError(
            '%s outside of an if block' % expr.split()[0],
            position=pos, name=name)
    elif expr in ('if', 'elif', 'for'):
        raise TemplateError(
            '%s with no expression' % expr,
            position=pos, name=name)
    elif expr in ('endif', 'endfor'):
        raise TemplateError(
            'Unexpected %s' % expr,
            position=pos, name=name)
    elif expr.startswith('for '):
        return parse_for(tokens, name, context)
    elif expr.startswith('default '):
        return parse_default(tokens, name, context)
    elif expr.startswith('#'):
        return ('comment', pos, tokens[0][0]), tokens[1:]
    # Anything else is a plain expression substitution.
    return ('expr', pos, tokens[0][0]), tokens[1:]
+
def parse_cond(tokens, name, context):
    """Parse an if/elif/else chain, consuming through {{endif}}.

    Returns ``(('cond', start_pos, *clauses), remaining_tokens)``.
    """
    start = tokens[0][1]
    clauses = []
    context = context + ('if',)
    while True:
        if not tokens:
            raise TemplateError(
                'Missing {{endif}}',
                position=start, name=name)
        if isinstance(tokens[0], tuple) and tokens[0][0] == 'endif':
            return ('cond', start) + tuple(clauses), tokens[1:]
        clause, tokens = parse_one_cond(tokens, name, context)
        clauses.append(clause)
+
def parse_one_cond(tokens, name, context):
    """Parse a single if/elif/else clause and its body, stopping (without
    consuming) at the next elif/else/endif token.

    Returns ``((kind, pos, expr_or_None, body), remaining_tokens)``.
    """
    (first, pos), tokens = tokens[0], tokens[1:]
    content = []
    if first.endswith(':'):
        # Allow an optional trailing colon, e.g. {{if x:}}.
        first = first[:-1]
    if first.startswith('if '):
        part = ('if', pos, first[3:].lstrip(), content)
    elif first.startswith('elif '):
        part = ('elif', pos, first[5:].lstrip(), content)
    elif first == 'else':
        part = ('else', pos, None, content)
    else:
        assert 0, "Unexpected token %r at %s" % (first, pos)
    while 1:
        if not tokens:
            raise TemplateError(
                'No {{endif}}',
                position=pos, name=name)
        if (isinstance(tokens[0], tuple)
            and (tokens[0][0] == 'endif'
                 or tokens[0][0].startswith('elif ')
                 or tokens[0][0] == 'else')):
            # Leave the terminating token for parse_cond to handle.
            return part, tokens
        next, tokens = parse_expr(tokens, name, context)
        content.append(next)
+
def parse_for(tokens, name, context):
    """Parse ``{{for vars in expr}}...{{endfor}}``.

    Returns ``(('for', pos, vars_tuple, expr, body), remaining_tokens)``.
    """
    first, pos = tokens[0]
    tokens = tokens[1:]
    # Mark that we're inside a for loop so {{continue}}/{{break}} are legal.
    context = ('for',) + context
    content = []
    assert first.startswith('for ')
    if first.endswith(':'):
        # Allow an optional trailing colon, e.g. {{for x in y:}}.
        first = first[:-1]
    first = first[3:].strip()
    match = in_re.search(first)
    if not match:
        raise TemplateError(
            'Bad for (no "in") in %r' % first,
            position=pos, name=name)
    vars = first[:match.start()]
    if '(' in vars:
        raise TemplateError(
            'You cannot have () in the variable section of a for loop (%r)'
            % vars, position=pos, name=name)
    # Split the comma-separated loop variables into a tuple of names.
    vars = tuple([
        v.strip() for v in first[:match.start()].split(',')
        if v.strip()])
    expr = first[match.end():]
    while 1:
        if not tokens:
            raise TemplateError(
                'No {{endfor}}',
                position=pos, name=name)
        if (isinstance(tokens[0], tuple)
            and tokens[0][0] == 'endfor'):
            return ('for', pos, vars, expr, content), tokens[1:]
        next, tokens = parse_expr(tokens, name, context)
        content.append(next)
+
def parse_default(tokens, name, context):
    """Parse a ``{{default var = expr}}`` directive.

    Returns ``(('default', pos, var, expr), remaining_tokens)``.
    """
    first, pos = tokens[0]
    assert first.startswith('default ')
    body = first.split(None, 1)[1]
    head, sep, tail = body.partition('=')
    if not sep:
        raise TemplateError(
            "Expression must be {{default var=value}}; no = found in %r" % body,
            position=pos, name=name)
    var = head.strip()
    if ',' in var:
        raise TemplateError(
            "{{default x, y = ...}} is not supported",
            position=pos, name=name)
    if not var_re.search(var):
        raise TemplateError(
            "Not a valid variable name for {{default}}: %r"
            % var, position=pos, name=name)
    return ('default', pos, var, tail.strip()), tokens[1:]
+
# optparse usage text for fill_command (%prog is substituted by optparse).
_fill_command_usage = """\
%prog [OPTIONS] TEMPLATE arg=value

Use py:arg=value to set a Python value; otherwise all values are
strings.
"""
+
def fill_command(args=None):
    """Command-line entry point: fill a template with name=value arguments.

    Arguments of the form ``py:name=value`` have their value evaluated
    as Python; everything else is a plain string.  A template name of
    ``-`` reads the template from stdin.  See ``_fill_command_usage``.
    """
    import sys, optparse, pkg_resources, os
    if args is None:
        args = sys.argv[1:]
    dist = pkg_resources.get_distribution('Paste')
    parser = optparse.OptionParser(
        version=str(dist),
        usage=_fill_command_usage)
    parser.add_option(
        '-o', '--output',
        dest='output',
        metavar="FILENAME",
        help="File to write output to (default stdout)")
    parser.add_option(
        '--html',
        dest='use_html',
        action='store_true',
        help="Use HTML style filling (including automatic HTML quoting)")
    parser.add_option(
        '--env',
        dest='use_env',
        action='store_true',
        help="Put the environment in as top-level variables")
    options, args = parser.parse_args(args)
    if len(args) < 1:
        # Was: a stray debug print(dir(parser)) plus `assert 0` (which
        # vanishes under python -O); report the error and exit non-zero.
        print('You must give a template filename')
        sys.exit(2)
    template_name = args[0]
    args = args[1:]
    vars = {}
    if options.use_env:
        vars.update(os.environ)
    for value in args:
        if '=' not in value:
            print('Bad argument: %r' % value)
            sys.exit(2)
        name, value = value.split('=', 1)
        if name.startswith('py:'):
            # Fixed: previously `name = name[:3]` kept only the 'py:'
            # prefix as the variable name; strip the prefix instead.
            name = name[3:]
            value = eval(value)
        vars[name] = value
    if template_name == '-':
        template_content = sys.stdin.read()
        template_name = '<stdin>'
    else:
        with open(template_name, 'rb') as f:
            template_content = f.read()
    if options.use_html:
        TemplateClass = HTMLTemplate
    else:
        TemplateClass = Template
    template = TemplateClass(template_content, name=template_name)
    result = template.substitute(vars)
    if options.output:
        with open(options.output, 'wb') as f:
            if not isinstance(result, bytes):
                # substitute() returns text; encode to match the binary
                # file mode (utf8 mirrors Template.default_encoding).
                result = result.encode('utf8')
            f.write(result)
    else:
        sys.stdout.write(result)
+
if __name__ == '__main__':
    # Allow direct execution as a command-line template filler; the
    # import-from-package form keeps behavior identical to the installed
    # console script.
    from paste.util.template import fill_command
    fill_command()
+
diff --git a/paste/util/threadedprint.py b/paste/util/threadedprint.py
new file mode 100644
index 0000000..820311e
--- /dev/null
+++ b/paste/util/threadedprint.py
@@ -0,0 +1,250 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+threadedprint.py
+================
+
+:author: Ian Bicking
+:date: 12 Jul 2004
+
+Multi-threaded printing; allows the output produced via print to be
+separated according to the thread.
+
+To use this, you must install the catcher, like::
+
+ threadedprint.install()
+
+The installation optionally takes one of three parameters:
+
+default
+ The default destination for print statements (e.g., ``sys.stdout``).
+factory
+ A function that will produce the stream for a thread, given the
+ thread's name.
+paramwriter
+ Instead of writing to a file-like stream, this function will be
+ called like ``paramwriter(thread_name, text)`` for every write.
+
+The thread name is the value returned by
+``threading.currentThread().getName()``, a string (typically something
+like Thread-N).
+
+You can also submit file-like objects for specific threads, which will
+override any of these parameters. To do this, call ``register(stream,
+[threadName])``. ``threadName`` is optional, and if not provided the
+stream will be registered for the current thread.
+
+If no specific stream is registered for a thread, and no default has
+been provided, then an error will occur when anything is written to
+``sys.stdout`` (or printed).
+
+Note: the stream's ``write`` method will be called in the thread the
+text came from, so you should consider thread safety, especially if
+multiple threads share the same writer.
+
+Note: if you want access to the original standard out, use
+``sys.__stdout__``.
+
+You may also uninstall this, via::
+
+ threadedprint.uninstall()
+
+TODO
+----
+
+* Something with ``sys.stderr``.
+* Some default handlers. Maybe something that hooks into `logging`.
+* Possibly cache the results of ``factory`` calls. This would be a
+ semantic change.
+
+"""
+
+import threading
+import sys
+from paste.util import filemixin
+
class PrintCatcher(filemixin.FileMixin):
    """File-like stdout replacement that dispatches writes per-thread.

    At most one of ``default``, ``factory``, or ``paramwriter`` may be
    given; it handles output for threads that have no stream registered
    via ``register()``.
    """

    def __init__(self, default=None, factory=None, paramwriter=None,
                 leave_stdout=False):
        # len(filter(...)) only works on Python 2 (filter() returns an
        # iterator on Python 3); count explicitly instead.
        assert sum(1 for x in [default, factory, paramwriter]
                   if x is not None) <= 1, (
            "You can only provide one of default, factory, or paramwriter")
        if leave_stdout:
            assert not default, (
                "You cannot pass in both default (%r) and "
                "leave_stdout=True" % default)
            default = sys.stdout
        if default:
            self._defaultfunc = self._writedefault
        elif factory:
            self._defaultfunc = self._writefactory
        elif paramwriter:
            self._defaultfunc = self._writeparam
        else:
            self._defaultfunc = self._writeerror
        self._default = default
        self._factory = factory
        self._paramwriter = paramwriter
        self._catchers = {}

    def write(self, v, currentThread=threading.currentThread):
        name = currentThread().getName()
        catchers = self._catchers
        # dict.has_key was removed in Python 3; use `in`.
        if name not in catchers:
            self._defaultfunc(name, v)
        else:
            catcher = catchers[name]
            catcher.write(v)

    def seek(self, *args):
        # Weird, but Google App Engine is seeking on stdout
        name = threading.currentThread().getName()
        catchers = self._catchers
        if name not in catchers:
            return self._default.seek(*args)
        else:
            return catchers[name].seek(*args)

    def read(self, *args):
        name = threading.currentThread().getName()
        catchers = self._catchers
        # Return the data read (the original dropped the return value,
        # so read() always produced None).
        if name not in catchers:
            return self._default.read(*args)
        else:
            return catchers[name].read(*args)

    def _writedefault(self, name, v):
        self._default.write(v)

    def _writefactory(self, name, v):
        self._factory(name).write(v)

    def _writeparam(self, name, v):
        self._paramwriter(name, v)

    def _writeerror(self, name, v):
        assert False, (
            "There is no PrintCatcher output stream for the thread %r"
            % name)

    def register(self, catcher, name=None,
                 currentThread=threading.currentThread):
        """Route output from the named (default: current) thread to `catcher`."""
        if name is None:
            name = currentThread().getName()
        self._catchers[name] = catcher

    def deregister(self, name=None,
                   currentThread=threading.currentThread):
        """Remove the stream registered for the named (default: current) thread."""
        if name is None:
            name = currentThread().getName()
        assert name in self._catchers, (
            "There is no PrintCatcher catcher for the thread %r" % name)
        del self._catchers[name]
+
# Module-level singletons: the active PrintCatcher (installed as
# sys.stdout) and the stdout object it replaced.
_printcatcher = None
_oldstdout = None

def install(**kw):
    """Replace sys.stdout with a PrintCatcher (kwargs passed through).

    Re-installing while our own catcher is already sys.stdout is a
    no-op.  Also rebinds the module-level ``register``/``deregister``
    aliases to the active catcher's bound methods.
    """
    global _printcatcher, _oldstdout, register, deregister
    if (not _printcatcher or sys.stdout is not _printcatcher):
        _oldstdout = sys.stdout
        _printcatcher = sys.stdout = PrintCatcher(**kw)
        register = _printcatcher.register
        deregister = _printcatcher.deregister

def uninstall():
    """Restore the original sys.stdout and reset the module state."""
    global _printcatcher, _oldstdout, register, deregister
    if _printcatcher:
        sys.stdout = _oldstdout
        _printcatcher = _oldstdout = None
        # Back to the "not installed" sentinels.
        register = not_installed_error
        deregister = not_installed_error
+
def not_installed_error(*args, **kw):
    """Stand-in for register()/deregister() while nothing is installed."""
    message = (
        "threadedprint has not yet been installed (call "
        "threadedprint.install())")
    assert False, message

# Until install() runs, any registration attempt is an error.
register = deregister = not_installed_error
+
class StdinCatcher(filemixin.FileMixin):
    """File-like stdin replacement that dispatches reads per-thread.

    Mirror of PrintCatcher for input; at most one of ``default``,
    ``factory``, or ``paramwriter`` may be supplied to serve threads
    with no registered stream.
    """

    def __init__(self, default=None, factory=None, paramwriter=None):
        # Python 3 compatible count (len(filter(...)) is Python 2 only).
        assert sum(1 for x in [default, factory, paramwriter]
                   if x is not None) <= 1, (
            "You can only provide one of default, factory, or paramwriter")
        if default:
            self._defaultfunc = self._readdefault
        elif factory:
            self._defaultfunc = self._readfactory
        elif paramwriter:
            self._defaultfunc = self._readparam
        else:
            self._defaultfunc = self._readerror
        self._default = default
        self._factory = factory
        self._paramwriter = paramwriter
        self._catchers = {}

    def read(self, size=None, currentThread=threading.currentThread):
        name = currentThread().getName()
        catchers = self._catchers
        # dict.has_key was removed in Python 3; use `in`.
        if name not in catchers:
            return self._defaultfunc(name, size)
        else:
            catcher = catchers[name]
            return catcher.read(size)

    def _readdefault(self, name, size):
        # Return the data (the original dropped the return value).
        return self._default.read(size)

    def _readfactory(self, name, size):
        return self._factory(name).read(size)

    def _readparam(self, name, size):
        # Was self._paramreader -- an attribute that is never set
        # (__init__ stores the callable as self._paramwriter), so this
        # path always raised AttributeError.
        return self._paramwriter(name, size)

    def _readerror(self, name, size):
        assert False, (
            "There is no StdinCatcher output stream for the thread %r"
            % name)

    def register(self, catcher, name=None,
                 currentThread=threading.currentThread):
        """Route reads from the named (default: current) thread to `catcher`."""
        if name is None:
            name = currentThread().getName()
        self._catchers[name] = catcher

    def deregister(self, catcher, name=None,
                   currentThread=threading.currentThread):
        """Remove the stream registered for the named (default: current) thread."""
        if name is None:
            name = currentThread().getName()
        assert name in self._catchers, (
            "There is no StdinCatcher catcher for the thread %r" % name)
        del self._catchers[name]
+
# Module-level singletons for the stdin side: the active StdinCatcher
# (installed as sys.stdin) and the stdin object it replaced.
_stdincatcher = None
_oldstdin = None

def install_stdin(**kw):
    """Replace sys.stdin with a StdinCatcher (kwargs passed through).

    No-op if a catcher is already installed.  Rebinds the module-level
    ``register_stdin``/``deregister_stdin`` aliases to the catcher's
    bound methods.

    NOTE(review): unlike register/deregister, no module-level default is
    assigned to register_stdin/deregister_stdin before install_stdin()
    runs -- referencing them earlier raises NameError; verify callers.
    """
    global _stdincatcher, _oldstdin, register_stdin, deregister_stdin
    if not _stdincatcher:
        _oldstdin = sys.stdin
        _stdincatcher = sys.stdin = StdinCatcher(**kw)
        register_stdin = _stdincatcher.register
        deregister_stdin = _stdincatcher.deregister

def uninstall_stdin():
    """Restore the original sys.stdin and reset the module state."""
    global _stdincatcher, _oldstdin, register_stdin, deregister_stdin
    if _stdincatcher:
        sys.stdin = _oldstdin
        _stdincatcher = _oldstdin = None
        register_stdin = deregister_stdin = not_installed_error_stdin
+
def not_installed_error_stdin(*args, **kw):
    """Stand-in for register_stdin()/deregister_stdin() before install_stdin()."""
    message = (
        "threadedprint has not yet been installed for stdin (call "
        "threadedprint.install_stdin())")
    assert False, message
diff --git a/paste/util/threadinglocal.py b/paste/util/threadinglocal.py
new file mode 100644
index 0000000..06f2643
--- /dev/null
+++ b/paste/util/threadinglocal.py
@@ -0,0 +1,43 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""
+Implementation of thread-local storage, for Python versions that don't
+have thread local storage natively.
+"""
+
try:
    import threading
except ImportError:
    # No threads, so "thread local" means process-global
    class local(object):
        pass
else:
    try:
        # Normal case: use the interpreter's native implementation.
        local = threading.local
    except AttributeError:
        # Added in 2.4, but now we'll have to define it ourselves
        import thread
        class local(object):
            # Fallback: one attribute dict per thread id, keyed by
            # thread.get_ident().

            def __init__(self):
                # Write through __dict__ directly to avoid triggering
                # our own __setattr__ while creating the store.
                self.__dict__['__objs'] = {}

            def __getattr__(self, attr, g=thread.get_ident):
                # Look the attribute up in the calling thread's dict.
                try:
                    return self.__dict__['__objs'][g()][attr]
                except KeyError:
                    raise AttributeError(
                        "No variable %s defined for the thread %s"
                        % (attr, g()))

            def __setattr__(self, attr, value, g=thread.get_ident):
                # Lazily create the per-thread dict on first write.
                self.__dict__['__objs'].setdefault(g(), {})[attr] = value

            def __delattr__(self, attr, g=thread.get_ident):
                try:
                    del self.__dict__['__objs'][g()][attr]
                except KeyError:
                    raise AttributeError(
                        "No variable %s defined for thread %s"
                        % (attr, g()))
+
diff --git a/paste/wsgilib.py b/paste/wsgilib.py
new file mode 100644
index 0000000..d5862e7
--- /dev/null
+++ b/paste/wsgilib.py
@@ -0,0 +1,604 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+"""
+A module of many disparate routines.
+"""
+
+from __future__ import print_function
+
+# functions which moved to paste.request and paste.response
+# Deprecated around 15 Dec 2005
+from paste.request import get_cookies, parse_querystring, parse_formvars
+from paste.request import construct_url, path_info_split, path_info_pop
+from paste.response import HeaderDict, has_header, header_value, remove_header
+from paste.response import error_body_response, error_response, error_response_app
+
+from traceback import print_exception
+import six
+import sys
+from six.moves import cStringIO as StringIO
+from six.moves.urllib.parse import unquote, urlsplit
+import warnings
+
+__all__ = ['add_close', 'add_start_close', 'capture_output', 'catch_errors',
+ 'catch_errors_app', 'chained_app_iters', 'construct_url',
+ 'dump_environ', 'encode_unicode_app_iter', 'error_body_response',
+ 'error_response', 'get_cookies', 'has_header', 'header_value',
+ 'interactive', 'intercept_output', 'path_info_pop',
+ 'path_info_split', 'raw_interactive', 'send_file']
+
class add_close(object):
    """Iterate over ``app_iterable``, then invoke ``close_func`` on close().

    close() also closes the wrapped iterable itself when it has a
    ``close`` method; if nobody ever calls close(), a warning is printed
    to stderr at garbage-collection time.
    """

    def __init__(self, app_iterable, close_func):
        self.app_iterable = app_iterable
        self.app_iter = iter(app_iterable)
        self.close_func = close_func
        self._closed = False

    def __iter__(self):
        return self

    def __next__(self):
        return next(self.app_iter)

    # Python 2 spelling of the iterator protocol.
    def next(self):
        return self.__next__()

    def close(self):
        self._closed = True
        if hasattr(self.app_iterable, 'close'):
            self.app_iterable.close()
        self.close_func()

    def __del__(self):
        if self._closed:
            return
        # We can't raise an error or anything at this stage
        print("Error: app_iter.close() was not called when finishing "
              "WSGI request. finalization function %s not called"
              % self.close_func, file=sys.stderr)
+
class add_start_close(object):
    """Iterate over ``app_iterable``, calling ``start_func`` just before
    the first item is produced and ``close_func`` (if given) on close().

    Prints a warning to stderr at garbage-collection time if close()
    was never called.
    """

    def __init__(self, app_iterable, start_func, close_func=None):
        self.app_iterable = app_iterable
        self.app_iter = iter(app_iterable)
        self.first = True
        self.start_func = start_func
        self.close_func = close_func
        self._closed = False

    def __iter__(self):
        return self

    def __next__(self):
        if self.first:
            # Fire the start callback exactly once, before the first item.
            self.start_func()
            self.first = False
        return next(self.app_iter)
    # Python 2 spelling of the iterator protocol.
    next = __next__

    def close(self):
        self._closed = True
        if hasattr(self.app_iterable, 'close'):
            self.app_iterable.close()
        if self.close_func is not None:
            self.close_func()

    def __del__(self):
        if self._closed:
            return
        # We can't raise an error or anything at this stage
        print("Error: app_iter.close() was not called when finishing "
              "WSGI request. finalization function %s not called"
              % self.close_func, file=sys.stderr)
+
class chained_app_iters(object):

    """
    Chains several app_iters together, also delegating .close() to each
    of them.
    """

    def __init__(self, *chained):
        self.app_iters = chained
        self.chained = [iter(item) for item in chained]
        self._closed = False

    def __iter__(self):
        return self

    def next(self):
        # Use the next() builtin: the Python 2-only ``.next()`` method
        # does not exist on Python 3 iterators.
        if len(self.chained) == 1:
            return next(self.chained[0])
        else:
            try:
                return next(self.chained[0])
            except StopIteration:
                # Current sub-iterator exhausted; move on to the next one.
                self.chained.pop(0)
                return self.next()
    # Python 3 protocol name; without this alias the class was not an
    # iterator on Python 3 at all (siblings in this file already do this).
    __next__ = next

    def close(self):
        """Close every chained app_iter, re-raising the last error seen."""
        self._closed = True
        got_exc = None
        for app_iter in self.app_iters:
            try:
                if hasattr(app_iter, 'close'):
                    app_iter.close()
            except:
                got_exc = sys.exc_info()
        if got_exc:
            six.reraise(got_exc[0], got_exc[1], got_exc[2])

    def __del__(self):
        if not self._closed:
            # We can't raise an error or anything at this stage.  The
            # original message referenced self.close_func, an attribute
            # this class never defines (AttributeError at GC time).
            print("Error: app_iter.close() was not called when finishing "
                  "WSGI request.", file=sys.stderr)
+
class encode_unicode_app_iter(object):
    """Wrap an app_iterable, encoding any text (unicode) chunks to bytes
    with the given ``encoding``/``errors``; byte chunks pass through.
    """

    def __init__(self, app_iterable, encoding=sys.getdefaultencoding(),
                 errors='strict'):
        self.app_iterable = app_iterable
        self.app_iter = iter(app_iterable)
        self.encoding = encoding
        self.errors = errors

    def __iter__(self):
        return self

    def __next__(self):
        chunk = next(self.app_iter)
        if isinstance(chunk, six.text_type):
            return chunk.encode(self.encoding, self.errors)
        return chunk
    # Python 2 spelling of the iterator protocol.
    next = __next__

    def close(self):
        wrapped = self.app_iterable
        if hasattr(wrapped, 'close'):
            wrapped.close()
+
def catch_errors(application, environ, start_response, error_callback,
                 ok_callback=None):
    """
    Run ``application`` and return its iterator (to be passed upstream).
    If an exception occurs, ``error_callback(sys.exc_info())`` is invoked
    and the exception propagates; on clean completion ``ok_callback()``
    (if given) is invoked with no arguments.
    """
    try:
        app_iter = application(environ, start_response)
    except:
        error_callback(sys.exc_info())
        raise
    if type(app_iter) not in (list, tuple):
        return _wrap_app_iter(app_iter, error_callback, ok_callback)
    # Lists/tuples cannot raise during iteration, so report success now.
    if ok_callback:
        ok_callback()
    return app_iter
+
+class _wrap_app_iter(object):
+
+ def __init__(self, app_iterable, error_callback, ok_callback):
+ self.app_iterable = app_iterable
+ self.app_iter = iter(app_iterable)
+ self.error_callback = error_callback
+ self.ok_callback = ok_callback
+ if hasattr(self.app_iterable, 'close'):
+ self.close = self.app_iterable.close
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ try:
+ return self.app_iter.next()
+ except StopIteration:
+ if self.ok_callback:
+ self.ok_callback()
+ raise
+ except:
+ self.error_callback(sys.exc_info())
+ raise
+
def catch_errors_app(application, environ, start_response, error_callback_app,
                     ok_callback=None, catch=Exception):
    """
    Like ``catch_errors``, except error_callback_app should be a
    callable that will receive *three* arguments -- ``environ``,
    ``start_response``, and ``exc_info``. It should call
    ``start_response`` (*with* the exc_info argument!) and return an
    iterator.
    """
    try:
        app_iter = application(environ, start_response)
    except catch:
        return error_callback_app(environ, start_response, sys.exc_info())
    if type(app_iter) not in (list, tuple):
        return _wrap_app_iter_app(
            environ, start_response, app_iter,
            error_callback_app, ok_callback, catch=catch)
    # A list/tuple body cannot fail during iteration.
    if ok_callback is not None:
        ok_callback()
    return app_iter
+
+class _wrap_app_iter_app(object):
+
+ def __init__(self, environ, start_response, app_iterable,
+ error_callback_app, ok_callback, catch=Exception):
+ self.environ = environ
+ self.start_response = start_response
+ self.app_iterable = app_iterable
+ self.app_iter = iter(app_iterable)
+ self.error_callback_app = error_callback_app
+ self.ok_callback = ok_callback
+ self.catch = catch
+ if hasattr(self.app_iterable, 'close'):
+ self.close = self.app_iterable.close
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ try:
+ return self.app_iter.next()
+ except StopIteration:
+ if self.ok_callback:
+ self.ok_callback()
+ raise
+ except self.catch:
+ if hasattr(self.app_iterable, 'close'):
+ try:
+ self.app_iterable.close()
+ except:
+ # @@: Print to wsgi.errors?
+ pass
+ new_app_iterable = self.error_callback_app(
+ self.environ, self.start_response, sys.exc_info())
+ app_iter = iter(new_app_iterable)
+ if hasattr(new_app_iterable, 'close'):
+ self.close = new_app_iterable.close
+ self.next = app_iter.next
+ return self.next()
+
def raw_interactive(application, path='', raise_on_wsgi_error=False,
                    **environ):
    """
    Runs the application in a fake environment.

    Builds a minimal WSGI environ (keyword arguments override it; a
    double underscore in a keyword becomes a dot, e.g. ``wsgi__input``),
    runs ``application`` against it, and returns a tuple of
    ``(status, header_list, body_bytes, error_output)``.

    If ``raise_on_wsgi_error`` is true, anything the app writes to
    ``wsgi.errors`` raises an AssertionError instead of being collected.
    """
    assert "path_info" not in environ, "argument list changed"
    if raise_on_wsgi_error:
        errors = ErrorRaiser()
    else:
        errors = six.BytesIO()
    basic_environ = {
        # mandatory CGI variables
        'REQUEST_METHOD': 'GET',     # always mandatory
        'SCRIPT_NAME': '',           # may be empty if app is at the root
        'PATH_INFO': '',             # may be empty if at root of app
        'SERVER_NAME': 'localhost',  # always mandatory
        'SERVER_PORT': '80',         # always mandatory
        'SERVER_PROTOCOL': 'HTTP/1.0',
        # mandatory wsgi variables
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': 'http',
        'wsgi.input': six.BytesIO(),
        'wsgi.errors': errors,
        'wsgi.multithread': False,
        'wsgi.multiprocess': False,
        'wsgi.run_once': False,
        }
    if path:
        (_, _, path_info, query, fragment) = urlsplit(str(path))
        path_info = unquote(path_info)
        # urlsplit returns unicode so coerce it back to str
        path_info, query = str(path_info), str(query)
        basic_environ['PATH_INFO'] = path_info
        if query:
            basic_environ['QUERY_STRING'] = query
    # Keyword overrides: double underscore stands in for a dot
    # (e.g. wsgi__errors -> wsgi.errors).
    for name, value in environ.items():
        name = name.replace('__', '.')
        basic_environ[name] = value
    if ('SERVER_NAME' in basic_environ
        and 'HTTP_HOST' not in basic_environ):
        basic_environ['HTTP_HOST'] = basic_environ['SERVER_NAME']
    istream = basic_environ['wsgi.input']
    # Convenience: a bytes value for wsgi.input is wrapped in a stream
    # and CONTENT_LENGTH is derived from it.
    if isinstance(istream, bytes):
        basic_environ['wsgi.input'] = six.BytesIO(istream)
        basic_environ['CONTENT_LENGTH'] = len(istream)
    data = {}
    output = []
    headers_set = []
    headers_sent = []
    def start_response(status, headers, exc_info=None):
        if exc_info:
            try:
                if headers_sent:
                    # Re-raise original exception only if headers sent
                    six.reraise(exc_info[0], exc_info[1], exc_info[2])
            finally:
                # avoid dangling circular reference
                exc_info = None
        elif headers_set:
            # You cannot set the headers more than once, unless the
            # exc_info is provided.
            raise AssertionError("Headers already set and no exc_info!")
        headers_set.append(True)
        data['status'] = status
        data['headers'] = headers
        return output.append
    app_iter = application(basic_environ, start_response)
    try:
        try:
            for s in app_iter:
                if not isinstance(s, six.binary_type):
                    raise ValueError(
                        "The app_iter response can only contain bytes (not "
                        "unicode); got: %r" % s)
                headers_sent.append(True)
                if not headers_set:
                    raise AssertionError("Content sent w/o headers!")
                output.append(s)
        except TypeError as e:
            # Typically "iteration over non-sequence", so we want
            # to give better debugging information...
            e.args = ((e.args[0] + ' iterable: %r' % app_iter),) + e.args[1:]
            raise
    finally:
        if hasattr(app_iter, 'close'):
            app_iter.close()
    return (data['status'], data['headers'], b''.join(output),
            errors.getvalue())
+
class ErrorRaiser(object):
    """A wsgi.errors stand-in that raises if anything is written to it."""

    def flush(self):
        pass

    def write(self, value):
        if value:
            raise AssertionError(
                "No errors should be written (got: %r)" % value)

    def writelines(self, seq):
        raise AssertionError(
            "No errors should be written (got lines: %s)" % list(seq))

    def getvalue(self):
        # Nothing is ever stored, so there is never anything to report.
        return ''
+
def interactive(*args, **kw):
    """
    Runs the application interactively, wrapping `raw_interactive` but
    returning the output in a formatted way.
    """
    status, headers, content, errors = raw_interactive(*args, **kw)
    out = StringIO()
    if errors:
        out.write('Errors:\n')
        out.write(errors.strip())
        out.write('\n----------end errors\n')
    out.write(status + '\n')
    for name, value in headers:
        out.write('%s: %s\n' % (name, value))
    out.write('\n')
    # NOTE(review): raw_interactive returns the body as bytes; writing it
    # to a text StringIO may fail on Python 3 -- confirm intended usage.
    out.write(content)
    return out.getvalue()
interactive.proxy = 'raw_interactive'
+
def dump_environ(environ, start_response):
    """
    Application which simply dumps the current environment
    variables out as a plain text response.
    """
    output = []
    keys = list(environ.keys())
    keys.sort()
    for k in keys:
        # Indent continuation lines of multi-line values.
        v = str(environ[k]).replace("\n","\n ")
        output.append("%s: %s\n" % (k, v))
    output.append("\n")
    content_length = environ.get("CONTENT_LENGTH", '')
    if content_length:
        # Echo the request body after the variable dump.
        # NOTE(review): on Python 3 wsgi.input.read() returns bytes,
        # which would break the str join below -- confirm callers.
        output.append(environ['wsgi.input'].read(int(content_length)))
        output.append("\n")
    output = "".join(output)
    if six.PY3:
        output = output.encode('utf8')
    headers = [('Content-Type', 'text/plain'),
               ('Content-Length', str(len(output)))]
    start_response("200 OK", headers)
    return [output]
+
def send_file(filename):
    """Deprecated: use ``paste.fileapp.FileApp(filename)`` instead."""
    warnings.warn(
        "wsgilib.send_file has been moved to paste.fileapp.FileApp",
        DeprecationWarning, 2)
    from paste import fileapp
    return fileapp.FileApp(filename)
+
def capture_output(environ, start_response, application):
    """
    Deprecated (use ``intercept_output``): run ``application``,
    forwarding status and headers to ``start_response`` but *capturing*
    the body instead of sending it.

    Returns ``[status, headers, body]``; entries that were never set
    are ``None``.
    """
    warnings.warn(
        'wsgilib.capture_output has been deprecated in favor '
        'of wsgilib.intercept_output',
        DeprecationWarning, 2)
    data = []
    output = StringIO()
    def replacement_start_response(status, headers, exc_info=None):
        # A second call (with exc_info) replaces the recorded response.
        del data[:]
        data.append(status)
        data.append(headers)
        start_response(status, headers, exc_info)
        return output.write
    app_iter = application(environ, replacement_start_response)
    try:
        for chunk in app_iter:
            output.write(chunk)
    finally:
        if hasattr(app_iter, 'close'):
            app_iter.close()
    # Pad out [status, headers] if start_response was never called.
    while len(data) < 2:
        data.append(None)
    data.append(output.getvalue())
    return data
+
def intercept_output(environ, application, conditional=None,
                     start_response=None):
    """
    Run ``application`` and capture status, headers, and body; nothing
    is forwarded upstream (unlike ``capture_output``), so the caller
    must invoke ``start_response`` itself afterwards.

    Returns ``[status, headers, body]``.

    If ``conditional`` is given it is called as ``conditional(status,
    headers)``; when it returns false the response is *not*
    intercepted: ``start_response`` (which is then required) is called
    directly and ``(None, None, app_iter)`` is returned.  Callers must
    detect the ``None`` status and return the app_iter unchanged, e.g.:

    .. code-block:: python

        def dehtmlifying_middleware(application):
            def replacement_app(environ, start_response):
                status, headers, body = intercept_output(
                    environ, application,
                    lambda s, h: header_value(h, 'content-type').startswith('text/html'),
                    start_response)
                if status is None:
                    return body
                start_response(status, headers)
                body = re.sub(r'<.*?>', '', body)
                return [body]
            return replacement_app
    """
    if conditional is not None and start_response is None:
        raise TypeError(
            "If you provide conditional you must also provide "
            "start_response")
    data = []
    output = StringIO()
    def replacement_start_response(status, headers, exc_info=None):
        if conditional is not None and not conditional(status, headers):
            # Not intercepting: pass straight through and mark it.
            data.append(None)
            return start_response(status, headers, exc_info)
        # A second call (with exc_info) replaces the recorded response.
        del data[:]
        data.append(status)
        data.append(headers)
        return output.write
    app_iter = application(environ, replacement_start_response)
    if data[0] is None:
        return (None, None, app_iter)
    try:
        for chunk in app_iter:
            output.write(chunk)
    finally:
        if hasattr(app_iter, 'close'):
            app_iter.close()
    while len(data) < 2:
        data.append(None)
    data.append(output.getvalue())
    return data
+
+## Deprecation warning wrapper:
+
class ResponseHeaderDict(HeaderDict):
    """Deprecated alias kept for backward compatibility; the real
    implementation lives in paste.response.HeaderDict."""

    def __init__(self, *args, **kw):
        warnings.warn(
            "The class wsgilib.ResponseHeaderDict has been moved "
            "to paste.response.HeaderDict",
            DeprecationWarning, 2)
        HeaderDict.__init__(self, *args, **kw)
+
+def _warn_deprecated(new_func):
+ new_name = new_func.func_name
+ new_path = new_func.func_globals['__name__'] + '.' + new_name
+ def replacement(*args, **kw):
+ warnings.warn(
+ "The function wsgilib.%s has been moved to %s"
+ % (new_name, new_path),
+ DeprecationWarning, 2)
+ return new_func(*args, **kw)
+ try:
+ replacement.func_name = new_func.func_name
+ except:
+ pass
+ return replacement
+
# Put warnings wrapper in place for all public functions that
# were imported from elsewhere:

for _name in __all__:
    _func = globals()[_name]
    # __globals__ works on Python 2.6+ and 3; func_globals existed only
    # on Python 2, so this loop previously crashed at import time on
    # Python 3.
    if (hasattr(_func, '__globals__')
            and _func.__globals__['__name__'] != __name__):
        globals()[_name] = _warn_deprecated(_func)
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
+
diff --git a/paste/wsgiwrappers.py b/paste/wsgiwrappers.py
new file mode 100644
index 0000000..674054f
--- /dev/null
+++ b/paste/wsgiwrappers.py
@@ -0,0 +1,590 @@
+# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+"""WSGI Wrappers for a Request and Response
+
+The WSGIRequest and WSGIResponse objects are light wrappers to make it easier
+to deal with an incoming request and sending a response.
+"""
import io
import re
import warnings
from pprint import pformat
try:
    # Python 3
    from http.cookies import SimpleCookie
except ImportError:
    # Python 2
    from Cookie import SimpleCookie
import six
+
+from paste.request import EnvironHeaders, get_cookie_dict, \
+ parse_dict_querystring, parse_formvars
+from paste.util.multidict import MultiDict, UnicodeMultiDict
+from paste.registry import StackedObjectProxy
+from paste.response import HeaderDict
+from paste.wsgilib import encode_unicode_app_iter
+from paste.httpheaders import ACCEPT_LANGUAGE
+from paste.util.mimeparse import desired_matches
+
# Public API of this module.
__all__ = ['WSGIRequest', 'WSGIResponse']

# Extracts the charset parameter from a Content-Type header value,
# e.g. "text/html; charset=utf-8" -> group(1) == "utf-8".
_CHARSET_RE = re.compile(r';\s*charset=([^;]*)', re.I)
+
class DeprecatedSettings(StackedObjectProxy):
    """Deprecated settings proxy.

    Pushing an object onto this proxy emits a DeprecationWarning,
    mirrors the object onto ``WSGIResponse.defaults``, and then
    delegates to the normal StackedObjectProxy behavior.
    """
    def _push_object(self, obj):
        warnings.warn('paste.wsgiwrappers.settings is deprecated: Please use '
                      'paste.wsgiwrappers.WSGIRequest.defaults instead',
                      DeprecationWarning, 3)
        WSGIResponse.defaults._push_object(obj)
        super(DeprecatedSettings, self)._push_object(obj)
+
# settings is deprecated: use WSGIResponse.defaults instead
# (kept only so existing code that pushes onto it keeps working).
settings = DeprecatedSettings(default=dict())
+
class environ_getter(object):
    """Descriptor that proxies an attribute to a key in ``self.environ``.

    When the key is absent, ``default`` is returned — unless
    ``default_factory`` is supplied, in which case a fresh value is
    created, stored back into the environ, and returned.
    """
    # @@: Also __set__? Should setting be allowed?
    def __init__(self, key, default='', default_factory=None):
        self.key = key
        self.default = default
        self.default_factory = default_factory

    def __get__(self, obj, type=None):
        # Accessed with no owner type: hand back the descriptor itself.
        if type is None:
            return self
        environ = obj.environ
        if self.key in environ:
            return environ[self.key]
        if not self.default_factory:
            return self.default
        # Materialize a default and memoize it in the environ.
        value = environ[self.key] = self.default_factory()
        return value

    def __repr__(self):
        return '<Proxy for WSGI environ %r key>' % self.key
+
class WSGIRequest(object):
    """WSGI Request API Object

    This object represents a WSGI request with a more friendly interface.
    This does not expose every detail of the WSGI environment, and attempts
    to express nothing beyond what is available in the environment
    dictionary.

    The only state maintained in this object is the desired ``charset``,
    its associated ``errors`` handler, and the ``decode_param_names``
    option.

    The incoming parameter values will be automatically coerced to unicode
    objects of the ``charset`` encoding when ``charset`` is set. The
    incoming parameter names are not decoded to unicode unless the
    ``decode_param_names`` option is enabled.

    When unicode is expected, ``charset`` will be overridden by the
    value of the ``Content-Type`` header's charset parameter if one was
    specified by the client.

    The class variable ``defaults`` specifies default values for
    ``charset``, ``errors``, and ``language``. These can be overridden for the
    current request via the registry.

    The ``language`` default value is considered the fallback during i18n
    translations to ensure in odd cases that mixed languages don't occur should
    the ``language`` file contain the string but not another language in the
    accepted languages list. The ``language`` value only applies when getting
    a list of accepted languages from the HTTP Accept header.

    This behavior is duplicated from Aquarium, and may seem strange but is
    very useful. Normally, everything in the code is in "en-us". However,
    the "en-us" translation catalog is usually empty. If the user requests
    ``["en-us", "zh-cn"]`` and a translation isn't found for a string in
    "en-us", you don't want gettext to fallback to "zh-cn". You want it to
    just use the string itself. Hence, if a string isn't found in the
    ``language`` catalog, the string in the source code will be used.

    *All* other state is kept in the environment dictionary; this is
    essential for interoperability.

    You are free to subclass this object.

    """
    defaults = StackedObjectProxy(default=dict(charset=None, errors='replace',
                                               decode_param_names=False,
                                               language='en-us'))
    def __init__(self, environ):
        self.environ = environ
        # This isn't "state" really, since the object is derivative:
        self.headers = EnvironHeaders(environ)

        defaults = self.defaults._current_obj()
        self.charset = defaults.get('charset')
        if self.charset:
            # There's a charset: params will be coerced to unicode. In that
            # case, attempt to use the charset specified by the browser
            browser_charset = self.determine_browser_charset()
            if browser_charset:
                self.charset = browser_charset
        self.errors = defaults.get('errors', 'strict')
        self.decode_param_names = defaults.get('decode_param_names', False)
        # Lazily-computed cache for the ``languages`` property.
        self._languages = None

    # Attributes delegated straight to keys in the WSGI environ:
    body = environ_getter('wsgi.input')
    scheme = environ_getter('wsgi.url_scheme')
    method = environ_getter('REQUEST_METHOD')
    script_name = environ_getter('SCRIPT_NAME')
    path_info = environ_getter('PATH_INFO')

    def urlvars(self):
        """
        Return any variables matched in the URL (e.g.,
        ``wsgiorg.routing_args``).
        """
        if 'paste.urlvars' in self.environ:
            return self.environ['paste.urlvars']
        elif 'wsgiorg.routing_args' in self.environ:
            return self.environ['wsgiorg.routing_args'][1]
        else:
            return {}
    urlvars = property(urlvars, doc=urlvars.__doc__)

    def is_xhr(self):
        """Returns a boolean if X-Requested-With is present and a XMLHttpRequest"""
        return self.environ.get('HTTP_X_REQUESTED_WITH', '') == 'XMLHttpRequest'
    is_xhr = property(is_xhr, doc=is_xhr.__doc__)

    def host(self):
        """Host name provided in HTTP_HOST, with fall-back to SERVER_NAME"""
        return self.environ.get('HTTP_HOST', self.environ.get('SERVER_NAME'))
    host = property(host, doc=host.__doc__)

    def languages(self):
        """Return a list of preferred languages, most preferred first.

        The list may be empty.
        """
        if self._languages is not None:
            return self._languages
        langs = ACCEPT_LANGUAGE.parse(self.environ)
        fallback = self.defaults.get('language', 'en-us')
        if not fallback:
            return langs
        if fallback not in langs:
            langs.append(fallback)
        index = langs.index(fallback)
        # Languages less preferred than the fallback would never be
        # chosen, so truncate the list right after the fallback entry.
        langs[index+1:] = []
        self._languages = langs
        return self._languages
    languages = property(languages, doc=languages.__doc__)

    def _GET(self):
        # Raw (non-unicode) QUERY_STRING parameters.
        return parse_dict_querystring(self.environ)

    def GET(self):
        """
        Dictionary-like object representing the QUERY_STRING
        parameters. Always present, if possibly empty.

        If the same key is present in the query string multiple times, a
        list of its values can be retrieved from the ``MultiDict`` via
        the ``getall`` method.

        Returns a ``MultiDict`` container or a ``UnicodeMultiDict`` when
        ``charset`` is set.
        """
        params = self._GET()
        if self.charset:
            params = UnicodeMultiDict(params, encoding=self.charset,
                                      errors=self.errors,
                                      decode_keys=self.decode_param_names)
        return params
    GET = property(GET, doc=GET.__doc__)

    def _POST(self):
        # Raw POST body variables; GET variables deliberately excluded.
        return parse_formvars(self.environ, include_get_vars=False,
                              encoding=self.charset, errors=self.errors)

    def POST(self):
        """Dictionary-like object representing the POST body.

        Most values are encoded strings, or unicode strings when
        ``charset`` is set. There may also be FieldStorage objects
        representing file uploads. If this is not a POST request, or the
        body is not encoded fields (e.g., an XMLRPC request) then this
        will be empty.

        This will consume wsgi.input when first accessed if applicable,
        but the raw version will be put in
        environ['paste.parsed_formvars'].

        Returns a ``MultiDict`` container or a ``UnicodeMultiDict`` when
        ``charset`` is set.
        """
        params = self._POST()
        if self.charset:
            params = UnicodeMultiDict(params, encoding=self.charset,
                                      errors=self.errors,
                                      decode_keys=self.decode_param_names)
        return params
    POST = property(POST, doc=POST.__doc__)

    def params(self):
        """Dictionary-like object of keys from POST, GET, URL dicts

        Return a key value from the parameters, they are checked in the
        following order: POST, GET, URL

        Additional methods supported:

        ``getlist(key)``
            Returns a list of all the values by that key, collected from
            POST, GET, URL dicts

        Returns a ``MultiDict`` container or a ``UnicodeMultiDict`` when
        ``charset`` is set.
        """
        params = MultiDict()
        params.update(self._POST())
        params.update(self._GET())
        if self.charset:
            params = UnicodeMultiDict(params, encoding=self.charset,
                                      errors=self.errors,
                                      decode_keys=self.decode_param_names)
        return params
    params = property(params, doc=params.__doc__)

    def cookies(self):
        """Dictionary of cookies keyed by cookie name.

        Just a plain dictionary, may be empty but not None.

        """
        return get_cookie_dict(self.environ)
    cookies = property(cookies, doc=cookies.__doc__)

    def determine_browser_charset(self):
        """
        Determine the encoding as specified by the browser via the
        Content-Type's charset parameter, if one is set
        """
        charset_match = _CHARSET_RE.search(self.headers.get('Content-Type', ''))
        if charset_match:
            return charset_match.group(1)

    def match_accept(self, mimetypes):
        """Return a list of specified mime-types that the browser's HTTP Accept
        header allows in the order provided."""
        return desired_matches(mimetypes,
                               self.environ.get('HTTP_ACCEPT', '*/*'))

    def __repr__(self):
        """Show important attributes of the WSGIRequest"""
        pf = pformat
        msg = '<%s.%s object at 0x%x method=%s,' % \
            (self.__class__.__module__, self.__class__.__name__,
             id(self), pf(self.method))
        msg += '\nscheme=%s, host=%s, script_name=%s, path_info=%s,' % \
            (pf(self.scheme), pf(self.host), pf(self.script_name),
             pf(self.path_info))
        msg += '\nlanguages=%s,' % pf(self.languages)
        if self.charset:
            msg += ' charset=%s, errors=%s,' % (pf(self.charset),
                                                pf(self.errors))
        msg += '\nGET=%s,' % pf(self.GET)
        msg += '\nPOST=%s,' % pf(self.POST)
        msg += '\ncookies=%s>' % pf(self.cookies)
        return msg
+
class WSGIResponse(object):
    """A basic HTTP response with content, headers, and out-bound cookies

    The class variable ``defaults`` specifies default values for
    ``content_type``, ``charset`` and ``errors``. These can be overridden
    for the current request via the registry.

    """
    defaults = StackedObjectProxy(
        default=dict(content_type='text/html', charset='utf-8',
                     errors='strict', headers={'Cache-Control':'no-cache'})
        )
    def __init__(self, content=b'', mimetype=None, code=200):
        self._iter = None
        self._is_str_iter = True

        self.content = content
        self.headers = HeaderDict()
        self.cookies = SimpleCookie()
        self.status_code = code

        defaults = self.defaults._current_obj()
        if not mimetype:
            mimetype = defaults.get('content_type', 'text/html')
            charset = defaults.get('charset')
            if charset:
                mimetype = '%s; charset=%s' % (mimetype, charset)
        self.headers.update(defaults.get('headers', {}))
        self.headers['Content-Type'] = mimetype
        self.errors = defaults.get('errors', 'strict')

    def __str__(self):
        """Returns a rendition of the full HTTP message, including headers.

        When the content is an iterator, the actual content is replaced with the
        output of str(iterator) (to avoid exhausting the iterator).
        """
        if self._is_str_iter:
            content = ''.join(self.get_content())
        else:
            content = str(self.content)
        return '\n'.join(['%s: %s' % (key, value)
                          for key, value in self.headers.headeritems()]) \
            + '\n\n' + content

    def __call__(self, environ, start_response):
        """Convenience call to return output and set status information

        Conforms to the WSGI interface for calling purposes only.

        Example usage:

        .. code-block:: python

            def wsgi_app(environ, start_response):
                response = WSGIResponse()
                response.write("Hello world")
                response.headers['Content-Type'] = 'latin1'
                return response(environ, start_response)

        """
        status_text = STATUS_CODE_TEXT[self.status_code]
        status = '%s %s' % (self.status_code, status_text)
        response_headers = self.headers.headeritems()
        for c in self.cookies.values():
            response_headers.append(('Set-Cookie', c.output(header='')))
        start_response(status, response_headers)
        # The ``file`` builtin only exists on Python 2; on Python 3 every
        # object returned by open() derives from io.IOBase, so test that
        # instead (the original ``isinstance(..., file)`` raised NameError
        # on Python 3).
        if six.PY2:
            is_file = isinstance(self.content, file)
        else:
            is_file = isinstance(self.content, io.IOBase)
        if 'wsgi.file_wrapper' in environ and is_file:
            return environ['wsgi.file_wrapper'](self.content)
        elif is_file:
            # Iterate until read() returns an empty result; ``or None``
            # normalizes the EOF sentinel for both text ('') and binary
            # (b'') file objects.
            return iter(lambda: self.content.read() or None, None)
        return self.get_content()

    def determine_charset(self):
        """
        Determine the encoding as specified by the Content-Type's charset
        parameter, if one is set
        """
        charset_match = _CHARSET_RE.search(self.headers.get('Content-Type', ''))
        if charset_match:
            return charset_match.group(1)

    def has_header(self, header):
        """
        Case-insensitive check for a header
        """
        # NOTE(review): dict.has_key is gone on Python 3 — this relies on
        # HeaderDict providing has_key; confirm against paste.response.
        warnings.warn('WSGIResponse.has_header is deprecated, use '
                      'WSGIResponse.headers.has_key instead', DeprecationWarning,
                      2)
        return self.headers.has_key(header)

    def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
                   domain=None, secure=None, httponly=None):
        """
        Define a cookie to be sent via the outgoing HTTP headers
        """
        self.cookies[key] = value
        for var_name, var_value in [
            ('max_age', max_age), ('path', path), ('domain', domain),
            ('secure', secure), ('expires', expires), ('httponly', httponly)]:
            if var_value is not None and var_value is not False:
                self.cookies[key][var_name.replace('_', '-')] = var_value

    def delete_cookie(self, key, path='/', domain=None):
        """
        Notify the browser the specified cookie has expired and should be
        deleted (via the outgoing HTTP headers)
        """
        self.cookies[key] = ''
        if path is not None:
            self.cookies[key]['path'] = path
        if domain is not None:
            self.cookies[key]['domain'] = domain
        # Both mechanisms, for old and new browsers alike:
        self.cookies[key]['expires'] = 0
        self.cookies[key]['max-age'] = 0

    def _set_content(self, content):
        if not isinstance(content, (six.binary_type, six.text_type)):
            # Anything non-string is stored as-is; only lists still count
            # as writable string iterables.
            self._iter = content
            if isinstance(content, list):
                self._is_str_iter = True
            else:
                self._is_str_iter = False
        else:
            self._iter = [content]
            self._is_str_iter = True
    content = property(lambda self: self._iter, _set_content,
                       doc='Get/set the specified content, where content can '
                       'be: a string, a list of strings, a generator function '
                       'that yields strings, or an iterable object that '
                       'produces strings.')

    def get_content(self):
        """
        Returns the content as an iterable of strings, encoding each element of
        the iterator from a Unicode object if necessary.
        """
        charset = self.determine_charset()
        if charset:
            return encode_unicode_app_iter(self.content, charset, self.errors)
        else:
            return self.content

    def wsgi_response(self):
        """
        Return this WSGIResponse as a tuple of WSGI formatted data, including:
        (status, headers, iterable)
        """
        status_text = STATUS_CODE_TEXT[self.status_code]
        status = '%s %s' % (self.status_code, status_text)
        response_headers = self.headers.headeritems()
        for c in self.cookies.values():
            response_headers.append(('Set-Cookie', c.output(header='')))
        return status, response_headers, self.get_content()

    # The remaining methods partially implement the file-like object interface.
    # See http://docs.python.org/lib/bltin-file-objects.html
    def write(self, content):
        if not self._is_str_iter:
            raise IOError("This %s instance's content is not writable: (content "
                          'is an iterator)' % self.__class__.__name__)
        self.content.append(content)

    def flush(self):
        # File-like API no-op: content is buffered in memory.
        pass

    def tell(self):
        if not self._is_str_iter:
            raise IOError('This %s instance cannot tell its position: (content '
                          'is an iterator)' % self.__class__.__name__)
        return sum([len(chunk) for chunk in self._iter])

    ########################################
    ## Content-type and charset

    def charset__get(self):
        """
        Get/set the charset (in the Content-Type)
        """
        header = self.headers.get('content-type')
        if not header:
            return None
        match = _CHARSET_RE.search(header)
        if match:
            return match.group(1)
        return None

    def charset__set(self, charset):
        if charset is None:
            del self.charset
            return
        try:
            header = self.headers.pop('content-type')
        except KeyError:
            raise AttributeError(
                "You cannot set the charset when no content-type is defined")
        match = _CHARSET_RE.search(header)
        if match:
            # Strip any existing charset parameter before appending ours.
            header = header[:match.start()] + header[match.end():]
        header += '; charset=%s' % charset
        self.headers['content-type'] = header

    def charset__del(self):
        try:
            header = self.headers.pop('content-type')
        except KeyError:
            # Don't need to remove anything
            return
        match = _CHARSET_RE.search(header)
        if match:
            header = header[:match.start()] + header[match.end():]
        self.headers['content-type'] = header

    charset = property(charset__get, charset__set, charset__del, doc=charset__get.__doc__)

    def content_type__get(self):
        """
        Get/set the Content-Type header (or None), *without* the
        charset or any parameters.

        If you include parameters (or ``;`` at all) when setting the
        content_type, any existing parameters will be deleted;
        otherwise they will be preserved.
        """
        header = self.headers.get('content-type')
        if not header:
            return None
        return header.split(';', 1)[0]

    def content_type__set(self, value):
        if ';' not in value:
            # Preserve existing parameters (e.g. charset) when the caller
            # supplies a bare mime type.
            header = self.headers.get('content-type', '')
            if ';' in header:
                params = header.split(';', 1)[1]
                value += ';' + params
        self.headers['content-type'] = value

    def content_type__del(self):
        try:
            del self.headers['content-type']
        except KeyError:
            pass

    content_type = property(content_type__get, content_type__set,
                            content_type__del, doc=content_type__get.__doc__)
+
## @@ I'd love to remove this, but paste.httpexceptions.get_exception
## doesn't seem to work...
# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
# Maps numeric HTTP status codes to upper-cased reason phrases; used by
# WSGIResponse.__call__/wsgi_response to build the WSGI status line.
# Unknown codes raise KeyError there.
STATUS_CODE_TEXT = {
    100: 'CONTINUE',
    101: 'SWITCHING PROTOCOLS',
    200: 'OK',
    201: 'CREATED',
    202: 'ACCEPTED',
    203: 'NON-AUTHORITATIVE INFORMATION',
    204: 'NO CONTENT',
    205: 'RESET CONTENT',
    206: 'PARTIAL CONTENT',
    226: 'IM USED',
    300: 'MULTIPLE CHOICES',
    301: 'MOVED PERMANENTLY',
    302: 'FOUND',
    303: 'SEE OTHER',
    304: 'NOT MODIFIED',
    305: 'USE PROXY',
    306: 'RESERVED',
    307: 'TEMPORARY REDIRECT',
    400: 'BAD REQUEST',
    401: 'UNAUTHORIZED',
    402: 'PAYMENT REQUIRED',
    403: 'FORBIDDEN',
    404: 'NOT FOUND',
    405: 'METHOD NOT ALLOWED',
    406: 'NOT ACCEPTABLE',
    407: 'PROXY AUTHENTICATION REQUIRED',
    408: 'REQUEST TIMEOUT',
    409: 'CONFLICT',
    410: 'GONE',
    411: 'LENGTH REQUIRED',
    412: 'PRECONDITION FAILED',
    413: 'REQUEST ENTITY TOO LARGE',
    414: 'REQUEST-URI TOO LONG',
    415: 'UNSUPPORTED MEDIA TYPE',
    416: 'REQUESTED RANGE NOT SATISFIABLE',
    417: 'EXPECTATION FAILED',
    429: 'TOO MANY REQUESTS',
    500: 'INTERNAL SERVER ERROR',
    501: 'NOT IMPLEMENTED',
    502: 'BAD GATEWAY',
    503: 'SERVICE UNAVAILABLE',
    504: 'GATEWAY TIMEOUT',
    505: 'HTTP VERSION NOT SUPPORTED',
}
diff --git a/regen-docs b/regen-docs
new file mode 100755
index 0000000..c98763d
--- /dev/null
+++ b/regen-docs
@@ -0,0 +1,9 @@
#!/bin/sh

# Build the Sphinx HTML documentation into docs/_build; abort on failure.
mkdir -p docs/_static docs/_build
sphinx-build -E -b html docs/ docs/_build || exit 1
# With "publish" as the first argument, also upload the built docs.
if [ "$1" = "publish" ] ; then
    cd docs/
    echo "Uploading files..."
    scp -r _build/* ianb@webwareforpython.org:/home/paste/htdocs/
fi
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..e60fe4f
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,8 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
+[aliases]
+distribute = register sdist bdist_egg upload pudge publish
+
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..28ca0df
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,116 @@
+# Procedure to release a new version:
+#
+# - run tests: run tox
+# - update version in setup.py (__version__)
+# - update tag_build in setup.cfg
+# - check that "python setup.py sdist" contains all files tracked by
+# the SCM (Mercurial): update MANIFEST.in if needed
+# - update changelog: docs/news.txt
+#
+# - hg ci
+# - hg tag VERSION
+# - hg push
+# - python2 setup.py register sdist bdist_wheel upload
+# - python3 setup.py bdist_wheel upload
+#
+# - increment version in setup.py (__version__)
+# - hg ci && hg push
+
# Current release version; also referenced by the release procedure above.
__version__ = '2.0.3'

from setuptools import setup, find_packages
import sys, os
# Make paste/util importable so finddata (used to collect package_data)
# loads without the package being installed first.
sys.path.insert(0, os.path.join(os.path.dirname(__file__),
                                'paste', 'util'))
import finddata

with open("README.rst") as fp:
    README = fp.read()

setup(name="Paste",
      version=__version__,
      description="Tools for using a Web Server Gateway Interface stack",
      long_description=README,
      classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Topic :: Internet :: WWW/HTTP",
        "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Internet :: WWW/HTTP :: WSGI",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware",
        "Topic :: Internet :: WWW/HTTP :: WSGI :: Server",
        "Framework :: Paste",
        ],
      keywords='web application server wsgi',
      author="Ian Bicking",
      author_email="ianb@colorstudy.com",
      url="http://pythonpaste.org",
      license="MIT",
      packages=find_packages(exclude=['ez_setup', 'examples', 'packages', 'tests*']),
      package_data=finddata.find_package_data(
          exclude_directories=finddata.standard_exclude_directories + ('tests',)),
      namespace_packages=['paste'],
      zip_safe=False,
      test_suite='nose.collector',
      install_requires=['six>=1.4.0'],
      tests_require=['nose>=0.11'],
      extras_require={
          'subprocess': [],
          'hotshot': [],
          'Flup': ['flup'],
          'Paste': [],
          'openid': ['python-openid'],
          },
      entry_points="""
      [paste.app_factory]
      cgi = paste.cgiapp:make_cgi_application [subprocess]
      static = paste.urlparser:make_static
      pkg_resources = paste.urlparser:make_pkg_resources
      urlparser = paste.urlparser:make_url_parser
      proxy = paste.proxy:make_proxy
      test = paste.debug.debugapp:make_test_app
      test_slow = paste.debug.debugapp:make_slow_app
      transparent_proxy = paste.proxy:make_transparent_proxy
      watch_threads = paste.debug.watchthreads:make_watch_threads

      [paste.composite_factory]
      urlmap = paste.urlmap:urlmap_factory
      cascade = paste.cascade:make_cascade

      [paste.filter_app_factory]
      error_catcher = paste.exceptions.errormiddleware:make_error_middleware
      cgitb = paste.cgitb_catcher:make_cgitb_middleware
      flup_session = paste.flup_session:make_session_middleware [Flup]
      gzip = paste.gzipper:make_gzip_middleware
      httpexceptions = paste.httpexceptions:make_middleware
      lint = paste.lint:make_middleware
      printdebug = paste.debug.prints:PrintDebugMiddleware
      profile = paste.debug.profile:make_profile_middleware [hotshot]
      recursive = paste.recursive:make_recursive_middleware
      # This isn't good enough to deserve the name egg:Paste#session:
      paste_session = paste.session:make_session_middleware
      wdg_validate = paste.debug.wdg_validate:make_wdg_validate_middleware [subprocess]
      evalerror = paste.evalexception.middleware:make_eval_exception
      auth_tkt = paste.auth.auth_tkt:make_auth_tkt_middleware
      auth_basic = paste.auth.basic:make_basic
      auth_digest = paste.auth.digest:make_digest
      auth_form = paste.auth.form:make_form
      grantip = paste.auth.grantip:make_grantip
      openid = paste.auth.open_id:make_open_id_middleware [openid]
      pony = paste.pony:make_pony
      cowbell = paste.cowbell:make_cowbell
      errordocument = paste.errordocument:make_errordocument
      auth_cookie = paste.auth.cookie:make_auth_cookie
      translogger = paste.translogger:make_filter
      config = paste.config:make_config_filter
      registry = paste.registry:make_registry_manager

      [paste.server_runner]
      http = paste.httpserver:server_runner
      """,
      )
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..3e2e36a
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,7 @@
import sys
import os

# Make the checkout root importable so `import paste` resolves to this
# source tree rather than an installed copy.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

import pkg_resources
# Fail early if the Paste distribution metadata is unavailable.
pkg_resources.require('Paste')
diff --git a/tests/cgiapp_data/error.cgi b/tests/cgiapp_data/error.cgi
new file mode 100755
index 0000000..e11c766
--- /dev/null
+++ b/tests/cgiapp_data/error.cgi
@@ -0,0 +1,3 @@
#!/usr/bin/env python

# CGI fixture that emits a body with no headers at all — malformed on
# purpose (presumably to make CGIApplication raise; see tests/test_cgiapp.py).
print('hey you!')
diff --git a/tests/cgiapp_data/form.cgi b/tests/cgiapp_data/form.cgi
new file mode 100755
index 0000000..c4c562d
--- /dev/null
+++ b/tests/cgiapp_data/form.cgi
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+
+import cgi
+import six
+
# Emit the CGI header block, then echo the parsed form fields below.
print('Content-type: text/plain')
print('')

if six.PY3:
    # Python 3: cgi.FieldStorage keeps some field names as unicode and some as
    # the repr() of byte strings, duh.

    class FieldStorage(cgi.FieldStorage):

        def _key_candidates(self, key):
            # First try the key exactly as given...
            yield key

            try:
                # assume bytes, coerce to str
                try:
                    yield key.decode(self.encoding)
                except UnicodeDecodeError:
                    pass
            except AttributeError:
                # assume str, coerce to bytes
                try:
                    yield key.encode(self.encoding)
                except UnicodeEncodeError:
                    pass

        def __getitem__(self, key):
            # Look the key up under each candidate spelling, remembering
            # the first KeyError so it can be re-raised verbatim.
            superobj = super(FieldStorage, self)

            error = None

            for candidate in self._key_candidates(key):
                if isinstance(candidate, bytes):
                    # ouch
                    candidate = repr(candidate)
                try:
                    return superobj.__getitem__(candidate)
                except KeyError as e:
                    if error is None:
                        error = e

            # fall through, re-raise the first KeyError
            raise error

        def __contains__(self, key):
            superobj = super(FieldStorage, self)

            for candidate in self._key_candidates(key):
                if superobj.__contains__(candidate):
                    return True
            return False

else: # PY2

    FieldStorage = cgi.FieldStorage


form = FieldStorage()

# Echo the upload's filename, the text field, and the upload's contents.
print('Filename: %s' % form['up'].filename)
print('Name: %s' % form['name'].value)
print('Content: %s' % form['up'].file.read())
diff --git a/tests/cgiapp_data/ok.cgi b/tests/cgiapp_data/ok.cgi
new file mode 100755
index 0000000..d03f0b9
--- /dev/null
+++ b/tests/cgiapp_data/ok.cgi
@@ -0,0 +1,5 @@
#!/usr/bin/env python
# Minimal well-formed CGI response: headers, blank separator line, body.
print('Content-type: text/html; charset=UTF-8')
print('Status: 200 Okay')
print('')
print('This is the body')
diff --git a/tests/cgiapp_data/stderr.cgi b/tests/cgiapp_data/stderr.cgi
new file mode 100755
index 0000000..d2520b6
--- /dev/null
+++ b/tests/cgiapp_data/stderr.cgi
@@ -0,0 +1,8 @@
#!/usr/bin/env python
# CGI fixture: returns a 500 response AND writes to stderr, so the test
# suite can verify that stderr output is captured by the wrapper.
from __future__ import print_function
import sys
print('Status: 500 Server Error')
print('Content-type: text/html')
print()
print('There was an error')
print('some data on the error', file=sys.stderr)
diff --git a/tests/test_auth/__init__.py b/tests/test_auth/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/test_auth/__init__.py
diff --git a/tests/test_auth/test_auth_cookie.py b/tests/test_auth/test_auth_cookie.py
new file mode 100644
index 0000000..38e37b8
--- /dev/null
+++ b/tests/test_auth/test_auth_cookie.py
@@ -0,0 +1,46 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from six.moves import xrange
+import six
+
+from paste.auth import cookie
+from paste.wsgilib import raw_interactive, dump_environ
+from paste.response import header_value
+from paste.httpexceptions import *
+
def build(application,setenv, *args, **kwargs):
    # Wrap `application` so each request registers the keys of `setenv`
    # with paste.auth.cookie's save list and injects their values into
    # the environ, then stack the cookie middleware under test on top.
    def setter(environ, start_response):
        save = environ['paste.auth.cookie'].append
        for (k,v) in setenv.items():
            save(k)
            environ[k] = v
        return application(environ, start_response)
    return cookie.middleware(setter,*args,**kwargs)
+
def test_noop():
    """With nothing stored, the middleware must not emit a Set-Cookie."""
    application = build(dump_environ, {})
    status, headers, content, errors = raw_interactive(application)
    assert not header_value(headers, 'Set-Cookie')
+
def test_basic(key='key', val='bingles'):
    # Store one value through the middleware, inspect the Set-Cookie
    # header, then replay the cookie and check the value round-trips
    # back into the environ dump.
    app = build(dump_environ,{key:val})
    (status,headers,content,errors) = \
        raw_interactive(app)
    value = header_value(headers,'Set-Cookie')
    assert "Path=/;" in value
    assert "expires=" not in value
    cookie = value.split(";")[0]
    (status,headers,content,errors) = \
        raw_interactive(app,{'HTTP_COOKIE': cookie})
    expected = ("%s: %s" % (key,val.replace("\n","\n ")))
    if six.PY3:
        expected = expected.encode('utf8')
    assert expected in content
+
def test_roundtrip():
    # Push every code point 0-255 through the cookie encode/decode cycle
    # (on Python 3 chr() yields text, not bytes — presumably the cookie
    # layer round-trips these latin-1 style; confirm if this ever fails).
    roundtrip = str('').join(map(chr, xrange(256)))
    test_basic(roundtrip,roundtrip)
+
diff --git a/tests/test_auth/test_auth_digest.py b/tests/test_auth/test_auth_digest.py
new file mode 100644
index 0000000..1d44038
--- /dev/null
+++ b/tests/test_auth/test_auth_digest.py
@@ -0,0 +1,93 @@
+# (c) 2005 Clark C. Evans
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from paste.auth.digest import *
+from paste.wsgilib import raw_interactive
+from paste.httpexceptions import *
+from paste.httpheaders import AUTHORIZATION, WWW_AUTHENTICATE, REMOTE_USER
+import os
+import six
+
def application(environ, start_response):
    # Minimal WSGI app: echoes the authenticated user (REMOTE_USER) back
    # as the response body.
    # NOTE(review): Content-Length is passed as an int; WSGI expects
    # header values to be str.
    content = REMOTE_USER(environ)
    start_response("200 OK",(('Content-Type', 'text/plain'),
                             ('Content-Length', len(content))))

    if six.PY3:
        content = content.encode('utf8')
    return [content]
+
# Digest realm shared by the handler and the challenge checks below.
realm = "tag:clarkevans.com,2005:testing"
+
def backwords(environ, realm, username):
    """Dummy password-hash callback: the password is the username reversed."""
    return digest_password(realm, username, username[::-1])
+
# Stack digest authentication and HTTP-exception rendering around the app.
application = AuthDigestHandler(application,realm,backwords)
application = HTTPExceptionHandler(application)
+
def check(username, password, path="/"):
    """ perform two-stage authentication to verify login """
    # Unauthenticated request must be challenged with a 401.
    (status,headers,content,errors) = \
        raw_interactive(application,path, accept='text/html')
    assert status.startswith("401")
    challenge = WWW_AUTHENTICATE(headers)
    # Build an Authorization header answering the digest challenge.
    response = AUTHORIZATION(username=username, password=password,
                             challenge=challenge, path=path)
    assert "Digest" in response and username in response
    (status,headers,content,errors) = \
        raw_interactive(application,path,
                        HTTP_AUTHORIZATION=response)
    # Body on success, None when rejected, anything else is a failure.
    if status.startswith("200"):
        return content
    if status.startswith("401"):
        return None
    assert False, "Unexpected Status: %s" % status
+
def test_digest():
    # Correct password (username reversed) authenticates; a wrong one
    # yields another 401, which check() reports as None.
    assert b'bing' == check("bing","gnib")
    assert check("bing","bad") is None
+
+#
+# The following code uses sockets to test the functionality,
+# to enable use:
+#
+# $ TEST_SOCKET py.test
+#
+
# Socket-based end-to-end tests; only defined when TEST_SOCKET is set in
# the environment (see the comment block above).
if os.environ.get("TEST_SOCKET",""):
    from six.moves.urllib.error import HTTPError
    from six.moves.urllib.request import build_opener, HTTPDigestAuthHandler
    from paste.debug.testserver import serve
    server = serve(application)

    def authfetch(username,password,path="/",realm=realm):
        # Fetch `path` from the local test server through urllib's digest
        # auth handler and return the response body.
        server.accept(2)
        import socket
        socket.setdefaulttimeout(5)
        uri = ("http://%s:%s" % server.server_address) + path
        auth = HTTPDigestAuthHandler()
        auth.add_password(realm,uri,username,password)
        opener = build_opener(auth)
        result = opener.open(uri)
        return result.read()

    def test_success():
        assert "bing" == authfetch('bing','gnib')

    def test_failure():
        # urllib tries 5 more times before it gives up
        server.accept(5)
        try:
            authfetch('bing','wrong')
            assert False, "this should raise an exception"
        except HTTPError as e:
            assert e.code == 401

    def test_shutdown():
        server.stop()
+
diff --git a/tests/test_cgiapp.py b/tests/test_cgiapp.py
new file mode 100644
index 0000000..900e83e
--- /dev/null
+++ b/tests/test_cgiapp.py
@@ -0,0 +1,59 @@
+import os
+import sys
+from nose.tools import assert_raises
+from paste.cgiapp import CGIApplication, CGIError
+from paste.fixture import *
+
+data_dir = os.path.join(os.path.dirname(__file__), 'cgiapp_data')
+
+# these CGI scripts can't work on Windows or Jython
+if sys.platform != 'win32' and not sys.platform.startswith('java'):
+
+ # Ensure the CGI scripts are called with the same python interpreter. Put a
+ # symlink to the interpreter executable into the path...
+ def setup_module():
+ global oldpath, pyexelink
+ oldpath = os.environ.get('PATH', None)
+ os.environ['PATH'] = data_dir + os.path.pathsep + oldpath
+ pyexelink = os.path.join(data_dir, "python")
+ try:
+ os.unlink(pyexelink)
+ except OSError:
+ pass
+ os.symlink(sys.executable, pyexelink)
+
+ # ... and clean up again.
+ def teardown_module():
+ global oldpath, pyexelink
+ os.unlink(pyexelink)
+ if oldpath is not None:
+ os.environ['PATH'] = oldpath
+ else:
+ del os.environ['PATH']
+
+ def test_ok():
+ app = TestApp(CGIApplication({}, script='ok.cgi', path=[data_dir]))
+ res = app.get('')
+ assert res.header('content-type') == 'text/html; charset=UTF-8'
+ assert res.full_status == '200 Okay'
+ assert 'This is the body' in res
+
+ def test_form():
+ app = TestApp(CGIApplication({}, script='form.cgi', path=[data_dir]))
+ res = app.post('', params={'name': b'joe'},
+ upload_files=[('up', 'file.txt', b'x'*10000)])
+ assert 'file.txt' in res
+ assert 'joe' in res
+ assert 'x'*10000 in res
+
+ def test_error():
+ app = TestApp(CGIApplication({}, script='error.cgi', path=[data_dir]))
+ assert_raises(CGIError, app.get, '', status=500)
+
+ def test_stderr():
+ app = TestApp(CGIApplication({}, script='stderr.cgi', path=[data_dir]))
+ res = app.get('', expect_errors=True)
+ assert res.status == 500
+ assert 'error' in res
+ assert b'some data' in res.errors
+
diff --git a/tests/test_cgitb_catcher.py b/tests/test_cgitb_catcher.py
new file mode 100644
index 0000000..a63f7d8
--- /dev/null
+++ b/tests/test_cgitb_catcher.py
@@ -0,0 +1,78 @@
+from paste.fixture import *
+from paste.cgitb_catcher import CgitbMiddleware
+from paste import lint
+from .test_exceptions.test_error_middleware import clear_middleware
+
+def do_request(app, expect_status=500):
+ app = lint.middleware(app)
+ app = CgitbMiddleware(app, {}, display=True)
+ app = clear_middleware(app)
+ testapp = TestApp(app)
+ res = testapp.get('', status=expect_status,
+ expect_errors=True)
+ return res
+
+
+############################################################
+## Applications that raise exceptions
+############################################################
+
+def bad_app():
+ "No argument list!"
+ return None
+
+def start_response_app(environ, start_response):
+ "raise error before start_response"
+ raise ValueError("hi")
+
+def after_start_response_app(environ, start_response):
+ start_response("200 OK", [('Content-type', 'text/plain')])
+ raise ValueError('error2')
+
+def iter_app(environ, start_response):
+ start_response("200 OK", [('Content-type', 'text/plain')])
+ return yielder([b'this', b' is ', b' a', None])
+
+def yielder(args):
+ for arg in args:
+ if arg is None:
+ raise ValueError("None raises error")
+ yield arg
+
+############################################################
+## Tests
+############################################################
+
+def test_makes_exception():
+ res = do_request(bad_app)
+ print(res)
+ if six.PY3:
+ assert 'bad_app() takes 0 positional arguments but 2 were given' in res
+ else:
+ assert 'bad_app() takes no arguments (2 given' in res
+ assert 'iterator = application(environ, start_response_wrapper)' in res
+ assert 'lint.py' in res
+ assert 'cgitb_catcher.py' in res
+
+def test_start_res():
+ res = do_request(start_response_app)
+ print(res)
+ assert 'ValueError: hi' in res
+ assert 'test_cgitb_catcher.py' in res
+ assert 'line 26, in start_response_app' in res
+
+def test_after_start():
+ res = do_request(after_start_response_app, 200)
+ print(res)
+ assert 'ValueError: error2' in res
+ assert 'line 30' in res
+
+def test_iter_app():
+ res = do_request(iter_app, 200)
+ print(res)
+ assert 'None raises error' in res
+ assert 'yielder' in res
+
+
+
+
diff --git a/tests/test_config.py b/tests/test_config.py
new file mode 100644
index 0000000..8119157
--- /dev/null
+++ b/tests/test_config.py
@@ -0,0 +1,85 @@
+# (c) 2007 Philip Jenvey; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+from nose.tools import assert_raises
+from paste.config import CONFIG, ConfigMiddleware
+from paste.fixture import TestApp
+import six
+
+test_key = 'test key'
+
+def reset_config():
+ while True:
+ try:
+ CONFIG._pop_object()
+ except IndexError:
+ break
+
+def app_with_config(environ, start_response):
+ start_response('200 OK', [('Content-type','text/plain')])
+ lines = ['Variable is: %s\n' % CONFIG[test_key],
+ 'Variable is (in environ): %s' % environ['paste.config'][test_key]]
+ if six.PY3:
+ lines = [line.encode('utf8') for line in lines]
+ return lines
+
+class NestingAppWithConfig(object):
+ def __init__(self, app):
+ self.app = app
+
+ def __call__(self, environ, start_response):
+ response = self.app(environ, start_response)
+ assert isinstance(response, list)
+ supplement = ['Nesting variable is: %s' % CONFIG[test_key],
+ 'Nesting variable is (in environ): %s' % \
+ environ['paste.config'][test_key]]
+ if six.PY3:
+ supplement = [line.encode('utf8') for line in supplement]
+ response.extend(supplement)
+ return response
+
+def test_request_config():
+ try:
+ config = {test_key: 'test value'}
+ app = ConfigMiddleware(app_with_config, config)
+ res = TestApp(app).get('/')
+ assert 'Variable is: test value' in res
+ assert 'Variable is (in environ): test value' in res
+ finally:
+ reset_config()
+
+def test_request_config_multi():
+ try:
+ config = {test_key: 'test value'}
+ app = ConfigMiddleware(app_with_config, config)
+ config = {test_key: 'nesting value'}
+ app = ConfigMiddleware(NestingAppWithConfig(app), config)
+ res = TestApp(app).get('/')
+ assert 'Variable is: test value' in res
+ assert 'Variable is (in environ): test value' in res
+ assert 'Nesting variable is: nesting value' in res
+ print(res)
+ assert 'Nesting variable is (in environ): nesting value' in res
+ finally:
+ reset_config()
+
+def test_process_config(request_app=test_request_config):
+ try:
+ process_config = {test_key: 'bar', 'process_var': 'foo'}
+ CONFIG.push_process_config(process_config)
+
+ assert CONFIG[test_key] == 'bar'
+ assert CONFIG['process_var'] == 'foo'
+
+ request_app()
+
+ assert CONFIG[test_key] == 'bar'
+ assert CONFIG['process_var'] == 'foo'
+ CONFIG.pop_process_config()
+
+ assert_raises(AttributeError, lambda: 'process_var' not in CONFIG)
+ assert_raises(IndexError, CONFIG.pop_process_config)
+ finally:
+ reset_config()
+
+def test_process_config_multi():
+ test_process_config(test_request_config_multi)
diff --git a/tests/test_doctests.py b/tests/test_doctests.py
new file mode 100644
index 0000000..d59d666
--- /dev/null
+++ b/tests/test_doctests.py
@@ -0,0 +1,63 @@
+import six
+import doctest
+from paste.util.import_string import simple_import
+import os
+
+filenames = [
+ 'tests/test_template.txt',
+ ]
+
+modules = [
+ 'paste.util.template',
+ 'paste.util.looper',
+ # This one opens up httpserver, which is bad:
+ #'paste.auth.cookie',
+ #'paste.auth.multi',
+ #'paste.auth.digest',
+ #'paste.auth.basic',
+ #'paste.auth.form',
+ #'paste.progress',
+ 'paste.exceptions.serial_number_generator',
+ 'paste.evalexception.evalcontext',
+ 'paste.util.dateinterval',
+ 'paste.util.quoting',
+ 'paste.wsgilib',
+ 'paste.url',
+ 'paste.request',
+ ]
+
+options = doctest.ELLIPSIS | doctest.REPORT_ONLY_FIRST_FAILURE
+if six.PY3:
+ options |= doctest.IGNORE_EXCEPTION_DETAIL
+
+def test_doctests():
+ for filename in filenames:
+ filename = os.path.join(
+ os.path.dirname(os.path.dirname(__file__)),
+ filename)
+ yield do_doctest, filename
+
+def do_doctest(filename):
+ failure, total = doctest.testfile(
+ filename, module_relative=False,
+ optionflags=options)
+ assert not failure, "Failure in %r" % filename
+
+def test_doctest_mods():
+ for module in modules:
+ yield do_doctest_mod, module
+
+def do_doctest_mod(module):
+ module = simple_import(module)
+ failure, total = doctest.testmod(
+ module, optionflags=options)
+ assert not failure, "Failure in %r" % module
+
+if __name__ == '__main__':
+ import sys
+ import doctest
+ args = sys.argv[1:]
+ if not args:
+ args = filenames
+ for filename in args:
+ doctest.testfile(filename, module_relative=False)
diff --git a/tests/test_errordocument.py b/tests/test_errordocument.py
new file mode 100644
index 0000000..efeae61
--- /dev/null
+++ b/tests/test_errordocument.py
@@ -0,0 +1,92 @@
+from paste.errordocument import forward
+from paste.fixture import *
+from paste.recursive import RecursiveMiddleware
+
+def simple_app(environ, start_response):
+ start_response("200 OK", [('Content-type', 'text/plain')])
+ return [b'requested page returned']
+
+def not_found_app(environ, start_response):
+ start_response("404 Not found", [('Content-type', 'text/plain')])
+ return [b'requested page returned']
+
+def test_ok():
+ app = TestApp(simple_app)
+ res = app.get('')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'requested page returned' in res
+
+def error_docs_app(environ, start_response):
+ if environ['PATH_INFO'] == '/not_found':
+ start_response("404 Not found", [('Content-type', 'text/plain')])
+ return [b'Not found']
+ elif environ['PATH_INFO'] == '/error':
+ start_response("200 OK", [('Content-type', 'text/plain')])
+ return [b'Page not found']
+ else:
+ return simple_app(environ, start_response)
+
+def test_error_docs_app():
+ app = TestApp(error_docs_app)
+ res = app.get('')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'requested page returned' in res
+ res = app.get('/error')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'Page not found' in res
+ res = app.get('/not_found', status=404)
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '404 Not found'
+ assert 'Not found' in res
+
+def test_forward():
+ app = forward(error_docs_app, codes={404:'/error'})
+ app = TestApp(RecursiveMiddleware(app))
+ res = app.get('')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'requested page returned' in res
+ res = app.get('/error')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'Page not found' in res
+ res = app.get('/not_found', status=404)
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '404 Not found'
+ # Note changed response
+ assert 'Page not found' in res
+
+def auth_required_app(environ, start_response):
+ start_response('401 Unauthorized', [('content-type', 'text/plain'), ('www-authenticate', 'Basic realm="Foo"')])
+ return ['Sign in!']
+
+def auth_docs_app(environ, start_response):
+ if environ['PATH_INFO'] == '/auth':
+ return auth_required_app(environ, start_response)
+ elif environ['PATH_INFO'] == '/auth_doc':
+ start_response("200 OK", [('Content-type', 'text/html')])
+ return [b'<html>Login!</html>']
+ else:
+ return simple_app(environ, start_response)
+
+def test_auth_docs_app():
+ wsgi_app = forward(auth_docs_app, codes={401: '/auth_doc'})
+ app = TestApp(wsgi_app)
+ res = app.get('/auth_doc')
+ assert res.header('content-type') == 'text/html'
+ res = app.get('/auth', status=401)
+ assert res.header('content-type') == 'text/html'
+ assert res.header('www-authenticate') == 'Basic realm="Foo"'
+ assert res.body == b'<html>Login!</html>'
+
+def test_bad_error():
+ def app(environ, start_response):
+ start_response('404 Not Found', [('content-type', 'text/plain')])
+ return ['not found']
+ app = forward(app, {404: '/404.html'})
+ app = TestApp(app)
+ resp = app.get('/test', expect_errors=True)
+ print(resp)
diff --git a/tests/test_exceptions/__init__.py b/tests/test_exceptions/__init__.py
new file mode 100644
index 0000000..792d600
--- /dev/null
+++ b/tests/test_exceptions/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/tests/test_exceptions/test_error_middleware.py b/tests/test_exceptions/test_error_middleware.py
new file mode 100644
index 0000000..95ab177
--- /dev/null
+++ b/tests/test_exceptions/test_error_middleware.py
@@ -0,0 +1,109 @@
+from paste.fixture import *
+from paste.exceptions.errormiddleware import ErrorMiddleware
+from paste import lint
+from paste.util.quoting import strip_html
+#
+# For some strange reason, these 4 lines cannot be removed or the regression
+# test breaks; is it counting the number of lines in the file somehow?
+#
+
+def do_request(app, expect_status=500):
+ app = lint.middleware(app)
+ app = ErrorMiddleware(app, {}, debug=True)
+ app = clear_middleware(app)
+ testapp = TestApp(app)
+ res = testapp.get('', status=expect_status,
+ expect_errors=True)
+ return res
+
+def clear_middleware(app):
+ """
+ The fixture sets paste.throw_errors, which suppresses exactly what
+ we want to test in this case. This wrapper also strips exc_info
+ on the *first* call to start_response (but not the second, or
+ subsequent calls.
+ """
+ def clear_throw_errors(environ, start_response):
+ headers_sent = []
+ def replacement(status, headers, exc_info=None):
+ if headers_sent:
+ return start_response(status, headers, exc_info)
+ headers_sent.append(True)
+ return start_response(status, headers)
+ if 'paste.throw_errors' in environ:
+ del environ['paste.throw_errors']
+ return app(environ, replacement)
+ return clear_throw_errors
+
+
+############################################################
+## Applications that raise exceptions
+############################################################
+
+def bad_app():
+ "No argument list!"
+ return None
+
+def unicode_bad_app(environ, start_response):
+ raise ValueError(u"\u1000")
+
+def start_response_app(environ, start_response):
+ "raise error before start_response"
+ raise ValueError("hi")
+
+def after_start_response_app(environ, start_response):
+ start_response("200 OK", [('Content-type', 'text/plain')])
+ raise ValueError('error2')
+
+def iter_app(environ, start_response):
+ start_response("200 OK", [('Content-type', 'text/plain')])
+ return yielder([b'this', b' is ', b' a', None])
+
+def yielder(args):
+ for arg in args:
+ if arg is None:
+ raise ValueError("None raises error")
+ yield arg
+
+############################################################
+## Tests
+############################################################
+
+def test_makes_exception():
+ res = do_request(bad_app)
+ assert '<html' in res
+ res = strip_html(str(res))
+ if six.PY3:
+ assert 'bad_app() takes 0 positional arguments but 2 were given' in res
+ else:
+ assert 'bad_app() takes no arguments (2 given' in res, repr(res)
+ assert 'iterator = application(environ, start_response_wrapper)' in res
+ assert 'paste.lint' in res
+ assert 'paste.exceptions.errormiddleware' in res
+
+def test_unicode_exception():
+ res = do_request(unicode_bad_app)
+
+
+def test_start_res():
+ res = do_request(start_response_app)
+ res = strip_html(str(res))
+ assert 'ValueError: hi' in res
+ assert 'test_error_middleware' in res
+ assert ':52 in start_response_app' in res
+
+def test_after_start():
+ res = do_request(after_start_response_app, 200)
+ res = strip_html(str(res))
+ #print res
+ assert 'ValueError: error2' in res
+
+def test_iter_app():
+ res = do_request(lint.middleware(iter_app), 200)
+ #print res
+ assert 'None raises error' in res
+ assert 'yielder' in res
+
+
+
+
diff --git a/tests/test_exceptions/test_formatter.py b/tests/test_exceptions/test_formatter.py
new file mode 100644
index 0000000..9c53a9a
--- /dev/null
+++ b/tests/test_exceptions/test_formatter.py
@@ -0,0 +1,183 @@
+from paste.exceptions import formatter
+from paste.exceptions import collector
+import sys
+import os
+import difflib
+
+class Mock(object):
+ def __init__(self, **kw):
+ for name, value in kw.items():
+ setattr(self, name, value)
+
+class Supplement(Mock):
+
+ object = 'test_object'
+ source_url = 'http://whatever.com'
+ info = 'This is some supplemental information'
+ args = ()
+ def getInfo(self):
+ return self.info
+
+ def __call__(self, *args):
+ self.args = args
+ return self
+
+class BadSupplement(Supplement):
+
+ def getInfo(self):
+ raise ValueError("This supplemental info is buggy")
+
+def call_error(sup):
+ 1 + 2
+ __traceback_supplement__ = (sup, ())
+ assert 0, "I am an error"
+
+def raise_error(sup='default'):
+ if sup == 'default':
+ sup = Supplement()
+ for i in range(10):
+ __traceback_info__ = i
+ if i == 5:
+ call_error(sup=sup)
+
+def hide(t, inner, *args, **kw):
+ __traceback_hide__ = t
+ return inner(*args, **kw)
+
+def pass_through(info, inner, *args, **kw):
+ """
+ To add another frame to the call; detectable because
+    __traceback_info__ is set to `info`
+ """
+ __traceback_info__ = info
+ return inner(*args, **kw)
+
+def format(type='html', **ops):
+ data = collector.collect_exception(*sys.exc_info())
+ report = getattr(formatter, 'format_' + type)(data, **ops)
+ return report
+
+formats = ('text', 'html')
+
+def test_excersize():
+ for f in formats:
+ try:
+ raise_error()
+ except:
+ format(f)
+
+def test_content():
+ for f in formats:
+ try:
+ raise_error()
+ except:
+ result = format(f)
+ print(result)
+ assert 'test_object' in result
+ assert 'http://whatever.com' in result
+ assert 'This is some supplemental information' in result
+ assert 'raise_error' in result
+ assert 'call_error' in result
+ assert '5' in result
+ assert 'test_content' in result
+ else:
+ assert 0
+
+def test_trim():
+ current = os.path.abspath(os.getcwd())
+ for f in formats:
+ try:
+ raise_error()
+ except:
+ result = format(f, trim_source_paths=[(current, '.')])
+ assert current not in result
+ assert ('%stest_formatter.py' % os.sep) in result, ValueError(repr(result))
+ else:
+ assert 0
+
+def test_hide():
+ for f in formats:
+ try:
+ hide(True, raise_error)
+ except:
+ result = format(f)
+ print(result)
+ assert 'in hide_inner' not in result
+ assert 'inner(*args, **kw)' not in result
+ else:
+ assert 0
+
+def print_diff(s1, s2):
+ differ = difflib.Differ()
+ result = list(differ.compare(s1.splitlines(), s2.splitlines()))
+ print('\n'.join(result))
+
+def test_hide_supppressed():
+ """
+ When an error occurs and __traceback_stop__ is true for the
+ erroneous frame, then that setting should be ignored.
+ """
+ for f in ['html']: #formats:
+ results = []
+ for hide_value in (False, 'after'):
+ try:
+ pass_through(
+ 'a',
+ hide,
+ hide_value,
+ pass_through,
+ 'b',
+ raise_error)
+ except:
+ results.append(format(f))
+ else:
+ assert 0
+ if results[0] != results[1]:
+ print_diff(results[0], results[1])
+ assert 0
+
+def test_hide_after():
+ for f in formats:
+ try:
+ pass_through(
+ 'AABB',
+ hide, 'after',
+ pass_through, 'CCDD',
+ # A little whitespace to keep this line out of the
+ # content part of the report
+
+
+ hide, 'reset',
+ raise_error)
+ except:
+ result = format(f)
+ assert 'AABB' in result
+ assert 'CCDD' not in result
+ assert 'raise_error' in result
+ else:
+ assert 0
+
+def test_hide_before():
+ for f in formats:
+ try:
+ pass_through(
+ 'AABB',
+ hide, 'before',
+ raise_error)
+ except:
+ result = format(f)
+ print(result)
+ assert 'AABB' not in result
+ assert 'raise_error' in result
+ else:
+ assert 0
+
+def test_make_wrappable():
+ assert '<wbr>' in formatter.make_wrappable('x'*1000)
+    # I'm just going to test that this doesn't exceed the stack limit:
+ formatter.make_wrappable(';'*2000)
+ assert (formatter.make_wrappable('this that the other')
+ == 'this that the other')
+ assert (formatter.make_wrappable('this that ' + ('x'*50) + ';' + ('y'*50) + ' and the other')
+ == 'this that '+('x'*50) + ';<wbr>' + ('y'*50) + ' and the other')
+
diff --git a/tests/test_exceptions/test_httpexceptions.py b/tests/test_exceptions/test_httpexceptions.py
new file mode 100644
index 0000000..24e00dd
--- /dev/null
+++ b/tests/test_exceptions/test_httpexceptions.py
@@ -0,0 +1,97 @@
+# (c) 2005 Ian Bicking, Clark C. Evans and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+"""
+WSGI Exception Middleware
+
+Regression Test Suite
+"""
+from nose.tools import assert_raises
+from paste.httpexceptions import *
+from paste.response import header_value
+import six
+
+
+def test_HTTPMove():
+ """ make sure that location is a mandatory attribute of Redirects """
+ assert_raises(AssertionError, HTTPFound)
+ assert_raises(AssertionError, HTTPTemporaryRedirect,
+ headers=[('l0cation','/bing')])
+ assert isinstance(HTTPMovedPermanently("This is a message",
+ headers=[('Location','/bing')])
+ ,HTTPRedirection)
+ assert isinstance(HTTPUseProxy(headers=[('LOCATION','/bing')])
+ ,HTTPRedirection)
+ assert isinstance(HTTPFound('/foobar'),HTTPRedirection)
+
+def test_badapp():
+ """ verify that the middleware handles previously-started responses """
+ def badapp(environ, start_response):
+ start_response("200 OK",[])
+ raise HTTPBadRequest("Do not do this at home.")
+ newapp = HTTPExceptionHandler(badapp)
+ assert b'Bad Request' in b''.join(newapp({'HTTP_ACCEPT': 'text/html'},
+ (lambda a, b, c=None: None)))
+
+def test_unicode():
+ """ verify unicode output """
+ tstr = u"\0xCAFE"
+ def badapp(environ, start_response):
+ start_response("200 OK",[])
+ raise HTTPBadRequest(tstr)
+ newapp = HTTPExceptionHandler(badapp)
+ assert tstr.encode("utf-8") in b''.join(newapp({'HTTP_ACCEPT':
+ 'text/html'},
+ (lambda a, b, c=None: None)))
+ assert tstr.encode("utf-8") in b''.join(newapp({'HTTP_ACCEPT':
+ 'text/plain'},
+ (lambda a, b, c=None: None)))
+
+def test_template():
+ """ verify that html() and plain() output methods work """
+ e = HTTPInternalServerError()
+ e.template = 'A %(ping)s and <b>%(pong)s</b> message.'
+ assert str(e).startswith("500 Internal Server Error")
+ assert e.plain({'ping': 'fun', 'pong': 'happy'}) == (
+ '500 Internal Server Error\r\n'
+ 'A fun and happy message.\r\n')
+ assert '<p>A fun and <b>happy</b> message.</p>' in \
+ e.html({'ping': 'fun', 'pong': 'happy'})
+
+def test_redapp():
+ """ check that redirect returns the correct, expected results """
+ saved = []
+ def saveit(status, headers, exc_info = None):
+ saved.append((status,headers))
+ def redapp(environ, start_response):
+ raise HTTPFound("/bing/foo")
+ app = HTTPExceptionHandler(redapp)
+ result = list(app({'HTTP_ACCEPT': 'text/html'},saveit))
+ assert b'<a href="/bing/foo">' in result[0]
+ assert "302 Found" == saved[0][0]
+ if six.PY3:
+ assert "text/html; charset=utf8" == header_value(saved[0][1], 'content-type')
+ else:
+ assert "text/html" == header_value(saved[0][1], 'content-type')
+ assert "/bing/foo" == header_value(saved[0][1],'location')
+ result = list(app({'HTTP_ACCEPT': 'text/plain'},saveit))
+ assert "text/plain; charset=utf8" == header_value(saved[1][1],'content-type')
+ assert "/bing/foo" == header_value(saved[1][1],'location')
+
+def test_misc():
+ assert get_exception(301) == HTTPMovedPermanently
+ redirect = HTTPFound("/some/path")
+ assert isinstance(redirect,HTTPException)
+ assert isinstance(redirect,HTTPRedirection)
+ assert not isinstance(redirect,HTTPError)
+ notfound = HTTPNotFound()
+ assert isinstance(notfound,HTTPException)
+ assert isinstance(notfound,HTTPError)
+ assert isinstance(notfound,HTTPClientError)
+ assert not isinstance(notfound,HTTPServerError)
+ notimpl = HTTPNotImplemented()
+ assert isinstance(notimpl,HTTPException)
+ assert isinstance(notimpl,HTTPError)
+ assert isinstance(notimpl,HTTPServerError)
+ assert not isinstance(notimpl,HTTPClientError)
+
diff --git a/tests/test_exceptions/test_reporter.py b/tests/test_exceptions/test_reporter.py
new file mode 100644
index 0000000..a40666e
--- /dev/null
+++ b/tests/test_exceptions/test_reporter.py
@@ -0,0 +1,50 @@
+import sys
+import os
+from paste.exceptions.reporter import *
+from paste.exceptions import collector
+
+def setup_file(fn, content=None):
+ dir = os.path.join(os.path.dirname(__file__), 'reporter_output')
+ fn = os.path.join(dir, fn)
+ if os.path.exists(dir):
+ if os.path.exists(fn):
+ os.unlink(fn)
+ else:
+ os.mkdir(dir)
+ if content is not None:
+ f = open(fn, 'wb')
+ f.write(content)
+ f.close()
+ return fn
+
+def test_logger():
+ fn = setup_file('test_logger.log')
+ rep = LogReporter(
+ filename=fn,
+ show_hidden_frames=False)
+ try:
+ int('a')
+ except:
+ exc_data = collector.collect_exception(*sys.exc_info())
+ else:
+ assert 0
+ rep.report(exc_data)
+ content = open(fn).read()
+ assert len(content.splitlines()) == 4, len(content.splitlines())
+ assert 'ValueError' in content
+ assert 'int' in content
+ assert 'test_reporter.py' in content
+ assert 'test_logger' in content
+
+ try:
+ 1 / 0
+ except:
+ exc_data = collector.collect_exception(*sys.exc_info())
+ else:
+ assert 0
+ rep.report(exc_data)
+ content = open(fn).read()
+ print(content)
+ assert len(content.splitlines()) == 8
+ assert 'ZeroDivisionError' in content
+
diff --git a/tests/test_fileapp.py b/tests/test_fileapp.py
new file mode 100644
index 0000000..bdd7510
--- /dev/null
+++ b/tests/test_fileapp.py
@@ -0,0 +1,242 @@
+# (c) 2005 Ian Bicking, Clark C. Evans and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+import time
+import random
+import os
+import tempfile
+try:
+ # Python 3
+ from email.utils import parsedate_tz, mktime_tz
+except ImportError:
+ # Python 2
+ from rfc822 import parsedate_tz, mktime_tz
+import six
+
+from paste import fileapp
+from paste.fileapp import *
+from paste.fixture import *
+
+# NOTE(haypo): don't use string.letters because the order of lower and upper
+# case letters changes when locale.setlocale() is called for the first time
+LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
+
+def test_data():
+ harness = TestApp(DataApp(b'mycontent'))
+ res = harness.get("/")
+ assert 'application/octet-stream' == res.header('content-type')
+ assert '9' == res.header('content-length')
+ assert "<Response 200 OK 'mycontent'>" == repr(res)
+ harness.app.set_content(b"bingles")
+ assert "<Response 200 OK 'bingles'>" == repr(harness.get("/"))
+
+def test_cache():
+ def build(*args,**kwargs):
+ app = DataApp(b"SomeContent")
+ app.cache_control(*args,**kwargs)
+ return TestApp(app).get("/")
+ res = build()
+ assert 'public' == res.header('cache-control')
+ assert not res.header('expires',None)
+ res = build(private=True)
+ assert 'private' == res.header('cache-control')
+ assert mktime_tz(parsedate_tz(res.header('expires'))) < time.time()
+ res = build(no_cache=True)
+ assert 'no-cache' == res.header('cache-control')
+ assert mktime_tz(parsedate_tz(res.header('expires'))) < time.time()
+ res = build(max_age=60,s_maxage=30)
+ assert 'public, max-age=60, s-maxage=30' == res.header('cache-control')
+ expires = mktime_tz(parsedate_tz(res.header('expires')))
+ assert expires > time.time()+58 and expires < time.time()+61
+ res = build(private=True, max_age=60, no_transform=True, no_store=True)
+ assert 'private, no-store, no-transform, max-age=60' == \
+ res.header('cache-control')
+ expires = mktime_tz(parsedate_tz(res.header('expires')))
+ assert mktime_tz(parsedate_tz(res.header('expires'))) < time.time()
+
+def test_disposition():
+ def build(*args,**kwargs):
+ app = DataApp(b"SomeContent")
+ app.content_disposition(*args,**kwargs)
+ return TestApp(app).get("/")
+ res = build()
+ assert 'attachment' == res.header('content-disposition')
+ assert 'application/octet-stream' == res.header('content-type')
+ res = build(filename="bing.txt")
+ assert 'attachment; filename="bing.txt"' == \
+ res.header('content-disposition')
+ assert 'text/plain' == res.header('content-type')
+ res = build(inline=True)
+ assert 'inline' == res.header('content-disposition')
+ assert 'application/octet-stream' == res.header('content-type')
+ res = build(inline=True, filename="/some/path/bing.txt")
+ assert 'inline; filename="bing.txt"' == \
+ res.header('content-disposition')
+ assert 'text/plain' == res.header('content-type')
+ try:
+ res = build(inline=True,attachment=True)
+ except AssertionError:
+ pass
+ else:
+ assert False, "should be an exception"
+
+def test_modified():
+ harness = TestApp(DataApp(b'mycontent'))
+ res = harness.get("/")
+ assert "<Response 200 OK 'mycontent'>" == repr(res)
+ last_modified = res.header('last-modified')
+ res = harness.get("/",headers={'if-modified-since': last_modified})
+ assert "<Response 304 Not Modified ''>" == repr(res)
+ res = harness.get("/",headers={'if-modified-since': last_modified + \
+ '; length=1506'})
+ assert "<Response 304 Not Modified ''>" == repr(res)
+ res = harness.get("/",status=400,
+ headers={'if-modified-since': 'garbage'})
+ assert 400 == res.status and b"ill-formed timestamp" in res.body
+ res = harness.get("/",status=400,
+ headers={'if-modified-since':
+ 'Thu, 22 Dec 2030 01:01:01 GMT'})
+ assert 400 == res.status and b"check your system clock" in res.body
+
+def test_file():
+ tempfile = "test_fileapp.%s.txt" % (random.random())
+ content = LETTERS * 20
+ if six.PY3:
+ content = content.encode('utf8')
+ with open(tempfile, "wb") as fp:
+ fp.write(content)
+ try:
+ app = fileapp.FileApp(tempfile)
+ res = TestApp(app).get("/")
+ assert len(content) == int(res.header('content-length'))
+ assert 'text/plain' == res.header('content-type')
+ assert content == res.body
+        assert content == app.content # this is cached
+ lastmod = res.header('last-modified')
+ print("updating", tempfile)
+ file = open(tempfile,"a+")
+ file.write("0123456789")
+ file.close()
+ res = TestApp(app).get("/",headers={'Cache-Control': 'max-age=0'})
+ assert len(content)+10 == int(res.header('content-length'))
+ assert 'text/plain' == res.header('content-type')
+ assert content + b"0123456789" == res.body
+ assert app.content # we are still cached
+ file = open(tempfile,"a+")
+        file.write("X" * fileapp.CACHE_SIZE) # exceed the cache size
+ file.write("YZ")
+ file.close()
+ res = TestApp(app).get("/",headers={'Cache-Control': 'max-age=0'})
+ newsize = fileapp.CACHE_SIZE + len(content)+12
+ assert newsize == int(res.header('content-length'))
+ assert newsize == len(res.body)
+ assert res.body.startswith(content) and res.body.endswith(b'XYZ')
+ assert not app.content # we are no longer cached
+ finally:
+ os.unlink(tempfile)
+
+def test_dir():
+ tmpdir = tempfile.mkdtemp()
+ try:
+ tmpfile = os.path.join(tmpdir, 'file')
+ tmpsubdir = os.path.join(tmpdir, 'dir')
+ fp = open(tmpfile, 'w')
+ fp.write('abcd')
+ fp.close()
+ os.mkdir(tmpsubdir)
+ try:
+ app = fileapp.DirectoryApp(tmpdir)
+ for path in ['/', '', '//', '/..', '/.', '/../..']:
+ assert TestApp(app).get(path, status=403).status == 403, ValueError(path)
+ for path in ['/~', '/foo', '/dir', '/dir/']:
+ assert TestApp(app).get(path, status=404).status == 404, ValueError(path)
+ assert TestApp(app).get('/file').body == b'abcd'
+ finally:
+ os.remove(tmpfile)
+ os.rmdir(tmpsubdir)
+ finally:
+ os.rmdir(tmpdir)
+
+def _excercize_range(build,content):
+ # full content request, but using ranges'
+ res = build("bytes=0-%d" % (len(content)-1))
+ assert res.header('accept-ranges') == 'bytes'
+ assert res.body == content
+ assert res.header('content-length') == str(len(content))
+ res = build("bytes=-%d" % (len(content)-1))
+ assert res.body == content
+ assert res.header('content-length') == str(len(content))
+ res = build("bytes=0-")
+ assert res.body == content
+ assert res.header('content-length') == str(len(content))
+ # partial content requests
+ res = build("bytes=0-9", status=206)
+ assert res.body == content[:10]
+ assert res.header('content-length') == '10'
+ res = build("bytes=%d-" % (len(content)-1), status=206)
+ assert res.body == b'Z'
+ assert res.header('content-length') == '1'
+ res = build("bytes=%d-%d" % (3,17), status=206)
+ assert res.body == content[3:18]
+ assert res.header('content-length') == '15'
+
+def test_range():
+ content = LETTERS * 5
+ if six.PY3:
+ content = content.encode('utf8')
+ def build(range, status=206):
+ app = DataApp(content)
+ return TestApp(app).get("/",headers={'Range': range}, status=status)
+ _excercize_range(build,content)
+ build('bytes=0-%d' % (len(content)+1), 416)
+
+def test_file_range():
+ tempfile = "test_fileapp.%s.txt" % (random.random())
+ content = LETTERS * (1+(fileapp.CACHE_SIZE // len(LETTERS)))
+ if six.PY3:
+ content = content.encode('utf8')
+ assert len(content) > fileapp.CACHE_SIZE
+ with open(tempfile, "wb") as fp:
+ fp.write(content)
+ try:
+ def build(range, status=206):
+ app = fileapp.FileApp(tempfile)
+ return TestApp(app).get("/",headers={'Range': range},
+ status=status)
+ _excercize_range(build,content)
+ for size in (13,len(LETTERS), len(LETTERS)-1):
+ fileapp.BLOCK_SIZE = size
+ _excercize_range(build,content)
+ finally:
+ os.unlink(tempfile)
+
+def test_file_cache():
+ filename = os.path.join(os.path.dirname(__file__),
+ 'urlparser_data', 'secured.txt')
+ app = TestApp(fileapp.FileApp(filename))
+ res = app.get('/')
+ etag = res.header('ETag')
+ last_mod = res.header('Last-Modified')
+ res = app.get('/', headers={'If-Modified-Since': last_mod},
+ status=304)
+ res = app.get('/', headers={'If-None-Match': etag},
+ status=304)
+ res = app.get('/', headers={'If-None-Match': 'asdf'},
+ status=200)
+ res = app.get('/', headers={'If-Modified-Since': 'Sat, 1 Jan 2005 12:00:00 GMT'},
+ status=200)
+ res = app.get('/', headers={'If-Modified-Since': last_mod + '; length=100'},
+ status=304)
+ res = app.get('/', headers={'If-Modified-Since': 'invalid date'},
+ status=400)
+
+def test_methods():
+ filename = os.path.join(os.path.dirname(__file__),
+ 'urlparser_data', 'secured.txt')
+ app = TestApp(fileapp.FileApp(filename))
+ get_res = app.get('')
+ res = app.get('', extra_environ={'REQUEST_METHOD': 'HEAD'})
+ assert res.headers == get_res.headers
+ assert not res.body
+ app.post('', status=405) # Method Not Allowed
+
diff --git a/tests/test_fixture.py b/tests/test_fixture.py
new file mode 100644
index 0000000..ba56488
--- /dev/null
+++ b/tests/test_fixture.py
@@ -0,0 +1,28 @@
+from paste.debug.debugapp import SimpleApplication
+from paste.fixture import TestApp
+
+def test_fixture():
+ app = TestApp(SimpleApplication())
+ res = app.get('/', params={'a': ['1', '2']})
+ assert (res.request.environ['QUERY_STRING'] ==
+ 'a=1&a=2')
+ res = app.put('/')
+ assert (res.request.environ['REQUEST_METHOD'] ==
+ 'PUT')
+ res = app.delete('/')
+ assert (res.request.environ['REQUEST_METHOD'] ==
+ 'DELETE')
+ class FakeDict(object):
+ def items(self):
+ return [('a', '10'), ('a', '20')]
+ res = app.post('/params', params=FakeDict())
+
+ # test multiple cookies in one request
+ app.cookies['one'] = 'first';
+ app.cookies['two'] = 'second';
+ app.cookies['three'] = '';
+ res = app.get('/')
+ hc = res.request.environ['HTTP_COOKIE'].split('; ');
+ assert ('one=first' in hc)
+ assert ('two=second' in hc)
+ assert ('three=' in hc)
diff --git a/tests/test_grantip.py b/tests/test_grantip.py
new file mode 100644
index 0000000..2ddf7f1
--- /dev/null
+++ b/tests/test_grantip.py
@@ -0,0 +1,37 @@
+from paste.auth import grantip
+from paste.fixture import *
+
+def test_make_app():
+ def application(environ, start_response):
+ start_response('200 OK', [('content-type', 'text/plain')])
+ lines = [
+ str(environ.get('REMOTE_USER')),
+ ':',
+ str(environ.get('REMOTE_USER_TOKENS')),
+ ]
+ if six.PY3:
+ lines = [line.encode('utf8') for line in lines]
+ return lines
+ ip_map = {
+ '127.0.0.1': (None, 'system'),
+ '192.168.0.0/16': (None, 'worker'),
+ '192.168.0.5<->192.168.0.8': ('bob', 'editor'),
+ '192.168.0.8': ('__remove__', '-worker'),
+ }
+ app = grantip.GrantIPMiddleware(application, ip_map)
+ app = TestApp(app)
+ return app
+
+def test_req():
+ app = test_make_app()
+ def doit(remote_addr):
+ res = app.get('/', extra_environ={'REMOTE_ADDR': remote_addr})
+ return res.body
+ assert doit('127.0.0.1') == b'None:system'
+ assert doit('192.168.15.12') == b'None:worker'
+ assert doit('192.168.0.4') == b'None:worker'
+ result = doit('192.168.0.5')
+ assert result.startswith(b'bob:')
+ assert b'editor' in result and b'worker' in result
+ assert result.count(b',') == 1
+ assert doit('192.168.0.8') == b'None:editor'
diff --git a/tests/test_gzipper.py b/tests/test_gzipper.py
new file mode 100644
index 0000000..54b7901
--- /dev/null
+++ b/tests/test_gzipper.py
@@ -0,0 +1,19 @@
+from paste.fixture import TestApp
+from paste.gzipper import middleware
+import gzip
+import six
+
+def simple_app(environ, start_response):
+ start_response('200 OK', [('content-type', 'text/plain')])
+ return [b'this is a test']
+
+wsgi_app = middleware(simple_app)
+app = TestApp(wsgi_app)
+
+def test_gzip():
+ res = app.get(
+ '/', extra_environ=dict(HTTP_ACCEPT_ENCODING='gzip'))
+ assert int(res.header('content-length')) == len(res.body)
+ assert res.body != b'this is a test'
+ actual = gzip.GzipFile(fileobj=six.BytesIO(res.body)).read()
+ assert actual == b'this is a test'
diff --git a/tests/test_httpheaders.py b/tests/test_httpheaders.py
new file mode 100644
index 0000000..8c560d2
--- /dev/null
+++ b/tests/test_httpheaders.py
@@ -0,0 +1,159 @@
+from paste.httpheaders import *
+import time
+
+def _test_generic(collection):
+ assert 'bing' == VIA(collection)
+ REFERER.update(collection,'internal:/some/path')
+ assert 'internal:/some/path' == REFERER(collection)
+ CACHE_CONTROL.update(collection,max_age=1234)
+ CONTENT_DISPOSITION.update(collection,filename="bingles.txt")
+ PRAGMA.update(collection,"test","multi",'valued="items"')
+ assert 'public, max-age=1234' == CACHE_CONTROL(collection)
+ assert 'attachment; filename="bingles.txt"' == \
+ CONTENT_DISPOSITION(collection)
+ assert 'test, multi, valued="items"' == PRAGMA(collection)
+ VIA.delete(collection)
+
+
+def test_environ():
+ collection = {'HTTP_VIA':'bing', 'wsgi.version': '1.0' }
+ _test_generic(collection)
+ assert collection == {'wsgi.version': '1.0',
+ 'HTTP_PRAGMA': 'test, multi, valued="items"',
+ 'HTTP_REFERER': 'internal:/some/path',
+ 'HTTP_CONTENT_DISPOSITION': 'attachment; filename="bingles.txt"',
+ 'HTTP_CACHE_CONTROL': 'public, max-age=1234'
+ }
+
+def test_environ_cgi():
+ environ = {'CONTENT_TYPE': 'text/plain', 'wsgi.version': '1.0',
+ 'HTTP_CONTENT_TYPE': 'ignored/invalid',
+ 'CONTENT_LENGTH': '200'}
+ assert 'text/plain' == CONTENT_TYPE(environ)
+ assert '200' == CONTENT_LENGTH(environ)
+ CONTENT_TYPE.update(environ,'new/type')
+ assert 'new/type' == CONTENT_TYPE(environ)
+ CONTENT_TYPE.delete(environ)
+ assert '' == CONTENT_TYPE(environ)
+ assert 'ignored/invalid' == environ['HTTP_CONTENT_TYPE']
+
+def test_response_headers():
+ collection = [('via', 'bing')]
+ _test_generic(collection)
+ normalize_headers(collection)
+ assert collection == [
+ ('Cache-Control', 'public, max-age=1234'),
+ ('Pragma', 'test, multi, valued="items"'),
+ ('Referer', 'internal:/some/path'),
+ ('Content-Disposition', 'attachment; filename="bingles.txt"')
+ ]
+
+def test_cache_control():
+ assert 'public' == CACHE_CONTROL()
+ assert 'public' == CACHE_CONTROL(public=True)
+ assert 'private' == CACHE_CONTROL(private=True)
+ assert 'no-cache' == CACHE_CONTROL(no_cache=True)
+ assert 'private, no-store' == CACHE_CONTROL(private=True, no_store=True)
+ assert 'public, max-age=60' == CACHE_CONTROL(max_age=60)
+ assert 'public, max-age=86400' == \
+ CACHE_CONTROL(max_age=CACHE_CONTROL.ONE_DAY)
+ CACHE_CONTROL.extensions['community'] = str
+ assert 'public, community="bingles"' == \
+ CACHE_CONTROL(community="bingles")
+ headers = []
+ CACHE_CONTROL.apply(headers,max_age=60)
+ assert 'public, max-age=60' == CACHE_CONTROL(headers)
+ assert EXPIRES.parse(headers) > time.time()
+ assert EXPIRES.parse(headers) < time.time() + 60
+
+def test_content_disposition():
+ assert 'attachment' == CONTENT_DISPOSITION()
+ assert 'attachment' == CONTENT_DISPOSITION(attachment=True)
+ assert 'inline' == CONTENT_DISPOSITION(inline=True)
+ assert 'inline; filename="test.txt"' == \
+ CONTENT_DISPOSITION(inline=True, filename="test.txt")
+ assert 'attachment; filename="test.txt"' == \
+ CONTENT_DISPOSITION(filename="/some/path/test.txt")
+ headers = []
+ CONTENT_DISPOSITION.apply(headers,filename="test.txt")
+ assert 'text/plain' == CONTENT_TYPE(headers)
+ CONTENT_DISPOSITION.apply(headers,filename="test")
+ assert 'text/plain' == CONTENT_TYPE(headers)
+ CONTENT_DISPOSITION.apply(headers,filename="test.html")
+ assert 'text/plain' == CONTENT_TYPE(headers)
+ headers = [('Content-Type', 'application/octet-stream')]
+ CONTENT_DISPOSITION.apply(headers,filename="test.txt")
+ assert 'text/plain' == CONTENT_TYPE(headers)
+ assert headers == [
+ ('Content-Type', 'text/plain'),
+ ('Content-Disposition', 'attachment; filename="test.txt"')
+ ]
+
+def test_range():
+ assert ('bytes',[(0,300)]) == RANGE.parse("bytes=0-300")
+ assert ('bytes',[(0,300)]) == RANGE.parse("bytes = -300")
+ assert ('bytes',[(0,None)]) == RANGE.parse("bytes= -")
+ assert ('bytes',[(0,None)]) == RANGE.parse("bytes=0 - ")
+ assert ('bytes',[(300,None)]) == RANGE.parse(" BYTES=300-")
+ assert ('bytes',[(4,5),(6,7)]) == RANGE.parse(" Bytes = 4 - 5,6 - 07 ")
+ assert ('bytes',[(0,5),(7,None)]) == RANGE.parse(" bytes=-5,7-")
+ assert ('bytes',[(0,5),(7,None)]) == RANGE.parse(" bytes=-5,7-")
+ assert ('bytes',[(0,5),(7,None)]) == RANGE.parse(" bytes=-5,7-")
+ assert None == RANGE.parse("")
+ assert None == RANGE.parse("bytes=0,300")
+ assert None == RANGE.parse("bytes=-7,5-")
+
+def test_copy():
+ environ = {'HTTP_VIA':'bing', 'wsgi.version': '1.0' }
+ response_headers = []
+ VIA.update(response_headers,environ)
+ assert response_headers == [('Via', 'bing')]
+
+def test_sorting():
+ # verify the HTTP_HEADERS are set with their canonical form
+ sample = [WWW_AUTHENTICATE, VIA, ACCEPT, DATE,
+ ACCEPT_CHARSET, AGE, ALLOW, CACHE_CONTROL,
+ CONTENT_ENCODING, ETAG, CONTENT_TYPE, FROM,
+ EXPIRES, RANGE, UPGRADE, VARY, ALLOW]
+ sample.sort()
+ sample = [str(x) for x in sample]
+ assert sample == [
+ # general headers first
+ 'Cache-Control', 'Date', 'Upgrade', 'Via',
+ # request headers next
+ 'Accept', 'Accept-Charset', 'From', 'Range',
+ # response headers following
+ 'Age', 'ETag', 'Vary', 'WWW-Authenticate',
+ # entity headers (/w expected duplicate)
+ 'Allow', 'Allow', 'Content-Encoding', 'Content-Type', 'Expires'
+ ]
+
+def test_normalize():
+ response_headers = [
+ ('www-authenticate','Response AuthMessage'),
+ ('unknown-header','Unknown Sorted Last'),
+ ('Via','General Bingles'),
+ ('aLLoW','Entity Allow Something'),
+ ('ETAG','Response 34234'),
+ ('expires','Entity An-Expiration-Date'),
+ ('date','General A-Date')]
+ normalize_headers(response_headers, strict=False)
+ assert response_headers == [
+ ('Date', 'General A-Date'),
+ ('Via', 'General Bingles'),
+ ('ETag', 'Response 34234'),
+ ('WWW-Authenticate', 'Response AuthMessage'),
+ ('Allow', 'Entity Allow Something'),
+ ('Expires', 'Entity An-Expiration-Date'),
+ ('Unknown-Header', 'Unknown Sorted Last')]
+
+def test_if_modified_since():
+ from paste.httpexceptions import HTTPBadRequest
+ date = 'Thu, 34 Jul 3119 29:34:18 GMT'
+ try:
+ x = IF_MODIFIED_SINCE.parse({'HTTP_IF_MODIFIED_SINCE': date,
+ 'wsgi.version': (1, 0)})
+ except HTTPBadRequest:
+ pass
+ else:
+ assert 0
diff --git a/tests/test_httpserver.py b/tests/test_httpserver.py
new file mode 100644
index 0000000..3d72c79
--- /dev/null
+++ b/tests/test_httpserver.py
@@ -0,0 +1,45 @@
+import email
+
+from paste.httpserver import WSGIHandler
+from six.moves import StringIO
+
+
+class MockServer(object):
+ server_address = ('127.0.0.1', 80)
+
+
+class MockSocket(object):
+ def makefile(self, mode, bufsize):
+ return StringIO()
+
+
+def test_environ():
+ mock_socket = MockSocket()
+ mock_client_address = '1.2.3.4'
+ mock_server = MockServer()
+
+ wsgi_handler = WSGIHandler(mock_socket, mock_client_address, mock_server)
+ wsgi_handler.command = 'GET'
+ wsgi_handler.path = '/path'
+ wsgi_handler.request_version = 'HTTP/1.0'
+ wsgi_handler.headers = email.message_from_string('Host: mywebsite')
+
+ wsgi_handler.wsgi_setup()
+
+ assert wsgi_handler.wsgi_environ['HTTP_HOST'] == 'mywebsite'
+
+
+def test_environ_with_multiple_values():
+ mock_socket = MockSocket()
+ mock_client_address = '1.2.3.4'
+ mock_server = MockServer()
+
+ wsgi_handler = WSGIHandler(mock_socket, mock_client_address, mock_server)
+ wsgi_handler.command = 'GET'
+ wsgi_handler.path = '/path'
+ wsgi_handler.request_version = 'HTTP/1.0'
+ wsgi_handler.headers = email.message_from_string('Host: host1\nHost: host2')
+
+ wsgi_handler.wsgi_setup()
+
+ assert wsgi_handler.wsgi_environ['HTTP_HOST'] == 'host1,host2'
diff --git a/tests/test_import_string.py b/tests/test_import_string.py
new file mode 100644
index 0000000..262cbdd
--- /dev/null
+++ b/tests/test_import_string.py
@@ -0,0 +1,16 @@
+from paste.util.import_string import *
+import sys
+import os
+
+def test_simple():
+ for func in eval_import, simple_import:
+ assert func('sys') is sys
+ assert func('sys.version') is sys.version
+ assert func('os.path.join') is os.path.join
+
+def test_complex():
+ assert eval_import('sys:version') is sys.version
+ assert eval_import('os:getcwd()') == os.getcwd()
+ assert (eval_import('sys:version.split()[0]') ==
+ sys.version.split()[0])
+
diff --git a/tests/test_multidict.py b/tests/test_multidict.py
new file mode 100644
index 0000000..50a746f
--- /dev/null
+++ b/tests/test_multidict.py
@@ -0,0 +1,162 @@
+# -*- coding: utf-8 -*-
+# (c) 2007 Ian Bicking and Philip Jenvey; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+import cgi
+import six
+from six.moves import StringIO
+
+from nose.tools import assert_raises
+
+from paste.util.multidict import MultiDict, UnicodeMultiDict
+
+def test_dict():
+ d = MultiDict({'a': 1})
+ assert d.items() == [('a', 1)]
+
+ d['b'] = 2
+ d['c'] = 3
+ assert d.items() == [('a', 1), ('b', 2), ('c', 3)]
+
+ d['b'] = 4
+ assert d.items() == [('a', 1), ('c', 3), ('b', 4)]
+
+ d.add('b', 5)
+ assert_raises(KeyError, d.getone, "b")
+ assert d.getall('b') == [4, 5]
+ assert d.items() == [('a', 1), ('c', 3), ('b', 4), ('b', 5)]
+
+ del d['b']
+ assert d.items() == [('a', 1), ('c', 3)]
+ assert d.pop('xxx', 5) == 5
+ assert d.getone('a') == 1
+ assert d.popitem() == ('c', 3)
+ assert d.items() == [('a', 1)]
+
+ item = []
+ assert d.setdefault('z', item) is item
+ assert d.items() == [('a', 1), ('z', item)]
+
+ assert d.setdefault('y', 6) == 6
+
+ assert d.mixed() == {'a': 1, 'y': 6, 'z': item}
+ assert d.dict_of_lists() == {'a': [1], 'y': [6], 'z': [item]}
+
+ assert 'a' in d
+ dcopy = d.copy()
+ assert dcopy is not d
+ assert dcopy == d
+ d['x'] = 'x test'
+ assert dcopy != d
+
+ d[(1, None)] = (None, 1)
+ assert d.items() == [('a', 1), ('z', []), ('y', 6), ('x', 'x test'),
+ ((1, None), (None, 1))]
+
+def test_unicode_dict():
+ _test_unicode_dict()
+ _test_unicode_dict(decode_param_names=True)
+
+def _test_unicode_dict(decode_param_names=False):
+ d = UnicodeMultiDict(MultiDict({b'a': 'a test'}))
+ d.encoding = 'utf-8'
+ d.errors = 'ignore'
+
+ if decode_param_names:
+ key_str = six.text_type
+ k = lambda key: key
+ d.decode_keys = True
+ else:
+ key_str = six.binary_type
+ k = lambda key: key.encode()
+
+ def assert_unicode(obj):
+ assert isinstance(obj, six.text_type)
+
+ def assert_key_str(obj):
+ assert isinstance(obj, key_str)
+
+ def assert_unicode_item(obj):
+ key, value = obj
+ assert isinstance(key, key_str)
+ assert isinstance(value, six.text_type)
+
+ assert d.items() == [(k('a'), u'a test')]
+ list(map(assert_key_str, d.keys()))
+ list(map(assert_unicode, d.values()))
+
+ d[b'b'] = b'2 test'
+ d[b'c'] = b'3 test'
+ assert d.items() == [(k('a'), u'a test'), (k('b'), u'2 test'), (k('c'), u'3 test')]
+ list(map(assert_unicode_item, d.items()))
+
+ d[k('b')] = b'4 test'
+ assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test'), (k('b'), u'4 test')], d.items()
+ list(map(assert_unicode_item, d.items()))
+
+ d.add(k('b'), b'5 test')
+ assert_raises(KeyError, d.getone, k("b"))
+ assert d.getall(k('b')) == [u'4 test', u'5 test']
+ list(map(assert_unicode, d.getall('b')))
+ assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test'), (k('b'), u'4 test'),
+ (k('b'), u'5 test')]
+ list(map(assert_unicode_item, d.items()))
+
+ del d[k('b')]
+ assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test')]
+ list(map(assert_unicode_item, d.items()))
+ assert d.pop('xxx', u'5 test') == u'5 test'
+ assert isinstance(d.pop('xxx', u'5 test'), six.text_type)
+ assert d.getone(k('a')) == u'a test'
+ assert isinstance(d.getone(k('a')), six.text_type)
+ assert d.popitem() == (k('c'), u'3 test')
+ d[k('c')] = b'3 test'
+ assert_unicode_item(d.popitem())
+ assert d.items() == [(k('a'), u'a test')]
+ list(map(assert_unicode_item, d.items()))
+
+ item = []
+ assert d.setdefault(k('z'), item) is item
+ items = d.items()
+ assert items == [(k('a'), u'a test'), (k('z'), item)]
+ assert isinstance(items[1][0], key_str)
+ assert isinstance(items[1][1], list)
+
+ assert isinstance(d.setdefault(k('y'), b'y test'), six.text_type)
+ assert isinstance(d[k('y')], six.text_type)
+
+ assert d.mixed() == {k('a'): u'a test', k('y'): u'y test', k('z'): item}
+ assert d.dict_of_lists() == {k('a'): [u'a test'], k('y'): [u'y test'],
+ k('z'): [item]}
+ del d[k('z')]
+ list(map(assert_unicode_item, six.iteritems(d.mixed())))
+ list(map(assert_unicode_item, [(key, value[0]) for \
+ key, value in six.iteritems(d.dict_of_lists())]))
+
+ assert k('a') in d
+ dcopy = d.copy()
+ assert dcopy is not d
+ assert dcopy == d
+ d[k('x')] = 'x test'
+ assert dcopy != d
+
+ d[(1, None)] = (None, 1)
+ assert d.items() == [(k('a'), u'a test'), (k('y'), u'y test'), (k('x'), u'x test'),
+ ((1, None), (None, 1))]
+ item = d.items()[-1]
+ assert isinstance(item[0], tuple)
+ assert isinstance(item[1], tuple)
+
+ fs = cgi.FieldStorage()
+ fs.name = 'thefile'
+ fs.filename = 'hello.txt'
+ fs.file = StringIO('hello')
+ d[k('f')] = fs
+ ufs = d[k('f')]
+ assert isinstance(ufs, cgi.FieldStorage)
+ assert ufs is not fs
+ assert ufs.name == fs.name
+ assert isinstance(ufs.name, str if six.PY3 else key_str)
+ assert ufs.filename == fs.filename
+ assert isinstance(ufs.filename, six.text_type)
+ assert isinstance(ufs.value, str)
+ assert ufs.value == 'hello'
diff --git a/tests/test_profilemiddleware.py b/tests/test_profilemiddleware.py
new file mode 100644
index 0000000..4c189f8
--- /dev/null
+++ b/tests/test_profilemiddleware.py
@@ -0,0 +1,29 @@
+from paste.fixture import *
+try:
+ from paste.debug.profile import *
+ disable = False
+except ImportError:
+ disable = True
+
+if not disable:
+ def simple_app(environ, start_response):
+ start_response('200 OK', [('content-type', 'text/html')])
+ return ['all ok']
+
+ def long_func():
+ for i in range(1000):
+ pass
+ return 'test'
+
+ def test_profile():
+ app = TestApp(ProfileMiddleware(simple_app, {}))
+ res = app.get('/')
+ # The original app:
+ res.mustcontain('all ok')
+ # The profile information:
+ res.mustcontain('<pre')
+
+ def test_decorator():
+ value = profile_decorator()(long_func)()
+ assert value == 'test'
+
diff --git a/tests/test_proxy.py b/tests/test_proxy.py
new file mode 100644
index 0000000..44db9f3
--- /dev/null
+++ b/tests/test_proxy.py
@@ -0,0 +1,12 @@
+from paste import proxy
+from paste.fixture import TestApp
+
+def test_paste_website():
+ # Not the most robust test...
+ # need to test things like POSTing to pages, and getting from pages
+ # that don't set content-length.
+ app = proxy.Proxy('http://pythonpaste.org')
+ app = TestApp(app)
+ res = app.get('/')
+ assert 'documentation' in res
+
diff --git a/tests/test_recursive.py b/tests/test_recursive.py
new file mode 100644
index 0000000..1cb1984
--- /dev/null
+++ b/tests/test_recursive.py
@@ -0,0 +1,105 @@
+from .test_errordocument import simple_app
+from paste.fixture import *
+from paste.recursive import RecursiveMiddleware, ForwardRequestException
+
+def error_docs_app(environ, start_response):
+ if environ['PATH_INFO'] == '/not_found':
+ start_response("404 Not found", [('Content-type', 'text/plain')])
+ return [b'Not found']
+ elif environ['PATH_INFO'] == '/error':
+ start_response("200 OK", [('Content-type', 'text/plain')])
+ return [b'Page not found']
+ elif environ['PATH_INFO'] == '/recurse':
+ raise ForwardRequestException('/recurse')
+ else:
+ return simple_app(environ, start_response)
+
+class Middleware(object):
+ def __init__(self, app, url='/error'):
+ self.app = app
+ self.url = url
+ def __call__(self, environ, start_response):
+ raise ForwardRequestException(self.url)
+
+def forward(app):
+ app = TestApp(RecursiveMiddleware(app))
+ res = app.get('')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'requested page returned' in res
+ res = app.get('/error')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'Page not found' in res
+ res = app.get('/not_found')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'Page not found' in res
+ try:
+ res = app.get('/recurse')
+ except AssertionError as e:
+ if str(e).startswith('Forwarding loop detected'):
+ pass
+ else:
+ raise AssertionError('Failed to detect forwarding loop')
+
+def test_ForwardRequest_url():
+ class TestForwardRequestMiddleware(Middleware):
+ def __call__(self, environ, start_response):
+ if environ['PATH_INFO'] != '/not_found':
+ return self.app(environ, start_response)
+ raise ForwardRequestException(self.url)
+ forward(TestForwardRequestMiddleware(error_docs_app))
+
+def test_ForwardRequest_environ():
+ class TestForwardRequestMiddleware(Middleware):
+ def __call__(self, environ, start_response):
+ if environ['PATH_INFO'] != '/not_found':
+ return self.app(environ, start_response)
+ environ['PATH_INFO'] = self.url
+ raise ForwardRequestException(environ=environ)
+ forward(TestForwardRequestMiddleware(error_docs_app))
+
+def test_ForwardRequest_factory():
+
+ from paste.errordocument import StatusKeeper
+
+ class TestForwardRequestMiddleware(Middleware):
+ def __call__(self, environ, start_response):
+ if environ['PATH_INFO'] != '/not_found':
+ return self.app(environ, start_response)
+ environ['PATH_INFO'] = self.url
+ def factory(app):
+ return StatusKeeper(app, status='404 Not Found', url='/error', headers=[])
+ raise ForwardRequestException(factory=factory)
+
+ app = TestForwardRequestMiddleware(error_docs_app)
+ app = TestApp(RecursiveMiddleware(app))
+ res = app.get('')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'requested page returned' in res
+ res = app.get('/error')
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '200 OK'
+ assert 'Page not found' in res
+ res = app.get('/not_found', status=404)
+ assert res.header('content-type') == 'text/plain'
+ assert res.full_status == '404 Not Found' # Different status
+ assert 'Page not found' in res
+ try:
+ res = app.get('/recurse')
+ except AssertionError as e:
+ if str(e).startswith('Forwarding loop detected'):
+ pass
+ else:
+ raise AssertionError('Failed to detect forwarding loop')
+
+# Test Deprecated Code
+def test_ForwardRequestException():
+ class TestForwardRequestExceptionMiddleware(Middleware):
+ def __call__(self, environ, start_response):
+ if environ['PATH_INFO'] != '/not_found':
+ return self.app(environ, start_response)
+ raise ForwardRequestException(path_info=self.url)
+ forward(TestForwardRequestExceptionMiddleware(error_docs_app))
diff --git a/tests/test_registry.py b/tests/test_registry.py
new file mode 100644
index 0000000..23cd9b6
--- /dev/null
+++ b/tests/test_registry.py
@@ -0,0 +1,314 @@
+# (c) 2005 Ben Bangert
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from nose.tools import assert_raises
+
+from paste.fixture import *
+from paste.registry import *
+from paste.registry import Registry
+from paste.evalexception.middleware import EvalException
+
+regobj = StackedObjectProxy()
+secondobj = StackedObjectProxy(default=dict(hi='people'))
+
+def simpleapp(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ return [b'Hello world!\n']
+
+def simpleapp_withregistry(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ body = 'Hello world!Value is %s\n' % regobj.keys()
+ if six.PY3:
+ body = body.encode('utf8')
+ return [body]
+
+def simpleapp_withregistry_default(environ, start_response):
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ body = 'Hello world!Value is %s\n' % secondobj
+ if six.PY3:
+ body = body.encode('utf8')
+ return [body]
+
+
+class RegistryUsingApp(object):
+ def __init__(self, var, value, raise_exc=False):
+ self.var = var
+ self.value = value
+ self.raise_exc = raise_exc
+
+ def __call__(self, environ, start_response):
+ if 'paste.registry' in environ:
+ environ['paste.registry'].register(self.var, self.value)
+ if self.raise_exc:
+ raise self.raise_exc
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ body = 'Hello world!\nThe variable is %s' % str(regobj)
+ if six.PY3:
+ body = body.encode('utf8')
+ return [body]
+
+class RegistryUsingIteratorApp(object):
+ def __init__(self, var, value):
+ self.var = var
+ self.value = value
+
+ def __call__(self, environ, start_response):
+ if 'paste.registry' in environ:
+ environ['paste.registry'].register(self.var, self.value)
+ status = '200 OK'
+ response_headers = [('Content-type','text/plain')]
+ start_response(status, response_headers)
+ body = 'Hello world!\nThe variable is %s' % str(regobj)
+ if six.PY3:
+ body = body.encode('utf8')
+ return iter([body])
+
+class RegistryMiddleMan(object):
+ def __init__(self, app, var, value, depth):
+ self.app = app
+ self.var = var
+ self.value = value
+ self.depth = depth
+
+ def __call__(self, environ, start_response):
+ if 'paste.registry' in environ:
+ environ['paste.registry'].register(self.var, self.value)
+ line = ('\nInserted by middleware!\nInsertValue at depth %s is %s'
+ % (self.depth, str(regobj)))
+ if six.PY3:
+ line = line.encode('utf8')
+ app_response = [line]
+ app_iter = None
+ app_iter = self.app(environ, start_response)
+ if type(app_iter) in (list, tuple):
+ app_response.extend(app_iter)
+ else:
+ response = []
+ for line in app_iter:
+ response.append(line)
+ if hasattr(app_iter, 'close'):
+ app_iter.close()
+ app_response.extend(response)
+ line = ('\nAppended by middleware!\nAppendValue at \
+ depth %s is %s' % (self.depth, str(regobj)))
+ if six.PY3:
+ line = line.encode('utf8')
+ app_response.append(line)
+ return app_response
+
+
+def test_simple():
+ app = TestApp(simpleapp)
+ response = app.get('/')
+ assert 'Hello world' in response
+
+def test_solo_registry():
+ obj = {'hi':'people'}
+ wsgiapp = RegistryUsingApp(regobj, obj)
+ wsgiapp = RegistryManager(wsgiapp)
+ app = TestApp(wsgiapp)
+ res = app.get('/')
+ assert 'Hello world' in res
+ assert 'The variable is' in res
+ assert "{'hi': 'people'}" in res
+
+def test_registry_no_object_error():
+ app = TestApp(simpleapp_withregistry)
+ assert_raises(TypeError, app.get, '/')
+
+def test_with_default_object():
+ app = TestApp(simpleapp_withregistry_default)
+ res = app.get('/')
+ print(res)
+ assert 'Hello world' in res
+ assert "Value is {'hi': 'people'}" in res
+
+def test_double_registry():
+ obj = {'hi':'people'}
+ secondobj = {'bye':'friends'}
+ wsgiapp = RegistryUsingApp(regobj, obj)
+ wsgiapp = RegistryManager(wsgiapp)
+ wsgiapp = RegistryMiddleMan(wsgiapp, regobj, secondobj, 0)
+ wsgiapp = RegistryManager(wsgiapp)
+ app = TestApp(wsgiapp)
+ res = app.get('/')
+ assert 'Hello world' in res
+ assert 'The variable is' in res
+ assert "{'hi': 'people'}" in res
+ assert "InsertValue at depth 0 is {'bye': 'friends'}" in res
+ assert "AppendValue at depth 0 is {'bye': 'friends'}" in res
+
+def test_really_deep_registry():
+ keylist = ['fred', 'wilma', 'barney', 'homer', 'marge', 'bart', 'lisa',
+ 'maggie']
+ valuelist = range(0, len(keylist))
+ obj = {'hi':'people'}
+ wsgiapp = RegistryUsingApp(regobj, obj)
+ wsgiapp = RegistryManager(wsgiapp)
+ for depth in valuelist:
+ newobj = {keylist[depth]: depth}
+ wsgiapp = RegistryMiddleMan(wsgiapp, regobj, newobj, depth)
+ wsgiapp = RegistryManager(wsgiapp)
+ app = TestApp(wsgiapp)
+ res = app.get('/')
+ assert 'Hello world' in res
+ assert 'The variable is' in res
+ assert "{'hi': 'people'}" in res
+ for depth in valuelist:
+ assert "InsertValue at depth %s is {'%s': %s}" % \
+ (depth, keylist[depth], depth) in res
+ for depth in valuelist:
+ assert "AppendValue at depth %s is {'%s': %s}" % \
+ (depth, keylist[depth], depth) in res
+
+def test_iterating_response():
+ obj = {'hi':'people'}
+ secondobj = {'bye':'friends'}
+ wsgiapp = RegistryUsingIteratorApp(regobj, obj)
+ wsgiapp = RegistryManager(wsgiapp)
+ wsgiapp = RegistryMiddleMan(wsgiapp, regobj, secondobj, 0)
+ wsgiapp = RegistryManager(wsgiapp)
+ app = TestApp(wsgiapp)
+ res = app.get('/')
+ assert 'Hello world' in res
+ assert 'The variable is' in res
+ assert "{'hi': 'people'}" in res
+ assert "InsertValue at depth 0 is {'bye': 'friends'}" in res
+ assert "AppendValue at depth 0 is {'bye': 'friends'}" in res
+
+def _test_restorer(stack, data):
+ # We need to test the request's specific Registry. Initialize it here so we
+ # can use it later (RegistryManager will re-use one preexisting in the
+ # environ)
+ registry = Registry()
+ extra_environ={'paste.throw_errors': False,
+ 'paste.registry': registry}
+ request_id = restorer.get_request_id(extra_environ)
+ app = TestApp(stack)
+ res = app.get('/', extra_environ=extra_environ, expect_errors=True)
+
+ # Ensure all the StackedObjectProxies are empty after the RegistryUsingApp
+ # raises an Exception
+ for stacked, proxied_obj, test_cleanup in data:
+ only_key = list(proxied_obj.keys())[0]
+ try:
+ assert only_key not in stacked
+ assert False
+ except TypeError:
+ # Definitely empty
+ pass
+
+ # Ensure the StackedObjectProxies & Registry 'work' in the simulated
+ # EvalException context
+ replace = {'replace': 'dict'}
+ new = {'new': 'object'}
+ restorer.restoration_begin(request_id)
+ try:
+ for stacked, proxied_obj, test_cleanup in data:
+ # Ensure our original data magically re-appears in this context
+ only_key, only_val = list(proxied_obj.items())[0]
+ assert only_key in stacked and stacked[only_key] == only_val
+
+ # Ensure the Registry still works
+ registry.prepare()
+ registry.register(stacked, new)
+ assert 'new' in stacked and stacked['new'] == 'object'
+ registry.cleanup()
+
+ # Back to the original (pre-prepare())
+ assert only_key in stacked and stacked[only_key] == only_val
+
+ registry.replace(stacked, replace)
+ assert 'replace' in stacked and stacked['replace'] == 'dict'
+
+ if test_cleanup:
+ registry.cleanup()
+ try:
+ stacked._current_obj()
+ assert False
+ except TypeError:
+ # Definitely empty
+ pass
+ finally:
+ restorer.restoration_end()
+
+def _restorer_data():
+ S = StackedObjectProxy
+ d = [[S(name='first'), dict(top='of the registry stack'), False],
+ [S(name='second'), dict(middle='of the stack'), False],
+ [S(name='third'), dict(bottom='of the STACK.'), False]]
+ return d
+
+def _set_cleanup_test(data):
+ """Instruct _test_restorer to check registry cleanup at this level of the stack
+ """
+ data[2] = True
+
+def test_restorer_basic():
+ data = _restorer_data()[0]
+ wsgiapp = RegistryUsingApp(data[0], data[1], raise_exc=Exception())
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data)
+ wsgiapp = EvalException(wsgiapp)
+ _test_restorer(wsgiapp, [data])
+
+def test_restorer_basic_manager_outside():
+ data = _restorer_data()[0]
+ wsgiapp = RegistryUsingApp(data[0], data[1], raise_exc=Exception())
+ wsgiapp = EvalException(wsgiapp)
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data)
+ _test_restorer(wsgiapp, [data])
+
+def test_restorer_middleman_nested_evalexception():
+ data = _restorer_data()[:2]
+ wsgiapp = RegistryUsingApp(data[0][0], data[0][1], raise_exc=Exception())
+ wsgiapp = EvalException(wsgiapp)
+ wsgiapp = RegistryMiddleMan(wsgiapp, data[1][0], data[1][1], 0)
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data[1])
+ _test_restorer(wsgiapp, data)
+
+def test_restorer_nested_middleman():
+ data = _restorer_data()[:2]
+ wsgiapp = RegistryUsingApp(data[0][0], data[0][1], raise_exc=Exception())
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data[0])
+ wsgiapp = RegistryMiddleMan(wsgiapp, data[1][0], data[1][1], 0)
+ wsgiapp = EvalException(wsgiapp)
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data[1])
+ _test_restorer(wsgiapp, data)
+
+def test_restorer_middlemen_nested_evalexception():
+ data = _restorer_data()
+ wsgiapp = RegistryUsingApp(data[0][0], data[0][1], raise_exc=Exception())
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data[0])
+ wsgiapp = EvalException(wsgiapp)
+ wsgiapp = RegistryMiddleMan(wsgiapp, data[1][0], data[1][1], 0)
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data[1])
+ wsgiapp = RegistryMiddleMan(wsgiapp, data[2][0], data[2][1], 1)
+ wsgiapp = RegistryManager(wsgiapp)
+ _set_cleanup_test(data[2])
+ _test_restorer(wsgiapp, data)
+
+def test_restorer_disabled():
+ # Ensure restoration_begin/end work safely when there's no Registry
+ wsgiapp = TestApp(simpleapp)
+ wsgiapp.get('/')
+ try:
+ restorer.restoration_begin(1)
+ finally:
+ restorer.restoration_end()
+ # A second call should do nothing
+ restorer.restoration_end()
diff --git a/tests/test_request.py b/tests/test_request.py
new file mode 100644
index 0000000..072304d
--- /dev/null
+++ b/tests/test_request.py
@@ -0,0 +1,66 @@
+# (c) 2005 Ben Bangert
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+from paste.fixture import *
+from paste.request import *
+from paste.wsgiwrappers import WSGIRequest
+import six
+
def simpleapp(environ, start_response):
    """Echo the request's GET vars, languages and accept-type matches."""
    start_response('200 OK', [('Content-type', 'text/plain')])
    req = WSGIRequest(environ)
    accepted = req.match_accept(['text/html', 'application/xml'])
    chunks = [
        'Hello world!\n',
        'The get is %s' % str(req.GET),
        ' and Val is %s\n' % req.GET.get('name'),
        'The languages are: %s\n' % req.languages,
        'The accepttypes is: %s\n' % accepted,
    ]
    if six.PY3:
        chunks = [chunk.encode('utf8') for chunk in chunks]
    return chunks
+
def test_gets():
    """Query-string parameters appear in request.GET."""
    app = TestApp(simpleapp)
    res = app.get('/')
    assert 'Hello' in res
    assert 'get is MultiDict([])' in res
    res = app.get('/?name=george')
    res.mustcontain("get is MultiDict([('name', 'george')])")
    res.mustcontain('Val is george')
+
def test_language_parsing():
    """Accept-Language is parsed, quality-sorted, with en-us as fallback."""
    app = TestApp(simpleapp)
    assert "The languages are: ['en-us']" in app.get('/')
    # Quality values, not header order, determine the ranking.
    expected = "languages are: ['da', 'en-gb', 'en', 'en-us']"
    for header in ('da, en-gb;q=0.8, en;q=0.7',
                   'en-gb;q=0.8, da, en;q=0.7'):
        assert expected in app.get('/', headers={'Accept-Language': header})
+
def test_mime_parsing():
    """match_accept honours the Accept header, including wildcards."""
    app = TestApp(simpleapp)
    cases = [
        ('text/html', "accepttypes is: ['text/html']"),
        ('application/xml', "accepttypes is: ['application/xml']"),
        ('application/xml,*/*',
         "accepttypes is: ['text/html', 'application/xml']"),
    ]
    for accept, expected in cases:
        assert expected in app.get('/', headers={'Accept': accept})
+
def test_bad_cookie():
    """Malformed Cookie headers must parse to {} instead of raising."""
    checks = [
        ('070-it-:><?0', {}),
        ('foo=bar', {'foo': 'bar'}),
        ('...', {}),
        ('=foo', {}),
        ('?=', {}),
    ]
    env = {}
    for raw, expected in checks:
        env['HTTP_COOKIE'] = raw
        assert get_cookie_dict(env) == expected
diff --git a/tests/test_request_form.py b/tests/test_request_form.py
new file mode 100644
index 0000000..cf43721
--- /dev/null
+++ b/tests/test_request_form.py
@@ -0,0 +1,36 @@
+import six
+
+from paste.request import *
+from paste.util.multidict import MultiDict
+
def test_parse_querystring():
    """parse_querystring keeps pair order and memoizes into the environ."""
    environ = {'QUERY_STRING': 'a=1&b=2&c=3&b=4'}
    parsed = parse_querystring(environ)
    assert parsed == [('a', '1'), ('b', '2'), ('c', '3'), ('b', '4')]
    # The result is cached alongside the exact string it was parsed from.
    assert environ['paste.parsed_querystring'] == (parsed,
                                                   environ['QUERY_STRING'])
    environ = {'QUERY_STRING': 'a&b&c=&d=1'}
    assert parse_querystring(environ) == [('a', ''), ('b', ''),
                                          ('c', ''), ('d', '1')]
+
def make_post(body):
    """Build a minimal WSGI environ for a urlencoded POST of *body*.

    :param body: the raw request body as bytes.
    :returns: an environ dict with content type/length, method, and a
        seekable ``wsgi.input`` stream wrapping *body*.
    """
    # six.BytesIO is literally an alias for io.BytesIO on both Python 2
    # and 3, so use the stdlib name directly.
    from io import BytesIO
    return {
        'CONTENT_TYPE': 'application/x-www-form-urlencoded',
        'CONTENT_LENGTH': str(len(body)),
        'REQUEST_METHOD': 'POST',
        'wsgi.input': BytesIO(body),
    }
+
def test_parsevars():
    """parse_formvars returns a MultiDict and caches it in the environ."""
    environ = make_post(b'a=1&b=2&c=3&b=4')
    parsed = parse_formvars(environ)
    assert isinstance(parsed, MultiDict)
    assert parsed == MultiDict([('a', '1'), ('b', '2'), ('c', '3'),
                                ('b', '4')])
    assert environ['paste.parsed_formvars'] == (parsed,
                                                environ['wsgi.input'])
    # XXX: http://trac.pythonpaste.org/pythonpaste/ticket/125
diff --git a/tests/test_response.py b/tests/test_response.py
new file mode 100644
index 0000000..71f6f97
--- /dev/null
+++ b/tests/test_response.py
@@ -0,0 +1,11 @@
+from paste.response import *
+
def test_replace_header():
    """replace_header appends missing headers and swaps existing ones."""
    headers = [('content-type', 'text/plain'), ('x-blah', 'foobar')]
    # A header that wasn't present is appended at the end.
    replace_header(headers, 'content-length', '10')
    assert headers[-1] == ('content-length', '10')
    # Matching is case-insensitive; the previous value is removed.
    replace_header(headers, 'Content-Type', 'text/html')
    assert ('content-type', 'text/html') in headers
    assert ('content-type', 'text/plain') not in headers
+
diff --git a/tests/test_session.py b/tests/test_session.py
new file mode 100644
index 0000000..b67bda5
--- /dev/null
+++ b/tests/test_session.py
@@ -0,0 +1,56 @@
+from paste.session import SessionMiddleware
+from paste.fixture import TestApp
+import six
+
# Shared mutable cell: tests put a value in here and wsgi_app copies it
# into the session on the /put1 and /put2 paths.
info = []
+
def wsgi_app(environ, start_response):
    """Session demo app.

    /get1 and /put1 obtain the session *before* calling start_response;
    /get2 and /put2 obtain it afterwards (and /put2 never calls
    start_response at all), exercising both orderings against
    SessionMiddleware.
    """
    path = environ.get('PATH_INFO', '')
    factory = environ['paste.session.factory']
    if path in ('/get1', '/get2'):
        if path == '/get1':
            session = factory()
        start_response('200 OK', [('content-type', 'text/plain')])
        if path == '/get2':
            session = factory()
        if 'info' in session:
            payload = str(session['info'])
            if six.PY3:
                payload = payload.encode('utf8')
            return [payload]
        return [b'no-info']
    if path in ('/put1', '/put2'):
        if path == '/put1':
            session = factory()
            session['info'] = info[0]
        start_response('200 OK', [('content-type', 'text/plain')])
        if path == '/put2':
            session = factory()
            session['info'] = info[0]
        return [b'foo']
+
# Wrap the app so each request's environ carries 'paste.session.factory'.
wsgi_app = SessionMiddleware(wsgi_app)
+
def test_app1():
    """Session values persist across requests for both access orderings."""
    app = TestApp(wsgi_app)
    assert app.get('/get1').body == b'no-info'
    assert app.get('/get2').body == b'no-info'
    info[:] = ['test']
    app.get('/put1')
    assert app.get('/get1').body == b'test'
    assert app.get('/get2').body == b'test'
+
def test_app2():
    """Storing via /put2 (which never calls start_response) still persists."""
    app = TestApp(wsgi_app)
    info[:] = ['fluff']
    app.get('/put2')
    assert app.get('/get1').body == b'fluff'
    assert app.get('/get2').body == b'fluff'
+
+
diff --git a/tests/test_template.txt b/tests/test_template.txt
new file mode 100644
index 0000000..1313d34
--- /dev/null
+++ b/tests/test_template.txt
@@ -0,0 +1,136 @@
+The templating language is fairly simple, just {{stuff}}. For
+example::
+
+ >>> from paste.util.template import Template, sub
+ >>> sub('Hi {{name}}', name='Ian')
+ 'Hi Ian'
+ >>> Template('Hi {{repr(name)}}').substitute(name='Ian')
+ "Hi 'Ian'"
+ >>> Template('Hi {{name+1}}').substitute(name='Ian') # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ TypeError: cannot concatenate 'str' and 'int' objects at line 1 column 6
+
+It also has Django-style piping::
+
+ >>> sub('Hi {{name|repr}}', name='Ian')
+ "Hi 'Ian'"
+
+Note that None shows up as an empty string::
+
+ >>> sub('Hi {{name}}', name=None)
+ 'Hi '
+
+And if/elif/else::
+
+ >>> t = Template('{{if x}}{{y}}{{else}}{{z}}{{endif}}')
+ >>> t.substitute(x=1, y=2, z=3)
+ '2'
+ >>> t.substitute(x=0, y=2, z=3)
+ '3'
+ >>> t = Template('{{if x > 0}}positive{{elif x < 0}}negative{{else}}zero{{endif}}')
+ >>> t.substitute(x=1), t.substitute(x=-10), t.substitute(x=0)
+ ('positive', 'negative', 'zero')
+
+Plus a for loop::
+
+ >>> t = Template('{{for i in x}}i={{i}}\n{{endfor}}')
+ >>> t.substitute(x=range(3))
+ 'i=0\ni=1\ni=2\n'
+ >>> t = Template('{{for a, b in sorted(z.items()):}}{{a}}={{b}},{{endfor}}')
+ >>> t.substitute(z={1: 2, 3: 4})
+ '1=2,3=4,'
+ >>> t = Template('{{for i in x}}{{if not i}}{{break}}'
+ ... '{{endif}}{{i}} {{endfor}}')
+ >>> t.substitute(x=[1, 2, 0, 3, 4])
+ '1 2 '
+ >>> t = Template('{{for i in x}}{{if not i}}{{continue}}'
+ ... '{{endif}}{{i}} {{endfor}}')
+ >>> t.substitute(x=[1, 2, 0, 3, 0, 4])
+ '1 2 3 4 '
+
+Also Python blocks::
+
+ >>> sub('{{py:\nx=1\n}}{{x}}')
+ '1'
+
+And some syntax errors::
+
+ >>> t = Template('{{if x}}', name='foo.html')
+ Traceback (most recent call last):
+ ...
+ TemplateError: No {{endif}} at line 1 column 3 in foo.html
+ >>> t = Template('{{for x}}', name='foo2.html')
+ Traceback (most recent call last):
+ ...
+ TemplateError: Bad for (no "in") in 'x' at line 1 column 3 in foo2.html
+
+There's also an HTMLTemplate that uses HTMLisms::
+
+ >>> from paste.util.template import HTMLTemplate, sub_html, html
+ >>> sub_html('hi {{name}}', name='<foo>')
+ 'hi &lt;foo&gt;'
+
+But if you don't want quoting to happen you can do::
+
+ >>> sub_html('hi {{name}}', name=html('<foo>'))
+ 'hi <foo>'
+ >>> sub_html('hi {{name|html}}', name='<foo>')
+ 'hi <foo>'
+
+Also a couple of handy functions::
+
+ >>> t = HTMLTemplate('<a href="article?id={{id|url}}" {{attr(class_=class_)}}>')
+ >>> t.substitute(id=1, class_='foo')
+ '<a href="article?id=1" class="foo">'
+ >>> t.substitute(id='with space', class_=None)
+ '<a href="article?id=with%20space" >'
+
+There's a handyish looper thing you can also use in your templates (or
+in Python, but it's more useful in templates generally)::
+
+ >>> from paste.util.looper import looper
+ >>> seq = ['apple', 'asparagus', 'Banana', 'orange']
+ >>> for loop, item in looper(seq):
+ ... if item == 'apple':
+ ... assert loop.first
+ ... elif item == 'orange':
+ ... assert loop.last
+ ... if loop.first_group(lambda i: i[0].upper()):
+ ... print('%s:' % item[0].upper())
+ ... print("%s %s" % (loop.number, item))
+ A:
+ 1 apple
+ 2 asparagus
+ B:
+ 3 Banana
+ O:
+ 4 orange
+
+It will also strip out empty lines, when there is a line that only
+contains a directive/statement (if/for, etc)::
+
+ >>> sub('{{if 1}}\n{{x}}\n{{endif}}\n', x=0)
+ '0\n'
+ >>> sub('{{if 1}}x={{x}}\n{{endif}}\n', x=1)
+ 'x=1\n'
+ >>> sub('{{if 1}}\nx={{x}}\n{{endif}}\n', x=1)
+ 'x=1\n'
+
+Lastly, there is a special directive that will create a default value
+for a variable, if no value is given::
+
+ >>> sub('{{default x=1}}{{x}}', x=2)
+ '2'
+ >>> sub('{{default x=1}}{{x}}')
+ '1'
+ >>> # The normal case:
+ >>> sub('{{x}}') # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ NameError: name 'x' is not defined at line 1 column 3
+
+And comments work::
+
+ >>> sub('Test=x{{#whatever}}')
+ 'Test=x'
diff --git a/tests/test_urlmap.py b/tests/test_urlmap.py
new file mode 100644
index 0000000..f7ec729
--- /dev/null
+++ b/tests/test_urlmap.py
@@ -0,0 +1,53 @@
+from paste.urlmap import *
+from paste.fixture import *
+import six
+
def make_app(response_text):
    """Return a WSGI app rendering ``response_text % environ`` as HTML."""
    def app(environ, start_response):
        start_response('200 OK', [('Content-type', 'text/html')])
        body = response_text % environ
        if six.PY3:
            body = body.encode('ascii')
        return [body]
    return app
+
def test_map():
    """URLMap routes by longest prefix, splitting SCRIPT_NAME/PATH_INFO."""
    mapper = URLMap({})
    app = TestApp(mapper)
    text = '%s script_name="%%(SCRIPT_NAME)s" path_info="%%(PATH_INFO)s"'
    for prefix, label in (('', 'root'), ('/foo', 'foo-only'),
                          ('/foo/bar', 'foo:bar'), ('/f', 'f-only')):
        mapper[prefix] = make_app(text % label)
    expectations = [
        ('/', ('root', 'script_name=""', 'path_info="/"')),
        ('/blah', ('root', 'script_name=""', 'path_info="/blah"')),
        ('/foo/and/more',
         ('script_name="/foo"', 'path_info="/and/more"', 'foo-only')),
        ('/foo/bar/baz',
         ('foo:bar', 'script_name="/foo/bar"', 'path_info="/baz"')),
        # '/fffzzz' must NOT match the '/f' prefix (segment boundary).
        ('/fffzzz', ('root', 'path_info="/fffzzz"')),
        ('/f/z/y', ('script_name="/f"', 'path_info="/z/y"', 'f-only')),
    ]
    for url, fragments in expectations:
        res = app.get(url)
        for fragment in fragments:
            res.mustcontain(fragment)
+
def test_404():
    """404 pages must escape the request path (no reflected XSS)."""
    mapper = URLMap({})
    app = TestApp(mapper, extra_environ={'HTTP_ACCEPT': 'text/html'})
    response = app.get("/-->%0D<script>alert('xss')</script>", status=404)
    assert b'--><script' not in response.body
    response = app.get("/--%01><script>", status=404)
    assert b'--\x01><script>' not in response.body
diff --git a/tests/test_urlparser.py b/tests/test_urlparser.py
new file mode 100644
index 0000000..21c210e
--- /dev/null
+++ b/tests/test_urlparser.py
@@ -0,0 +1,178 @@
+import os
+from paste.urlparser import *
+from paste.fixture import *
+from pkg_resources import get_distribution
+
def relative_path(name):
    """Path to *name* under urlparser_data that still contains '..'.

    The redundant 'urlparser_data/../urlparser_data' hop is deliberate:
    callers verify that parsers normalize such paths away.
    """
    base = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                        'urlparser_data')
    dotted = os.path.join('urlparser_data', '..', 'urlparser_data', name)
    return os.path.join(base, dotted)
+
def path(name):
    """Absolute path of *name* inside this test's urlparser_data directory."""
    data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'urlparser_data')
    return os.path.join(data_dir, name)
+
def make_app(name):
    """Wrap a URLParser over the named data directory in a TestApp."""
    parser = URLParser({}, path(name), name, index_names=['index', 'Main'])
    return TestApp(parser)
+
def test_find_file():
    """Files resolve by name/index with correct types; parents stay hidden."""
    app = make_app('find_file')
    ok_cases = [
        ('/', 'index1', 'text/plain'),
        ('/index', 'index1', 'text/plain'),
        ('/index.txt', 'index1', 'text/plain'),
        ('/test2.html', 'test2', 'text/html'),
        ('/test 3.html', 'test 3', 'text/html'),
        ('/test%203.html', 'test 3', 'text/html'),
        ('/dir with spaces/test 4.html', 'test 4', 'text/html'),
        ('/dir%20with%20spaces/test%204.html', 'test 4', 'text/html'),
    ]
    for url, fragment, ctype in ok_cases:
        res = app.get(url)
        assert fragment in res
        assert res.header('content-type') == ctype
    # Nothing outside the served root may be reachable, even URL-encoded.
    for url in ('/../secured.txt',
                '/dir with spaces/../../secured.txt',
                '/%2e%2e/secured.txt',
                '/%2e%2e%3fsecured.txt',
                '/..%3fsecured.txt',
                '/dir%20with%20spaces/%2e%2e/%2e%2e/secured.txt'):
        app.get(url, status=404)
+
def test_deep():
    """Directory URLs redirect to a trailing slash, then serve their index."""
    app = make_app('deep')
    assert 'index2' in app.get('/')
    res = app.get('/sub')
    assert res.status == 301
    print(res)
    assert res.header('location') == 'http://localhost/sub/'
    assert 'http://localhost/sub/' in res
    assert 'index3' in app.get('/sub/')
+
def test_python():
    """.py files under the parsed directory execute as WSGI applications."""
    app = make_app('python')
    res = app.get('/simpleapp')
    assert 'test1' in res
    assert res.header('test-header') == 'TEST!'
    assert res.header('content-type') == 'text/html'
    assert 'test2' in app.get('/stream')
    assert 'subsimple' in app.get('/sub/simpleapp')
+
def test_hook():
    """The urlparser hook can consume leading path segments (a user name)."""
    app = make_app('hook')
    assert 'user: bob' in app.get('/bob/app')
    assert 'index: tim' in app.get('/tim/')
+
def test_not_found_hook():
    """The not_found_hook may rewrite unknown paths before giving up."""
    app = make_app('not_found')
    res = app.get('/simple/notfound')
    assert res.status == 200
    assert 'not found' in res
    assert 'is found' in app.get('/simple/found')
    res = app.get('/recur/__notfound', status=404)
    # @@: It's unfortunate that the original path doesn't actually show up
    assert '/recur/notfound' in res
    res = app.get('/recur/__isfound')
    assert res.status == 200
    assert 'is found' in res
    assert 'user: None' in app.get('/user/list')
    res = app.get('/user/bob/list')
    assert res.status == 200
    assert 'user: bob' in res
+
def test_relative_path_in_static_parser():
    """StaticURLParser must normalize '..' out of its root directory.

    The fed-in path deliberately contains '..' segments (see
    relative_path); the parser's resolved root must not.
    """
    # Dropped an unused local (`x = relative_path('find_file')`) that
    # duplicated the call below and was never read.
    app = StaticURLParser(relative_path('find_file'))
    assert '..' not in app.root_directory
+
def test_xss():
    """Static 404 pages must not reflect unescaped request paths."""
    parser = StaticURLParser(relative_path('find_file'))
    app = TestApp(parser, extra_environ={'HTTP_ACCEPT': 'text/html'})
    response = app.get("/-->%0D<script>alert('xss')</script>", status=404)
    assert b'--><script>' not in response.body
+
def test_static_parser():
    """StaticURLParser serves plain files but never directories or parents."""
    testapp = TestApp(StaticURLParser(path('find_file')))
    testapp.get('', status=301)
    testapp.get('/', status=404)
    assert testapp.get('/index.txt').body.strip() == b'index1'
    # Extra path segments after a file are an error.
    testapp.get('/index.txt/foo', status=404)
    for url, expected in (('/test 3.html', b'test 3'),
                          ('/test%203.html', b'test 3'),
                          ('/dir with spaces/test 4.html', b'test 4'),
                          ('/dir%20with%20spaces/test%204.html', b'test 4')):
        assert testapp.get(url).body.strip() == expected
    # Nothing above the served root may be reachable, even URL-encoded.
    for url in ('/../secured.txt',
                '/dir with spaces/../../secured.txt',
                '/%2e%2e/secured.txt',
                '/dir%20with%20spaces/%2e%2e/%2e%2e/secured.txt',
                '/dir%20with%20spaces/'):
        testapp.get(url, status=404)
+
def test_egg_parser():
    """PkgResourcesParser serves package data but never escapes the package."""
    testapp = TestApp(PkgResourcesParser('Paste', 'paste'))
    testapp.get('', status=301)
    testapp.get('/', status=404)
    testapp.get('/flup_session', status=404)
    assert 'ClassInitMeta' in testapp.get('/util/classinit.py')
    testapp.get('/util/classinit', status=404)
    testapp.get('/util', status=301)
    testapp.get('/util/classinit.py/foo', status=404)

    # Walk upward from the Paste distribution root until a readable file
    # is found; that file must NOT be reachable through the parser.  (When
    # Paste is installed as an .egg the root itself may hold nothing
    # readable, hence the upward search.)
    pkg_root_path = get_distribution('Paste').location
    search_path = pkg_root_path
    target = None
    levels_up = 0
    while target is None and os.path.normpath(search_path) != os.path.sep:
        for entry in os.listdir(search_path):
            candidate = os.path.join(search_path, entry)
            if os.path.isfile(candidate) and os.access(candidate, os.R_OK):
                target = entry
                break
        search_path = os.path.dirname(search_path)
        levels_up += 1
    assert target is not None, \
        'test_egg_parser requires a readable file in a parent dir of the\n' \
        'Paste pkg\'s root dir:\n%s' % pkg_root_path

    raw = '/' + '../' * levels_up + target
    quoted = '/' + '%2e%2e/' * levels_up + target
    testapp.get(raw, status=404)
    testapp.get('/util/..' + raw, status=404)
    testapp.get(quoted, status=404)
    testapp.get('/util/%2e%2e' + quoted, status=404)
diff --git a/tests/test_util/__init__.py b/tests/test_util/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/test_util/__init__.py
diff --git a/tests/test_util/test_datetimeutil.py b/tests/test_util/test_datetimeutil.py
new file mode 100644
index 0000000..45d96c7
--- /dev/null
+++ b/tests/test_util/test_datetimeutil.py
@@ -0,0 +1,135 @@
+# (c) 2005 Clark C. Evans and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+# Some of this code was funded by: http://prometheusresearch.com
+from time import localtime
+from datetime import date
+from paste.util.datetimeutil import *
+
def test_timedelta():
    """normalize_timedelta converts assorted duration spellings to hours."""
    assert normalize_timedelta("") == ''
    cases = [
        ("6m", '0.10'), ("30m", '0.50'), ("45m", '0.75'),
        ("60 min", '1.00'), ("90min", '1.50'), ("1.50", '1.50'),
        ("4 : 30", '4.50'), ("1h 30m", '1.50'), ("1", '1.00'),
        ("1 hour", '1.00'), ("480 mins", '8.00'), ("8h", '8.00'),
        ("0.5", '0.50'), (".1", '0.10'), (".50", '0.50'),
        ("0.75", '0.75'),
    ]
    for raw, expected in cases:
        assert normalize_timedelta(raw) == expected
+
def test_time():
    """normalize_time parses loose time strings into 12-hour HH:MM AM/PM."""
    cases = [
        ("3p", '03:00 PM'), ("300", '03:00 AM'), ("322", '03:22 AM'),
        ("1322", '01:22 PM'), ("13", '01:00 PM'), ("noon", '12:00 PM'),
        ("6", "06:00 PM"), ("1", "01:00 PM"), ("7", "07:00 AM"),
        ("1 pm", "01:00 PM"), ("3:30 pm", "03:30 PM"),
        ("3 30 pm", "03:30 PM"), ("3 30 P.M.", "03:30 PM"),
        ("0", "12:00 PM"), ("1200 AM", "12:00 AM"),
    ]
    for raw, expected in cases:
        assert normalize_time(raw, ampm=True) == expected
+
def test_date():
    """parse_date/normalize_date accept many informal date spellings."""
    yr, mo = localtime()[:2]
    parse_cases = [
        ("411", date(yr, 4, 11)), ("APR11", date(yr, 4, 11)),
        ("11APR", date(yr, 4, 11)), ("4 11", date(yr, 4, 11)),
        ("11 APR", date(yr, 4, 11)), ("APR 11", date(yr, 4, 11)),
        ("11", date(yr, mo, 11)), ("APR", date(yr, 4, 1)),
        ("4/11", date(yr, 4, 11)), ("today", date.today()),
        ("now", date.today()), ("", None),
    ]
    for raw, expected in parse_cases:
        assert parse_date(raw) == expected
    assert normalize_date(None) == ''

    normalize_cases = [
        ("20010203", '2001-02-03'),
        ("1999 4 11", '1999-04-11'), ("1999 APR 11", '1999-04-11'),
        ("APR 11 1999", '1999-04-11'), ("11 APR 1999", '1999-04-11'),
        ("4 11 1999", '1999-04-11'), ("1999 APR", '1999-04-01'),
        ("1999 4", '1999-04-01'), ("4 1999", '1999-04-01'),
        ("APR 1999", '1999-04-01'), ("1999", '1999-01-01'),
        ("1APR1999", '1999-04-01'), ("1APR2001", '2001-04-01'),
        # '+7' / ' 7' suffixes add days.
        ("1999-04-11+7", '1999-04-18'), ("1999-04-11 7", '1999-04-18'),
        ("1 apr 1999", '1999-04-01'), ("11 apr 1999", '1999-04-11'),
        ("11 Apr 1999", '1999-04-11'), ("11-apr-1999", '1999-04-11'),
        ("11 April 1999", '1999-04-11'), ("11 APRIL 1999", '1999-04-11'),
        ("11 april 1999", '1999-04-11'),
        # Month names match by prefix, so even "aprick" reads as April.
        ("11 aprick 1999", '1999-04-11'),
        ("APR 11, 1999", '1999-04-11'), ("4/11/1999", '1999-04-11'),
        ("4-11-1999", '1999-04-11'), ("1999-4-11", '1999-04-11'),
        ("19990411", '1999-04-11'),
        ("1 Jan 1999", '1999-01-01'), ("1 Feb 1999", '1999-02-01'),
        ("1 Mar 1999", '1999-03-01'), ("1 Apr 1999", '1999-04-01'),
        ("1 May 1999", '1999-05-01'), ("1 Jun 1999", '1999-06-01'),
        ("1 Jul 1999", '1999-07-01'), ("1 Aug 1999", '1999-08-01'),
        ("1 Sep 1999", '1999-09-01'), ("1 Oct 1999", '1999-10-01'),
        ("1 Nov 1999", '1999-11-01'), ("1 Dec 1999", '1999-12-01'),
        ("1999-4-30", '1999-04-30'),
        # Leap-year handling, including the century rules.
        ("29 FEB 2000", '2000-02-29'), ("28 FEB 2001", '2001-02-28'),
        ("29 FEB 2004", '2004-02-29'), ("28 FEB 2100", '2100-02-28'),
        ("28 FEB 1900", '1900-02-28'),
    ]
    for raw, expected in normalize_cases:
        assert normalize_date(raw) == expected

    def assert_rejects(val):
        # normalize_date must reject *val* with TypeError or ValueError.
        try:
            normalize_date(val)
        except (TypeError, ValueError):
            return
        raise ValueError("type error expected", val)

    for bad in ("2000-13-11", "APR 99", "29 FEB 1900", "29 FEB 2100",
                "29 FEB 2001", "1999-4-31", "APR 99", "20301", "020301",
                "1APR99", "1APR01", "1 APR 99", "1 APR 01", "11/5/01"):
        assert_rejects(bad)
+
diff --git a/tests/test_util/test_mimeparse.py b/tests/test_util/test_mimeparse.py
new file mode 100644
index 0000000..9b9b675
--- /dev/null
+++ b/tests/test_util/test_mimeparse.py
@@ -0,0 +1,235 @@
+# (c) 2010 Ch. Zwerschke and contributors
+# This module is part of the Python Paste Project and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from paste.util.mimeparse import *
+
def test_parse_mime_type():
    """parse_mime_type splits 'type/subtype;params' into a 3-tuple."""
    cases = [
        ('*/*', ('*', '*', {})),
        ('text/html', ('text', 'html', {})),
        ('audio/*; q=0.2', ('audio', '*', {'q': '0.2'})),
        ('text/x-dvi;level=1', ('text', 'x-dvi', {'level': '1'})),
        ('image/gif; level=2; q=0.4',
         ('image', 'gif', {'level': '2', 'q': '0.4'})),
        ('application/xhtml;level=3;q=0.5',
         ('application', 'xhtml', {'level': '3', 'q': '0.5'})),
        ('application/xml', ('application', 'xml', {})),
        ('application/xml;q=1', ('application', 'xml', {'q': '1'})),
        ('application/xml ; q=1;b=other',
         ('application', 'xml', {'q': '1', 'b': 'other'})),
        # q is NOT validated here; that happens in parse_media_range.
        ('application/xml ; q=2;b=other',
         ('application', 'xml', {'q': '2', 'b': 'other'})),
        ('application/xhtml;q=0.5', ('application', 'xhtml', {'q': '0.5'})),
        ('application/xhtml;q=0.5;ver=1.2',
         ('application', 'xhtml', {'q': '0.5', 'ver': '1.2'})),
    ]
    for raw, expected in cases:
        assert parse_mime_type(raw) == expected
+
def test_parse_illformed_mime_type():
    """Ill-formed mime types fall back to '*' pieces instead of raising."""
    cases = [
        ('*', ('*', '*', {})),
        ('text', ('text', '*', {})),
        ('text/', ('text', '*', {})),
        ('/plain', ('*', 'plain', {})),
        ('/', ('*', '*', {})),
        ('text/plain;', ('text', 'plain', {})),
        (';q=0.5', ('*', '*', {'q': '0.5'})),
        ('*; q=.2', ('*', '*', {'q': '.2'})),
        ('image; q=.7; level=3', ('image', '*', {'q': '.7', 'level': '3'})),
        ('*;q=1', ('*', '*', {'q': '1'})),
        # Empty or nameless parameters are dropped entirely.
        ('*;q=', ('*', '*', {})),
        ('*;=0.5', ('*', '*', {})),
        ('*;q=foobar', ('*', '*', {'q': 'foobar'})),
        ('image/gif; level=2; q=2',
         ('image', 'gif', {'level': '2', 'q': '2'})),
        ('application/xml;q=', ('application', 'xml', {})),
        ('application/xml ;q=', ('application', 'xml', {})),
        (' *; q =;', ('*', '*', {})),
        (' *; q=.2', ('*', '*', {'q': '.2'})),
    ]
    for raw, expected in cases:
        assert parse_mime_type(raw) == expected
+
def test_parse_media_range():
    """parse_media_range is parse_mime_type plus a sanitized 'q' value."""
    cases = [
        ('application/*;q=0.5', ('application', '*', {'q': '0.5'})),
        ('text/plain', ('text', 'plain', {'q': '1'})),
        ('*', ('*', '*', {'q': '1'})),
        (';q=0.5', ('*', '*', {'q': '0.5'})),
        ('*;q=0.5', ('*', '*', {'q': '0.5'})),
        ('*;q=1', ('*', '*', {'q': '1'})),
        # Missing, non-numeric, or out-of-range q values reset to '1'.
        ('*;q=', ('*', '*', {'q': '1'})),
        ('*;q=-1', ('*', '*', {'q': '1'})),
        ('*;q=foobar', ('*', '*', {'q': '1'})),
        ('*;q=0.0001', ('*', '*', {'q': '0.0001'})),
        ('*;q=1000.0', ('*', '*', {'q': '1'})),
        ('*;q=0', ('*', '*', {'q': '0'})),
        ('*;q=0.0000', ('*', '*', {'q': '0.0000'})),
        ('*;q=1.0001', ('*', '*', {'q': '1'})),
        ('*;q=2', ('*', '*', {'q': '1'})),
        ('*;q=1e3', ('*', '*', {'q': '1'})),
        ('image/gif; level=2', ('image', 'gif', {'level': '2', 'q': '1'})),
        ('image/gif; level=2; q=0.5',
         ('image', 'gif', {'level': '2', 'q': '0.5'})),
        ('image/gif; level=2; q=2',
         ('image', 'gif', {'level': '2', 'q': '1'})),
        ('application/xml', ('application', 'xml', {'q': '1'})),
        ('application/xml;q=1', ('application', 'xml', {'q': '1'})),
        ('application/xml;q=', ('application', 'xml', {'q': '1'})),
        ('application/xml ;q=', ('application', 'xml', {'q': '1'})),
        ('application/xml ; q=1;b=other',
         ('application', 'xml', {'q': '1', 'b': 'other'})),
        ('application/xml ; q=2;b=other',
         ('application', 'xml', {'q': '1', 'b': 'other'})),
        (' *; q =;', ('*', '*', {'q': '1'})),
        (' *; q=.2', ('*', '*', {'q': '.2'})),
    ]
    for raw, expected in cases:
        assert parse_media_range(raw) == expected
+
def test_fitness_and_quality_parsed():
    """Best (fitness, quality) across parsed ranges for a given mime type."""
    cases = [
        ('*/*;q=0.7', [('foo', 'bar', {'q': '0.5'})], (0, 0.5)),
        ('foo/*;q=0.7', [('foo', 'bar', {'q': '0.5'})], (100, 0.5)),
        ('*/bar;q=0.7', [('foo', 'bar', {'q': '0.5'})], (10, 0.5)),
        ('foo/bar;q=0.7', [('foo', 'bar', {'q': '0.5'})], (110, 0.5)),
        # No match at all: fitness -1, quality 0.
        ('text/html;q=0.7', [('foo', 'bar', {'q': '0.5'})], (-1, 0)),
        ('text/html;q=0.7', [('text', 'bar', {'q': '0.5'})], (-1, 0)),
        ('text/html;q=0.7', [('foo', 'html', {'q': '0.5'})], (-1, 0)),
        ('text/html;q=0.7', [('text', '*', {'q': '0.5'})], (100, 0.5)),
        ('text/html;q=0.7', [('*', 'html', {'q': '0.5'})], (10, 0.5)),
        ('text/html;q=0.7',
         [('*', '*', {'q': '0'}), ('text', 'html', {'q': '0.5'})],
         (110, 0.5)),
        ('text/html;q=0.7',
         [('*', '*', {'q': '0.5'}), ('audio', '*', {'q': '0'})], (0, 0.5)),
        ('audio/mp3;q=0.7',
         [('*', '*', {'q': '0'}), ('audio', '*', {'q': '0.5'})],
         (100, 0.5)),
        ('*/mp3;q=0.7',
         [('foo', 'mp3', {'q': '0.5'}), ('audio', '*', {'q': '0'})],
         (10, 0.5)),
        ('audio/mp3;q=0.7',
         [('audio', 'ogg', {'q': '0'}), ('*', 'mp3', {'q': '0.5'})],
         (10, 0.5)),
        ('audio/mp3;q=0.7',
         [('*', 'ogg', {'q': '0'}), ('*', 'mp3', {'q': '0.5'})], (10, 0.5)),
        ('text/html;q=0.7',
         [('text', 'plain', {'q': '0'}),
          ('plain', 'html', {'q': '0'}),
          ('text', 'html', {'q': '0.5'}),
          ('html', 'text', {'q': '0'})], (110, 0.5)),
        ('text/html;q=0.7;level=2',
         [('plain', 'html', {'q': '0', 'level': '2'}),
          ('text', '*', {'q': '0.5', 'level': '3'}),
          ('*', 'html', {'q': '0.5', 'level': '2'}),
          ('image', 'gif', {'q': '0.5', 'level': '2'})], (100, 0.5)),
        ('text/html;q=0.7;level=2',
         [('text', 'plain', {'q': '0'}), ('text', 'html', {'q': '0'}),
          ('text', 'plain', {'q': '0', 'level': '2'}),
          ('text', 'html', {'q': '0.5', 'level': '2'}),
          ('*', '*', {'q': '0', 'level': '2'}),
          ('text', 'html', {'q': '0', 'level': '3'})], (111, 0.5)),
        ('text/html;q=0.7;level=2;opt=3',
         [('text', 'html', {'q': '0'}),
          ('text', 'html', {'q': '0', 'level': '2'}),
          ('text', 'html', {'q': '0', 'opt': '3'}),
          ('*', '*', {'q': '0', 'level': '2', 'opt': '3'}),
          ('text', 'html', {'q': '0', 'level': '3', 'opt': '3'}),
          ('text', 'html', {'q': '0.5', 'level': '2', 'opt': '3'}),
          ('*', '*', {'q': '0', 'level': '3', 'opt': '3'})], (112, 0.5)),
    ]
    for mime_type, ranges, expected in cases:
        assert fitness_and_quality_parsed(mime_type, ranges) == expected
+
def test_quality_parsed():
    """quality_parsed returns just the quality part of the best match."""
    assert quality_parsed('image/gif;q=0.7',
                          [('image', 'jpg', {'q': '0.5'})]) == 0
    assert quality_parsed('image/gif;q=0.7',
                          [('image', '*', {'q': '0.5'})]) == 0.5
    ranges = [
        ('*', '*', {'q': '0', 'quality': '100'}),
        ('audio', '*', {'q': '0', 'quality': '100'}),
        ('*', 'mp3', {'q': '0', 'quality': '100'}),
        ('audio', 'mp3', {'q': '0', 'quality': '50'}),
        ('audio', 'mp3', {'q': '0.5', 'quality': '100'}),
        ('audio', 'mp3', {'q': '0.5'}),
    ]
    assert quality_parsed('audio/mp3;q=0.7;quality=100', ranges) == 0.5
+
def test_quality():
    """quality() scores a mime type against an Accept header string."""
    header = ('text/*;q=0.3, text/html;q=0.7, text/html;level=1,'
              ' text/html;level=2;q=0.4, */*;q=0.5')
    for mime_type, expected in (('text/html;level=2', 0.4),
                                ('plain/text', 0.5),
                                ('text/html;level=1', 1),
                                ('image/jpeg', 0.5)):
        assert quality(mime_type, header) == expected
    assert quality('text/html',
                   'text/*;q=0.3, text/html;q=0.75, text/html;level=1,'
                   ' text/html;level=2;q=0.4, */*;q=0.5') == 0.75
    assert quality('text/plain',
                   'text/*;q=0.25, text/html;q=0.7, text/html;level=1,'
                   ' text/html;level=2;q=0.4, */*;q=0.5') == 0.25
    assert quality('text/html;level=2',
                   'text/*;q=0.3, text/html;q=0.7, text/html;level=1,'
                   ' text/html;level=2;q=0.375, */*;q=0.5') == 0.375
    # Unknown level falls back to the plain text/html quality.
    assert quality('text/html;level=3',
                   'text/*;q=0.3, text/html;q=0.75, text/html;level=1,'
                   ' text/html;level=2;q=0.4, */*;q=0.5') == 0.75
+
def test_best_match():
    """best_match picks the supported type the client most prefers."""
    assert best_match([], '*/*') == ''
    cases = [
        (['application/xbel+xml', 'text/xml'],
         'text/*;q=0.5,*/*; q=0.1', 'text/xml'),
        (['application/xbel+xml', 'audio/mp3'],
         'text/*;q=0.5,*/*; q=0.1', 'application/xbel+xml'),
        (['application/xbel+xml', 'audio/mp3'],
         'text/*;q=0.5,*/mp3; q=0.1', 'audio/mp3'),
        (['application/xbel+xml', 'text/plain', 'text/html'],
         'text/*;q=0.5,*/plain; q=0.1', 'text/plain'),
        (['application/xbel+xml', 'text/html', 'text/xhtml'],
         'text/*;q=0.1,*/xhtml; q=0.5', 'text/html'),
        (['application/xbel+xml', 'text/html', 'text/xhtml'],
         '*/html;q=0.1,*/xhtml; q=0.5', 'text/xhtml'),
        (['application/xbel+xml', 'application/xml'],
         'application/xbel+xml', 'application/xbel+xml'),
        (['application/xbel+xml', 'application/xml'],
         'application/xbel+xml; q=1', 'application/xbel+xml'),
        (['application/xbel+xml', 'application/xml'],
         'application/xml; q=1', 'application/xml'),
        (['application/xbel+xml', 'application/xml'],
         'application/*; q=1', 'application/xbel+xml'),
        (['application/xbel+xml', 'application/xml'],
         '*/*, application/xml', 'application/xml'),
        (['application/xbel+xml', 'text/xml'],
         'text/*;q=0.5,*/*; q=0.1', 'text/xml'),
        # No overlap at all yields the empty string.
        (['application/xbel+xml', 'text/xml'],
         'text/html,application/atom+xml; q=0.9', ''),
        (['application/json', 'text/html'],
         'application/json, text/javascript, */*', 'application/json'),
        (['application/json', 'text/html'],
         'application/json, text/html;q=0.9', 'application/json'),
        (['image/*', 'application/xml'], 'image/png', 'image/*'),
        (['image/*', 'application/xml'], 'image/*', 'image/*'),
    ]
    for supported, header, expected in cases:
        assert best_match(supported, header) == expected
+
def test_illformed_best_match():
    """best_match copes with bare '*' ranges in the Accept header."""
    header = 'text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2'
    cases = [
        (['image/png', 'image/jpeg', 'image/gif', 'text/html'],
         'image/jpeg'),
        (['image/png', 'image/jpg', 'image/tif', 'text/html'],
         'text/html'),
        (['image/png', 'image/jpg', 'image/tif', 'audio/mp3'],
         'image/png'),
    ]
    for supported, expected in cases:
        assert best_match(supported, header) == expected
+
def test_sorted_match():
    """desired_matches filters supported types in the client's order."""
    long_header = ('text/xml,application/xml,application/xhtml+xml,'
                   'text/html;q=0.9,text/plain;q=0.8,image/png')
    assert desired_matches(['text/html', 'application/xml'],
                           long_header) == ['text/html', 'application/xml']
    assert desired_matches(
        ['text/html', 'application/xml'],
        'application/xml,application/json') == ['application/xml']
    assert desired_matches(['text/xhtml', 'text/plain', 'application/xhtml'],
                           long_header) == ['text/plain']
diff --git a/tests/test_util/test_quoting.py b/tests/test_util/test_quoting.py
new file mode 100644
index 0000000..5f5e0a8
--- /dev/null
+++ b/tests/test_util/test_quoting.py
@@ -0,0 +1,28 @@
+from paste.util import quoting
+import six
+import unittest
+
+class TestQuoting(unittest.TestCase):
+ def test_html_unquote(self):
+ self.assertEqual(quoting.html_unquote(b'&lt;hey&nbsp;you&gt;'),
+ u'<hey\xa0you>')
+ self.assertEqual(quoting.html_unquote(b''),
+ u'')
+ self.assertEqual(quoting.html_unquote(b'&blahblah;'),
+ u'&blahblah;')
+ self.assertEqual(quoting.html_unquote(b'\xe1\x80\xa9'),
+ u'\u1029')
+
+ def test_html_quote(self):
+ self.assertEqual(quoting.html_quote(1),
+ '1')
+ self.assertEqual(quoting.html_quote(None),
+ '')
+ self.assertEqual(quoting.html_quote('<hey!>'),
+ '&lt;hey!&gt;')
+ if six.PY3:
+ self.assertEqual(quoting.html_quote(u'<\u1029>'),
+ u'&lt;\u1029&gt;')
+ else:
+ self.assertEqual(quoting.html_quote(u'<\u1029>'),
+ '&lt;\xe1\x80\xa9&gt;')
diff --git a/tests/test_wsgilib.py b/tests/test_wsgilib.py
new file mode 100644
index 0000000..72573cf
--- /dev/null
+++ b/tests/test_wsgilib.py
@@ -0,0 +1,52 @@
+from paste.wsgilib import add_close
+
+
+def app_iterable_func_bytes():
+ yield b'a'
+ yield b'b'
+ yield b'c'
+
+
+def app_iterable_func_unicode():
+ yield b'a'.decode('ascii')
+ yield b'b'.decode('ascii')
+ yield b'c'.decode('ascii')
+
+
+def close_func():
+ global close_func_called
+ close_func_called = True
+
+
+def test_add_close_bytes():
+ global close_func_called
+
+ close_func_called = False
+ lst = []
+ app_iterable = app_iterable_func_bytes()
+
+ obj = add_close(app_iterable, close_func)
+ for x in obj:
+ lst.append(x)
+ obj.close()
+
+ assert lst == [b'a', b'b', b'c']
+ assert close_func_called
+ assert obj._closed
+
+
+def test_add_close_unicode():
+ global close_func_called
+
+ close_func_called = False
+ lst = []
+ app_iterable = app_iterable_func_unicode()
+
+ obj = add_close(app_iterable, close_func)
+ for x in obj:
+ lst.append(x)
+ obj.close()
+
+ assert lst == ['a', 'b', 'c']
+ assert close_func_called
+ assert obj._closed
diff --git a/tests/test_wsgiwrappers.py b/tests/test_wsgiwrappers.py
new file mode 100644
index 0000000..75d03ed
--- /dev/null
+++ b/tests/test_wsgiwrappers.py
@@ -0,0 +1,146 @@
+# -*- coding: utf-8 -*-
+# (c) 2007 Philip Jenvey; written for Paste (http://pythonpaste.org)
+# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
+import cgi
+from paste.fixture import TestApp
+from paste.wsgiwrappers import WSGIRequest, WSGIResponse
+import six
+
+class AssertApp(object):
+ def __init__(self, assertfunc):
+ self.assertfunc = assertfunc
+
+ def __call__(self, environ, start_response):
+ start_response('200 OK', [('Content-type','text/plain')])
+ self.assertfunc(environ)
+ return [b'Passed']
+
+no_encoding = object()
+def valid_name(name, encoding=no_encoding, post=False):
+ def assert_valid_name(environ):
+ if encoding is not no_encoding:
+ WSGIRequest.defaults._push_object(dict(content_type='text/html',
+ charset=encoding))
+ try:
+ request = WSGIRequest(environ)
+ if post:
+ params = request.POST
+ else:
+ params = request.GET
+ assert params['name'] == name
+ assert request.params['name'] == name
+ finally:
+ if encoding is not no_encoding:
+ WSGIRequest.defaults._pop_object()
+ return assert_valid_name
+
+def test_wsgirequest_charset():
+ # Jose, 'José'
+ app = TestApp(AssertApp(assertfunc=valid_name(u'José', encoding='UTF-8')))
+ res = app.get('/?name=Jos%C3%A9')
+
+ # Tanaka, '田中'
+ app = TestApp(AssertApp(assertfunc=valid_name(u'田中', encoding='UTF-8')))
+ res = app.get('/?name=%E7%94%B0%E4%B8%AD')
+
+ # Nippon (Japan), '日本'
+ app = TestApp(AssertApp(assertfunc=valid_name(u'日本', encoding='UTF-8',
+ post=True)))
+ res = app.post('/', params=dict(name='日本'))
+
+ # WSGIRequest will determine the charset from the Content-Type header when
+ # unicode is expected.
+ # No encoding specified: not expecting unicode
+ app = TestApp(AssertApp(assertfunc=valid_name('日本', post=True)))
+ content_type = 'application/x-www-form-urlencoded; charset=%s'
+ res = app.post('/', params=dict(name='日本'),
+ headers={'content-type': content_type % 'UTF-8'})
+
+ # Encoding specified: expect unicode. Shiftjis is the default encoding, but
+ # params become UTF-8 because the browser specified so
+ app = TestApp(AssertApp(assertfunc=valid_name(u'日本', post=True,
+ encoding='shiftjis')))
+ res = app.post('/', params=dict(name='日本'),
+ headers={'content-type': content_type % 'UTF-8'})
+
+ # Browser did not specify: parse params as the fallback shiftjis
+ app = TestApp(AssertApp(assertfunc=valid_name(u'日本', post=True,
+ encoding='shiftjis')))
+ res = app.post('/', params=dict(name=u'日本'.encode('shiftjis')))
+
+def test_wsgirequest_charset_fileupload():
+ def handle_fileupload(environ, start_response):
+ start_response('200 OK', [('Content-type','text/plain')])
+ request = WSGIRequest(environ)
+
+ assert len(request.POST) == 1
+ assert isinstance(request.POST.keys()[0], str)
+ fs = request.POST['thefile']
+ assert isinstance(fs, cgi.FieldStorage)
+ assert isinstance(fs.filename, str)
+ assert fs.filename == '寿司.txt'
+ assert fs.value == b'Sushi'
+
+ request.charset = 'UTF-8'
+ assert len(request.POST) == 1
+ assert isinstance(request.POST.keys()[0], str)
+ fs = request.POST['thefile']
+ assert isinstance(fs, cgi.FieldStorage)
+ assert isinstance(fs.filename, six.text_type)
+ assert fs.filename == u'寿司.txt'
+ assert fs.value == b'Sushi'
+
+ request.charset = None
+ assert fs.value == b'Sushi'
+ return []
+
+ app = TestApp(handle_fileupload)
+ res = app.post('/', upload_files=[('thefile', '寿司.txt', b'Sushi')])
+
+def test_wsgiresponse_charset():
+ response = WSGIResponse(mimetype='text/html; charset=UTF-8')
+ assert response.content_type == 'text/html'
+ assert response.charset == 'UTF-8'
+ response.write(u'test')
+ response.write(u'test2')
+ response.write('test3')
+ status, headers, content = response.wsgi_response()
+ for data in content:
+ assert isinstance(data, six.binary_type)
+
+ WSGIResponse.defaults._push_object(dict(content_type='text/html',
+ charset='iso-8859-1'))
+ try:
+ response = WSGIResponse()
+ response.write(u'test')
+ response.write(u'test2')
+ response.write('test3')
+ status, headers, content = response.wsgi_response()
+ for data in content:
+ assert isinstance(data, six.binary_type)
+ finally:
+ WSGIResponse.defaults._pop_object()
+
+ # WSGIResponse will allow unicode to pass through when no charset is
+ # set
+ WSGIResponse.defaults._push_object(dict(content_type='text/html',
+ charset=None))
+ try:
+ response = WSGIResponse(u'test')
+ response.write(u'test1')
+ status, headers, content = response.wsgi_response()
+ for data in content:
+ assert isinstance(data, six.text_type)
+ finally:
+ WSGIResponse.defaults._pop_object()
+
+ WSGIResponse.defaults._push_object(dict(content_type='text/html',
+ charset=''))
+ try:
+ response = WSGIResponse(u'test')
+ response.write(u'test1')
+ status, headers, content = response.wsgi_response()
+ for data in content:
+ assert isinstance(data, six.text_type)
+ finally:
+ WSGIResponse.defaults._pop_object()
diff --git a/tests/urlparser_data/__init__.py b/tests/urlparser_data/__init__.py
new file mode 100644
index 0000000..792d600
--- /dev/null
+++ b/tests/urlparser_data/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/tests/urlparser_data/deep/index.html b/tests/urlparser_data/deep/index.html
new file mode 100644
index 0000000..8913442
--- /dev/null
+++ b/tests/urlparser_data/deep/index.html
@@ -0,0 +1 @@
+index2
diff --git a/tests/urlparser_data/deep/sub/Main.txt b/tests/urlparser_data/deep/sub/Main.txt
new file mode 100644
index 0000000..ec42756
--- /dev/null
+++ b/tests/urlparser_data/deep/sub/Main.txt
@@ -0,0 +1 @@
+index3
diff --git a/tests/urlparser_data/find_file/dir with spaces/test 4.html b/tests/urlparser_data/find_file/dir with spaces/test 4.html
new file mode 100644
index 0000000..1121e31
--- /dev/null
+++ b/tests/urlparser_data/find_file/dir with spaces/test 4.html
@@ -0,0 +1 @@
+test 4
diff --git a/tests/urlparser_data/find_file/index.txt b/tests/urlparser_data/find_file/index.txt
new file mode 100644
index 0000000..6be29bc
--- /dev/null
+++ b/tests/urlparser_data/find_file/index.txt
@@ -0,0 +1 @@
+index1
diff --git a/tests/urlparser_data/find_file/test 3.html b/tests/urlparser_data/find_file/test 3.html
new file mode 100644
index 0000000..954a536
--- /dev/null
+++ b/tests/urlparser_data/find_file/test 3.html
@@ -0,0 +1 @@
+test 3
diff --git a/tests/urlparser_data/find_file/test2.html b/tests/urlparser_data/find_file/test2.html
new file mode 100644
index 0000000..180cf83
--- /dev/null
+++ b/tests/urlparser_data/find_file/test2.html
@@ -0,0 +1 @@
+test2
diff --git a/tests/urlparser_data/hook/__init__.py b/tests/urlparser_data/hook/__init__.py
new file mode 100644
index 0000000..985a930
--- /dev/null
+++ b/tests/urlparser_data/hook/__init__.py
@@ -0,0 +1,10 @@
+from paste import request
+
+def urlparser_hook(environ):
+ first, rest = request.path_info_split(environ.get('PATH_INFO', ''))
+ if not first:
+ # No username
+ return
+ environ['app.user'] = first
+ environ['SCRIPT_NAME'] += '/' + first
+ environ['PATH_INFO'] = rest
diff --git a/tests/urlparser_data/hook/app.py b/tests/urlparser_data/hook/app.py
new file mode 100644
index 0000000..1a98013
--- /dev/null
+++ b/tests/urlparser_data/hook/app.py
@@ -0,0 +1,9 @@
+import six
+
+def application(environ, start_response):
+ start_response('200 OK', [('Content-type', 'text/html')])
+ body = 'user: %s' % environ['app.user']
+ if six.PY3:
+ body = body.encode('ascii')
+ return [body]
+
diff --git a/tests/urlparser_data/hook/index.py b/tests/urlparser_data/hook/index.py
new file mode 100644
index 0000000..92f3d66
--- /dev/null
+++ b/tests/urlparser_data/hook/index.py
@@ -0,0 +1,9 @@
+import six
+
+def application(environ, start_response):
+ start_response('200 OK', [('Content-type', 'text/html')])
+ body = 'index: %s' % environ['app.user']
+ if six.PY3:
+ body = body.encode('ascii')
+ return [body]
+
diff --git a/tests/urlparser_data/not_found/__init__.py b/tests/urlparser_data/not_found/__init__.py
new file mode 100644
index 0000000..792d600
--- /dev/null
+++ b/tests/urlparser_data/not_found/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/tests/urlparser_data/not_found/recur/__init__.py b/tests/urlparser_data/not_found/recur/__init__.py
new file mode 100644
index 0000000..48205a5
--- /dev/null
+++ b/tests/urlparser_data/not_found/recur/__init__.py
@@ -0,0 +1,9 @@
+def not_found_hook(environ, start_response):
+ urlparser = environ['paste.urlparser.not_found_parser']
+ path = environ.get('PATH_INFO', '')
+ if not path:
+ return urlparser.not_found(environ, start_response)
+ # Strip off leading _'s
+ path = '/' + path.lstrip('/').lstrip('_')
+ environ['PATH_INFO'] = path
+ return urlparser(environ, start_response)
diff --git a/tests/urlparser_data/not_found/recur/isfound.txt b/tests/urlparser_data/not_found/recur/isfound.txt
new file mode 100644
index 0000000..c8b8fab
--- /dev/null
+++ b/tests/urlparser_data/not_found/recur/isfound.txt
@@ -0,0 +1 @@
+is found
diff --git a/tests/urlparser_data/not_found/simple/__init__.py b/tests/urlparser_data/not_found/simple/__init__.py
new file mode 100644
index 0000000..7186daa
--- /dev/null
+++ b/tests/urlparser_data/not_found/simple/__init__.py
@@ -0,0 +1,3 @@
+def not_found_hook(environ, start_response):
+ start_response('200 OK', [('Content-type', 'text/plain')])
+ return [b'not found']
diff --git a/tests/urlparser_data/not_found/simple/found.txt b/tests/urlparser_data/not_found/simple/found.txt
new file mode 100644
index 0000000..c8b8fab
--- /dev/null
+++ b/tests/urlparser_data/not_found/simple/found.txt
@@ -0,0 +1 @@
+is found
diff --git a/tests/urlparser_data/not_found/user/__init__.py b/tests/urlparser_data/not_found/user/__init__.py
new file mode 100644
index 0000000..4126c04
--- /dev/null
+++ b/tests/urlparser_data/not_found/user/__init__.py
@@ -0,0 +1,12 @@
+from paste import request
+
+def not_found_hook(environ, start_response):
+ urlparser = environ['paste.urlparser.not_found_parser']
+ first, rest = request.path_info_split(environ.get('PATH_INFO', ''))
+ if not first:
+ # No username
+ return
+ environ['app.user'] = first
+ environ['SCRIPT_NAME'] += '/' + first
+ environ['PATH_INFO'] = rest
+ return urlparser(environ, start_response)
diff --git a/tests/urlparser_data/not_found/user/list.py b/tests/urlparser_data/not_found/user/list.py
new file mode 100644
index 0000000..fd7482f
--- /dev/null
+++ b/tests/urlparser_data/not_found/user/list.py
@@ -0,0 +1,8 @@
+import six
+
+def application(environ, start_response):
+ start_response('200 OK', [('Content-type', 'text/plain')])
+ body = 'user: %s' % environ.get('app.user')
+ if six.PY3:
+ body = body.encode('ascii')
+ return [body]
diff --git a/tests/urlparser_data/python/__init__.py b/tests/urlparser_data/python/__init__.py
new file mode 100644
index 0000000..792d600
--- /dev/null
+++ b/tests/urlparser_data/python/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/tests/urlparser_data/python/simpleapp.py b/tests/urlparser_data/python/simpleapp.py
new file mode 100644
index 0000000..7a36ce9
--- /dev/null
+++ b/tests/urlparser_data/python/simpleapp.py
@@ -0,0 +1,5 @@
+def application(environ, start_response):
+ start_response('200 OK', [('Content-type', 'text/html'),
+ ('test-header', 'TEST!')])
+ return [b'test1']
+
diff --git a/tests/urlparser_data/python/stream.py b/tests/urlparser_data/python/stream.py
new file mode 100644
index 0000000..e81fd1c
--- /dev/null
+++ b/tests/urlparser_data/python/stream.py
@@ -0,0 +1,7 @@
+def stream():
+ def app(environ, start_response):
+ writer = start_response('200 OK', [('Content-type', 'text/html')])
+ writer(b'te')
+ writer(b'st')
+ return [b'2']
+ return app
diff --git a/tests/urlparser_data/python/sub/__init__.py b/tests/urlparser_data/python/sub/__init__.py
new file mode 100644
index 0000000..792d600
--- /dev/null
+++ b/tests/urlparser_data/python/sub/__init__.py
@@ -0,0 +1 @@
+#
diff --git a/tests/urlparser_data/python/sub/simpleapp.py b/tests/urlparser_data/python/sub/simpleapp.py
new file mode 100644
index 0000000..88bd975
--- /dev/null
+++ b/tests/urlparser_data/python/sub/simpleapp.py
@@ -0,0 +1,4 @@
+def application(environ, start_response):
+ start_response('200 OK', [('Content-type', 'text/html'),
+ ('test-header', 'TEST!')])
+ return [b'subsimple']
diff --git a/tests/urlparser_data/secured.txt b/tests/urlparser_data/secured.txt
new file mode 100644
index 0000000..72b11b0
--- /dev/null
+++ b/tests/urlparser_data/secured.txt
@@ -0,0 +1 @@
+secured
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..d1c8978
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,9 @@
+[tox]
+envlist = py26, py27, py34, py35, pypy
+
+[testenv]
+deps =
+ coverage
+ nose
+commands =
+ nosetests {posargs:--with-coverage --cover-package=paste}