author     Jordan Cook <jordan.cook@pioneer.com>  2021-02-25 11:20:03 -0600
committer  Jordan Cook <jordan.cook@pioneer.com>  2021-02-26 16:02:36 -0600
commit     d7850551786e76e08c550b96897b40f5ad3f1778 (patch)
tree       1d4fd7f9be29df4c63291067350f1bbce628050d
parent     182c3ef080611403e5689f3685d80dd19217b229 (diff)
download   requests-cache-d7850551786e76e08c550b96897b40f5ad3f1778.tar.gz
Apply code formatting with black + isort
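Everything below is a mechanical formatting pass: isort regroups and alphabetizes imports, black reflows code, and no behavior changes. As a minimal sketch (not part of this commit; assumes black >= 20.8b1 and isort >= 5 are installed), the same pass can be reproduced programmatically with the settings this commit adds to pyproject.toml:

    # Hypothetical helper, not from this repo: apply isort + black in one call.
    import black
    import isort

    def reformat(source: str) -> str:
        # isort first: splits "import sys, os" style lines and sorts imports;
        # profile="black" keeps its multi-line import style black-compatible.
        source = isort.code(source, profile="black", line_length=100)
        # black second: 120-char lines; string_normalization=False mirrors
        # skip-string-normalization and leaves single-quoted strings alone.
        mode = black.Mode(line_length=120, string_normalization=False)
        return black.format_str(source, mode=mode)

    print(reformat("import sys, os\nx = {'a':1}\n"))

Running isort before black is the usual order; isort's black profile exists precisely so the two tools agree on import layout.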
-rw-r--r--  .gitignore                                           |  28
-rw-r--r--  docs/conf.py                                         | 117
-rw-r--r--  example.py                                           |   5
-rw-r--r--  pyproject.toml                                       |  21
-rw-r--r--  requests_cache/__init__.py                           |  11
-rw-r--r--  requests_cache/backends/__init__.py                  |  14
-rw-r--r--  requests_cache/backends/base.py                      |  62
-rw-r--r--  requests_cache/backends/dynamodb.py                  |  23
-rw-r--r--  requests_cache/backends/gridfs.py                    |   7
-rw-r--r--  requests_cache/backends/mongo.py                     |   7
-rw-r--r--  requests_cache/backends/redis.py                     |   7
-rw-r--r--  requests_cache/backends/sqlite.py                    |   6
-rw-r--r--  requests_cache/backends/storage/dbdict.py            |  35
-rwxr-xr-x  requests_cache/backends/storage/dynamodbdict.py      |  70
-rwxr-xr-x  requests_cache/backends/storage/gridfspickledict.py  |   6
-rw-r--r--  requests_cache/backends/storage/mongodict.py         |  11
-rwxr-xr-x  requests_cache/backends/storage/redisdict.py         |  10
-rw-r--r--  requests_cache/compat.py                             |  57
-rw-r--r--  requests_cache/core.py                               |  77
-rw-r--r--  requirements-test.txt                                |   2
-rw-r--r--  sandbox.py                                           |   2
-rw-r--r--  setup.py                                             |  12
-rw-r--r--  tests/__init__.py                                    |   2
-rw-r--r--  tests/test_cache.py                                  |  64
-rw-r--r--  tests/test_custom_dict.py                            |   4
-rw-r--r--  tests/test_dbdict.py                                 |  13
-rw-r--r--  tests/test_dynamodbdict.py                           |  11
-rw-r--r--  tests/test_gridfsdict.py                             |   8
-rw-r--r--  tests/test_mongodict.py                              |   7
-rw-r--r--  tests/test_monkey_patch.py                           |   6
-rw-r--r--  tests/test_redisdict.py                              |   5
-rw-r--r--  tests/test_thread_safety.py                          |  10
32 files changed, 405 insertions, 315 deletions
diff --git a/.gitignore b/.gitignore
index 4512f51..bf1cf69 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,23 @@
-*.pyc
-*.pyo
-*.idea
+*.py[cod]
*.sqlite
+*.egg
*.egg-info
-build
-_build
-dist
-MANIFEST
+build/
+dist/
+venv/
+
+# Editors
+.~*
+.idea/
+.vim/
+.vscode/
+
+# Test / coverage reports
+.coverage
+.tox
+.mypy_cache/
+test-reports/
+
+# Sphinx
+docs/_build/
+docs/modules/
diff --git a/docs/conf.py b/docs/conf.py
index 3954f8c..01b62df 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -11,13 +11,15 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
-import sys, os
+import os
+import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
+
class Mock(object):
def __init__(self, *args, **kwargs):
pass
@@ -34,6 +36,7 @@ class Mock(object):
else:
return Mock()
+
MOCK_MODULES = ['pymongo', 'redis']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
@@ -43,7 +46,7 @@ from requests_cache import __version__
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
+# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
@@ -56,7 +59,7 @@ templates_path = ['_templates']
source_suffix = '.rst'
# The encoding of source files.
-#source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
@@ -76,37 +79,37 @@ release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
-#language = None
+# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
-#today = ''
+# today = ''
# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
+# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
-#show_authors = False
+# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
@@ -118,26 +121,26 @@ html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
-#html_theme_options = {}
+# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
+# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
-#html_title = None
+# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-#html_logo = None
+# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-#html_favicon = None
+# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@@ -146,44 +149,44 @@ html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
# If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
# If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
# If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
+# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'requests-cachedoc'
@@ -192,55 +195,49 @@ htmlhelp_basename = 'requests-cachedoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
+ # The paper size ('letterpaper' or 'a4paper').
+ #'papersize': 'letterpaper',
+ # The font size ('10pt', '11pt' or '12pt').
+ #'pointsize': '10pt',
+ # Additional stuff for the LaTeX preamble.
+ #'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'requests-cache.tex', u'requests-cache Documentation',
- u'Roman Haritonov', 'manual'),
+ ('index', 'requests-cache.tex', u'requests-cache Documentation', u'Roman Haritonov', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
-#latex_logo = None
+# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
# If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False
# If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False
# Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
# If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [
- ('index', 'requests-cache', u'requests-cache Documentation',
- [u'Roman Haritonov'], 1)
-]
+man_pages = [('index', 'requests-cache', u'requests-cache Documentation', [u'Roman Haritonov'], 1)]
# If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
@@ -249,19 +246,25 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- ('index', 'requests-cache', u'requests-cache Documentation',
- u'Roman Haritonov', 'requests-cache', 'One line description of project.',
- 'Miscellaneous'),
+ (
+ 'index',
+ 'requests-cache',
+ u'requests-cache Documentation',
+ u'Roman Haritonov',
+ 'requests-cache',
+ 'One line description of project.',
+ 'Miscellaneous',
+ ),
]
# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []
# If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'
autoclass_content = 'both'
-autodoc_member_order = 'bysource'
\ No newline at end of file
+autodoc_member_order = 'bysource'
diff --git a/example.py b/example.py
index 0170d9b..4db9e62 100644
--- a/example.py
+++ b/example.py
@@ -3,10 +3,12 @@
import time
import requests
+
import requests_cache
requests_cache.install_cache('example_cache')
+
def main():
# Once cached, delayed page will be taken from cache
# redirects also handled
@@ -22,7 +24,8 @@ def main():
# Debugging info about cache
print(requests_cache.get_cache())
+
if __name__ == "__main__":
t = time.time()
main()
- print('Elapsed: %.3f seconds' % (time.time() - t))
\ No newline at end of file
+ print('Elapsed: %.3f seconds' % (time.time() - t))
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..c2aa941
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,21 @@
+[build-system]
+requires = ['setuptools', 'wheel']
+
+[tool.black]
+line-length = 120
+skip-string-normalization = true
+
+[tool.coverage.html]
+directory = 'test-reports'
+
+[tool.coverage.run]
+branch = true
+source = ['requests_cache']
+
+[tool.isort]
+profile = "black"
+line_length = 100
+skip_gitignore = true
+known_first_party = ['test']
+# Things that are common enough they may as well be grouped with stdlib imports
+extra_standard_library = ['pytest', 'setuptools']
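Both tools discover the [tool.black] and [tool.isort] tables automatically when invoked from the project root, so no CLI flags are needed. A small sketch (assumptions: isort >= 5 installed and this pyproject.toml in the current working directory; the expected outputs follow from the settings above):

    import isort

    # Load settings from ./pyproject.toml (settings_path="." is an
    # assumption about where the command is run from).
    config = isort.Config(settings_path=".")
    print(config.profile, config.line_length)  # expected: black 100

    # Imported names get reordered, matching the requests_cache diffs below.
    print(isort.code("from requests_cache.core import clear, CachedSession\n", config=config))
    # expected: from requests_cache.core import CachedSession, clear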
diff --git a/requests_cache/__init__.py b/requests_cache/__init__.py
index fff1e5e..c959373 100644
--- a/requests_cache/__init__.py
+++ b/requests_cache/__init__.py
@@ -26,6 +26,13 @@ __docformat__ = 'restructuredtext'
__version__ = '0.5.2'
from .core import (
- CachedSession, install_cache, uninstall_cache,
- disabled, enabled, get_cache, clear, configure, remove_expired_responses
+ CachedSession,
+ clear,
+ configure,
+ disabled,
+ enabled,
+ get_cache,
+ install_cache,
+ remove_expired_responses,
+ uninstall_cache,
)
diff --git a/requests_cache/backends/__init__.py b/requests_cache/backends/__init__.py
index 2bd88af..fbc1d1d 100644
--- a/requests_cache/backends/__init__.py
+++ b/requests_cache/backends/__init__.py
@@ -18,18 +18,20 @@ _backend_dependencies = {
'sqlite': 'sqlite3',
'mongo': 'pymongo',
'redis': 'redis',
- 'dynamodb': 'dynamodb'
+ 'dynamodb': 'dynamodb',
}
try:
# Heroku doesn't allow the SQLite3 module to be installed
from .sqlite import DbCache
+
registry['sqlite'] = DbCache
except ImportError:
DbCache = None
try:
from .mongo import MongoCache
+
registry['mongo'] = registry['mongodb'] = MongoCache
except ImportError:
MongoCache = None
@@ -37,22 +39,26 @@ except ImportError:
try:
from .gridfs import GridFSCache
+
registry['gridfs'] = GridFSCache
except ImportError:
GridFSCache = None
try:
from .redis import RedisCache
+
registry['redis'] = RedisCache
except ImportError:
RedisCache = None
try:
from .dynamodb import DynamoDbCache
+
registry['dynamodb'] = DynamoDbCache
except ImportError:
DynamoDbCache = None
+
def create_backend(backend_name, cache_name, options):
if isinstance(backend_name, BaseCache):
return backend_name
@@ -63,11 +69,9 @@ def create_backend(backend_name, cache_name, options):
return registry[backend_name](cache_name, **options)
except KeyError:
if backend_name in _backend_dependencies:
- raise ImportError('You must install the python package: %s' %
- _backend_dependencies[backend_name])
+ raise ImportError('You must install the python package: %s' % _backend_dependencies[backend_name])
else:
- raise ValueError('Unsupported backend "%s" try one of: %s' %
- (backend_name, ', '.join(registry.keys())))
+ raise ValueError('Unsupported backend "%s" try one of: %s' % (backend_name, ', '.join(registry.keys())))
def _get_default_backend_name():
diff --git a/requests_cache/backends/base.py b/requests_cache/backends/base.py
index 125dbde..9b942c9 100644
--- a/requests_cache/backends/base.py
+++ b/requests_cache/backends/base.py
@@ -7,25 +7,25 @@
Contains BaseCache class which can be used as in-memory cache backend or
extended to support persistence.
"""
-from datetime import datetime
import hashlib
from copy import copy
+from datetime import datetime
from io import BytesIO
import requests
-from ..compat import is_py2, urlencode, urlparse, urlunparse, parse_qsl, bytes, str
-
+from ..compat import bytes, is_py2, parse_qsl, str, urlencode, urlparse, urlunparse
_DEFAULT_HEADERS = requests.utils.default_headers()
class BaseCache(object):
- """ Base class for cache implementations, can be used as in-memory cache.
+ """Base class for cache implementations, can be used as in-memory cache.
To extend it you can provide dictionary-like objects for
:attr:`keys_map` and :attr:`responses` or override public methods.
"""
+
def __init__(self, *args, **kwargs):
#: `key` -> `key_in_responses` mapping
self.keys_map = {}
@@ -35,7 +35,7 @@ class BaseCache(object):
self._ignored_parameters = set(kwargs.get("ignored_parameters") or [])
def save_response(self, key, response):
- """ Save response to cache
+ """Save response to cache
:param key: key for this response
:param response: response to save
@@ -57,7 +57,7 @@ class BaseCache(object):
self.keys_map[new_key] = key_to_response
def get_response_and_time(self, key, default=(None, None)):
- """ Retrieves response and timestamp for `key` if it's stored in cache,
+ """Retrieves response and timestamp for `key` if it's stored in cache,
otherwise returns `default`
:param key: key of resource
@@ -75,8 +75,7 @@ class BaseCache(object):
return self.restore_response(response), timestamp
def delete(self, key):
- """ Delete `key` from cache. Also deletes all responses from response history
- """
+ """Delete `key` from cache. Also deletes all responses from response history"""
try:
if key in self.responses:
response, _ = self.responses[key]
@@ -90,20 +89,18 @@ class BaseCache(object):
pass
def delete_url(self, url):
- """ Delete response associated with `url` from cache.
+ """Delete response associated with `url` from cache.
Also deletes all responses from response history. Works only for GET requests
"""
self.delete(self._url_to_key(url))
def clear(self):
- """ Clear cache
- """
+ """Clear cache"""
self.responses.clear()
self.keys_map.clear()
def remove_old_entries(self, created_before):
- """ Deletes entries from cache with creation time older than ``created_before``
- """
+ """Deletes entries from cache with creation time older than ``created_before``"""
keys_to_delete = set()
for key, (response, created_at) in self.responses.items():
if created_at < created_before:
@@ -113,12 +110,11 @@ class BaseCache(object):
self.delete(key)
def has_key(self, key):
- """ Returns `True` if cache has `key`, `False` otherwise
- """
+ """Returns `True` if cache has `key`, `False` otherwise"""
return key in self.responses or key in self.keys_map
def has_url(self, url):
- """ Returns `True` if cache has `url`, `False` otherwise.
+ """Returns `True` if cache has `url`, `False` otherwise.
Works only for GET request urls
"""
return self.has_key(self._url_to_key(url))
@@ -127,15 +123,30 @@ class BaseCache(object):
session = requests.Session()
return self.create_key(session.prepare_request(requests.Request('GET', url)))
- _response_attrs = ['_content', 'url', 'status_code', 'cookies',
- 'headers', 'encoding', 'request', 'reason', 'raw']
-
- _raw_response_attrs = ['_original_response', 'decode_content', 'headers',
- 'reason', 'status', 'strict', 'version']
+ _response_attrs = [
+ '_content',
+ 'url',
+ 'status_code',
+ 'cookies',
+ 'headers',
+ 'encoding',
+ 'request',
+ 'reason',
+ 'raw',
+ ]
+
+ _raw_response_attrs = [
+ '_original_response',
+ 'decode_content',
+ 'headers',
+ 'reason',
+ 'status',
+ 'strict',
+ 'version',
+ ]
def reduce_response(self, response, seen=None):
- """ Reduce response object to make it compatible with ``pickle``
- """
+ """Reduce response object to make it compatible with ``pickle``"""
if seen is None:
seen = {}
try:
@@ -169,8 +180,7 @@ class BaseCache(object):
return value
def restore_response(self, response, seen=None):
- """ Restore response object after unpickling
- """
+ """Restore response object after unpickling"""
if seen is None:
seen = {}
try:
@@ -186,7 +196,6 @@ class BaseCache(object):
return result
def _remove_ignored_parameters(self, request):
-
def filter_ignored_parameters(data):
return [(k, v) for k, v in data if k not in self._ignored_parameters]
@@ -204,6 +213,7 @@ class BaseCache(object):
body = urlencode(body)
elif content_type == 'application/json':
import json
+
if not is_py2 and isinstance(body, bytes):
body = str(body, "utf8") # TODO how to get body encoding?
body = json.loads(body)
diff --git a/requests_cache/backends/dynamodb.py b/requests_cache/backends/dynamodb.py
index 536a2bf..5a70e48 100644
--- a/requests_cache/backends/dynamodb.py
+++ b/requests_cache/backends/dynamodb.py
@@ -11,20 +11,21 @@ from .storage.dynamodbdict import DynamoDbDict
class DynamoDbCache(BaseCache):
- """ ``dynamodb`` cache backend.
- """
+ """``dynamodb`` cache backend."""
+
def __init__(self, table_name='requests-cache', **options):
"""
:param namespace: dynamodb table name (default: ``'requests-cache'``)
:param connection: (optional) ``boto3.resource('dynamodb')``
"""
super(DynamoDbCache, self).__init__(**options)
- self.responses = DynamoDbDict(table_name, 'responses',
- options.get('connection'),
- options.get('endpont_url'),
- options.get('region_name'),
- options.get('read_capacity_units'),
- options.get('write_capacity_units'))
- self.keys_map = DynamoDbDict(table_name,
- 'urls',
- self.responses.connection)
+ self.responses = DynamoDbDict(
+ table_name,
+ 'responses',
+ options.get('connection'),
+ options.get('endpont_url'),
+ options.get('region_name'),
+ options.get('read_capacity_units'),
+ options.get('write_capacity_units'),
+ )
+ self.keys_map = DynamoDbDict(table_name, 'urls', self.responses.connection)
diff --git a/requests_cache/backends/gridfs.py b/requests_cache/backends/gridfs.py
index ad71dc7..d8dd12a 100644
--- a/requests_cache/backends/gridfs.py
+++ b/requests_cache/backends/gridfs.py
@@ -16,13 +16,13 @@
requests_cache.install_cache(backend='gridfs', connection=MongoClient('another-host.local'))
"""
from .base import BaseCache
-from .storage.mongodict import MongoDict
from .storage.gridfspickledict import GridFSPickleDict
+from .storage.mongodict import MongoDict
class GridFSCache(BaseCache):
- """ ``gridfs`` cache backend.
- """
+ """``gridfs`` cache backend."""
+
def __init__(self, db_name, **options):
"""
:param db_name: database name
@@ -31,4 +31,3 @@ class GridFSCache(BaseCache):
super(GridFSCache, self).__init__(**options)
self.responses = GridFSPickleDict(db_name, options.get('connection'))
self.keys_map = MongoDict(db_name, 'http_redirects', self.responses.connection)
-
diff --git a/requests_cache/backends/mongo.py b/requests_cache/backends/mongo.py
index 76b89c7..6dc456c 100644
--- a/requests_cache/backends/mongo.py
+++ b/requests_cache/backends/mongo.py
@@ -11,14 +11,13 @@ from .storage.mongodict import MongoDict, MongoPickleDict
class MongoCache(BaseCache):
- """ ``mongo`` cache backend.
- """
+ """``mongo`` cache backend."""
+
def __init__(self, db_name='requests-cache', **options):
"""
:param db_name: database name (default: ``'requests-cache'``)
:param connection: (optional) ``pymongo.Connection``
"""
super(MongoCache, self).__init__(**options)
- self.responses = MongoPickleDict(db_name, 'responses',
- options.get('connection'))
+ self.responses = MongoPickleDict(db_name, 'responses', options.get('connection'))
self.keys_map = MongoDict(db_name, 'urls', self.responses.connection)
diff --git a/requests_cache/backends/redis.py b/requests_cache/backends/redis.py
index 5d0200e..07fc014 100644
--- a/requests_cache/backends/redis.py
+++ b/requests_cache/backends/redis.py
@@ -11,14 +11,13 @@ from .storage.redisdict import RedisDict
class RedisCache(BaseCache):
- """ ``redis`` cache backend.
- """
+ """``redis`` cache backend."""
+
def __init__(self, namespace='requests-cache', **options):
"""
:param namespace: redis namespace (default: ``'requests-cache'``)
:param connection: (optional) ``redis.StrictRedis``
"""
super(RedisCache, self).__init__(**options)
- self.responses = RedisDict(namespace, 'responses',
- options.get('connection'))
+ self.responses = RedisDict(namespace, 'responses', options.get('connection'))
self.keys_map = RedisDict(namespace, 'urls', self.responses.connection)
diff --git a/requests_cache/backends/sqlite.py b/requests_cache/backends/sqlite.py
index ed77e92..184ad89 100644
--- a/requests_cache/backends/sqlite.py
+++ b/requests_cache/backends/sqlite.py
@@ -11,13 +11,13 @@ from .storage.dbdict import DbDict, DbPickleDict
class DbCache(BaseCache):
- """ sqlite cache backend.
+ """sqlite cache backend.
Reading is fast, saving is a bit slower. It can store big amount of data
with low memory usage.
"""
- def __init__(self, location='cache',
- fast_save=False, extension='.sqlite', **options):
+
+ def __init__(self, location='cache', fast_save=False, extension='.sqlite', **options):
"""
:param location: database filename prefix (default: ``'cache'``)
:param fast_save: Speedup cache saving up to 50 times but with possibility of data loss.
diff --git a/requests_cache/backends/storage/dbdict.py b/requests_cache/backends/storage/dbdict.py
index 617c5cf..d15e9bc 100644
--- a/requests_cache/backends/storage/dbdict.py
+++ b/requests_cache/backends/storage/dbdict.py
@@ -13,6 +13,7 @@ except ImportError:
import sqlite3 as sqlite
from contextlib import contextmanager
+
try:
import threading
except ImportError:
@@ -26,7 +27,7 @@ from ...compat import bytes
class DbDict(MutableMapping):
- """ DbDict - a dictionary-like object for saving large datasets to `sqlite` database
+ """DbDict - a dictionary-like object for saving large datasets to `sqlite` database
It's possible to create multiply DbDict instances, which will be stored as separate
tables in one database::
@@ -51,18 +52,16 @@ class DbDict(MutableMapping):
self.filename = filename
self.table_name = table_name
self.fast_save = fast_save
-
+
#: Transactions can be committed if this property is set to `True`
self.can_commit = True
-
self._bulk_commit = False
self._pending_connection = None
self._lock = threading.RLock()
with self.connection() as con:
con.execute("create table if not exists `%s` (key PRIMARY KEY, value)" % self.table_name)
-
@contextmanager
def connection(self, commit_on_success=False):
with self._lock:
@@ -118,40 +117,37 @@ class DbDict(MutableMapping):
def __getitem__(self, key):
with self.connection() as con:
- row = con.execute("select value from `%s` where key=?" %
- self.table_name, (key,)).fetchone()
+ row = con.execute("select value from `%s` where key=?" % self.table_name, (key,)).fetchone()
if not row:
raise KeyError
return row[0]
def __setitem__(self, key, item):
with self.connection(True) as con:
- con.execute("insert or replace into `%s` (key,value) values (?,?)" %
- self.table_name, (key, item))
+ con.execute(
+ "insert or replace into `%s` (key,value) values (?,?)" % self.table_name,
+ (key, item),
+ )
def __delitem__(self, key):
with self.connection(True) as con:
- cur = con.execute("delete from `%s` where key=?" %
- self.table_name, (key,))
+ cur = con.execute("delete from `%s` where key=?" % self.table_name, (key,))
if not cur.rowcount:
raise KeyError
def __iter__(self):
with self.connection() as con:
- for row in con.execute("select key from `%s`" %
- self.table_name):
+ for row in con.execute("select key from `%s`" % self.table_name):
yield row[0]
def __len__(self):
with self.connection() as con:
- return con.execute("select count(key) from `%s`" %
- self.table_name).fetchone()[0]
+ return con.execute("select count(key) from `%s`" % self.table_name).fetchone()[0]
def clear(self):
with self.connection(True) as con:
con.execute("drop table `%s`" % self.table_name)
- con.execute("create table `%s` (key PRIMARY KEY, value)" %
- self.table_name)
+ con.execute("create table `%s` (key PRIMARY KEY, value)" % self.table_name)
con.execute("vacuum")
def __str__(self):
@@ -159,11 +155,10 @@ class DbDict(MutableMapping):
class DbPickleDict(DbDict):
- """ Same as :class:`DbDict`, but pickles values before saving
- """
+ """Same as :class:`DbDict`, but pickles values before saving"""
+
def __setitem__(self, key, item):
- super(DbPickleDict, self).__setitem__(key,
- sqlite.Binary(pickle.dumps(item)))
+ super(DbPickleDict, self).__setitem__(key, sqlite.Binary(pickle.dumps(item)))
def __getitem__(self, key):
return pickle.loads(bytes(super(DbPickleDict, self).__getitem__(key)))
diff --git a/requests_cache/backends/storage/dynamodbdict.py b/requests_cache/backends/storage/dynamodbdict.py
index 54d9d29..50ca94c 100755
--- a/requests_cache/backends/storage/dynamodbdict.py
+++ b/requests_cache/backends/storage/dynamodbdict.py
@@ -14,21 +14,25 @@ try:
import cPickle as pickle
except ImportError:
import pickle
+
import boto3
-from boto3.dynamodb.conditions import Key, Attr
+from boto3.dynamodb.conditions import Attr, Key
from botocore.exceptions import ClientError
class DynamoDbDict(MutableMapping):
- """ DynamoDbDict - a dictionary-like interface for ``dynamodb`` key-stores
- """
-
- def __init__(self, table_name, namespace='dynamodb_dict_data',
- connection=None,
- endpoint_url=None,
- region_name='us-east-1',
- read_capacity_units=1,
- write_capacity_units=1):
+ """DynamoDbDict - a dictionary-like interface for ``dynamodb`` key-stores"""
+
+ def __init__(
+ self,
+ table_name,
+ namespace='dynamodb_dict_data',
+ connection=None,
+ endpoint_url=None,
+ region_name='us-east-1',
+ read_capacity_units=1,
+ write_capacity_units=1,
+ ):
"""
The actual key name on the dynamodb server will be
@@ -50,9 +54,7 @@ class DynamoDbDict(MutableMapping):
if connection is not None:
self.connection = connection
else:
- self.connection = boto3.resource('dynamodb',
- endpoint_url=endpoint_url,
- region_name=region_name)
+ self.connection = boto3.resource('dynamodb', endpoint_url=endpoint_url, region_name=region_name)
try:
self.connection.create_table(
AttributeDefinitions=[
@@ -63,23 +65,17 @@ class DynamoDbDict(MutableMapping):
{
'AttributeName': 'key',
'AttributeType': 'S',
- }
+ },
],
TableName=table_name,
KeySchema=[
- {
- 'AttributeName': 'namespace',
- 'KeyType': 'HASH'
- },
- {
- 'AttributeName': 'key',
- 'KeyType': 'RANGE'
- }
+ {'AttributeName': 'namespace', 'KeyType': 'HASH'},
+ {'AttributeName': 'key', 'KeyType': 'RANGE'},
],
ProvisionedThroughput={
'ReadCapacityUnits': read_capacity_units,
- 'WriteCapacityUnits': write_capacity_units
- }
+ 'WriteCapacityUnits': write_capacity_units,
+ },
)
except ClientError:
pass
@@ -94,15 +90,12 @@ class DynamoDbDict(MutableMapping):
return pickle.loads(result['Item']['value'].value)
def __setitem__(self, key, item):
- item = {'namespace': self._self_key,
- 'key': str(key),
- 'value': pickle.dumps(item)}
+ item = {'namespace': self._self_key, 'key': str(key), 'value': pickle.dumps(item)}
self._table.put_item(Item=item)
def __delitem__(self, key):
composite_key = {'namespace': self._self_key, 'key': str(key)}
- response = self._table.delete_item(Key=composite_key,
- ReturnValues='ALL_OLD')
+ response = self._table.delete_item(Key=composite_key, ReturnValues='ALL_OLD')
if not 'Attributes' in response:
raise KeyError
@@ -127,14 +120,19 @@ class DynamoDbDict(MutableMapping):
expression_attribute_values = {':Namespace': self._self_key}
expression_attribute_names = {'#N': 'namespace'}
key_condition_expression = '#N = :Namespace'
- return self._table.query(ExpressionAttributeValues=expression_attribute_values,
- ExpressionAttributeNames=expression_attribute_names,
- KeyConditionExpression=key_condition_expression)
+ return self._table.query(
+ ExpressionAttributeValues=expression_attribute_values,
+ ExpressionAttributeNames=expression_attribute_names,
+ KeyConditionExpression=key_condition_expression,
+ )
+
def __count_table(self):
expression_attribute_values = {':Namespace': self._self_key}
expression_attribute_names = {'#N': 'namespace'}
key_condition_expression = '#N = :Namespace'
- return self._table.query(Select='COUNT',
- ExpressionAttributeValues=expression_attribute_values,
- ExpressionAttributeNames=expression_attribute_names,
- KeyConditionExpression=key_condition_expression)['Count']
+ return self._table.query(
+ Select='COUNT',
+ ExpressionAttributeValues=expression_attribute_values,
+ ExpressionAttributeNames=expression_attribute_names,
+ KeyConditionExpression=key_condition_expression,
+ )['Count']
diff --git a/requests_cache/backends/storage/gridfspickledict.py b/requests_cache/backends/storage/gridfspickledict.py
index 42ca3d2..7ffc899 100755
--- a/requests_cache/backends/storage/gridfspickledict.py
+++ b/requests_cache/backends/storage/gridfspickledict.py
@@ -24,9 +24,10 @@ except ImportError:
from gridfs import GridFS
+
class GridFSPickleDict(MutableMapping):
- """ MongoDict - a dictionary-like interface for ``mongo`` database
- """
+ """MongoDict - a dictionary-like interface for ``mongo`` database"""
+
def __init__(self, db_name, connection=None):
"""
:param db_name: database name (be careful with production databases)
@@ -70,4 +71,3 @@ class GridFSPickleDict(MutableMapping):
def __str__(self):
return str(dict(self.items()))
-
diff --git a/requests_cache/backends/storage/mongodict.py b/requests_cache/backends/storage/mongodict.py
index 4c16cb4..c6d83ef 100644
--- a/requests_cache/backends/storage/mongodict.py
+++ b/requests_cache/backends/storage/mongodict.py
@@ -25,10 +25,9 @@ except ImportError:
class MongoDict(MutableMapping):
- """ MongoDict - a dictionary-like interface for ``mongo`` database
- """
- def __init__(self, db_name,
- collection_name='mongo_dict_data', connection=None):
+ """MongoDict - a dictionary-like interface for ``mongo`` database"""
+
+ def __init__(self, db_name, collection_name='mongo_dict_data', connection=None):
"""
:param db_name: database name (be careful with production databases)
:param collection_name: collection name (default: mongo_dict_data)
@@ -78,8 +77,8 @@ class MongoDict(MutableMapping):
class MongoPickleDict(MongoDict):
- """ Same as :class:`MongoDict`, but pickles values before saving
- """
+ """Same as :class:`MongoDict`, but pickles values before saving"""
+
def __setitem__(self, key, item):
super(MongoPickleDict, self).__setitem__(key, pickle.dumps(item))
diff --git a/requests_cache/backends/storage/redisdict.py b/requests_cache/backends/storage/redisdict.py
index ba6f4ed..df99bc8 100755
--- a/requests_cache/backends/storage/redisdict.py
+++ b/requests_cache/backends/storage/redisdict.py
@@ -18,10 +18,9 @@ from redis import StrictRedis as Redis
class RedisDict(MutableMapping):
- """ RedisDict - a dictionary-like interface for ``redis`` key-stores
- """
- def __init__(self, namespace, collection_name='redis_dict_data',
- connection=None):
+ """RedisDict - a dictionary-like interface for ``redis`` key-stores"""
+
+ def __init__(self, namespace, collection_name='redis_dict_data', connection=None):
"""
The actual key name on the redis server will be
``namespace``:``collection_name``
@@ -50,8 +49,7 @@ class RedisDict(MutableMapping):
return pickle.loads(bytes(result))
def __setitem__(self, key, item):
- self.connection.hset(self._self_key, pickle.dumps(key),
- pickle.dumps(item))
+ self.connection.hset(self._self_key, pickle.dumps(key), pickle.dumps(item))
def __delitem__(self, key):
if not self.connection.hdel(self._self_key, pickle.dumps(key)):
diff --git a/requests_cache/compat.py b/requests_cache/compat.py
index 6d59f79..4f942ea 100644
--- a/requests_cache/compat.py
+++ b/requests_cache/compat.py
@@ -13,37 +13,37 @@ import sys
_ver = sys.version_info
#: Python 2.x?
-is_py2 = (_ver[0] == 2)
+is_py2 = _ver[0] == 2
#: Python 3.x?
-is_py3 = (_ver[0] == 3)
+is_py3 = _ver[0] == 3
#: Python 3.0.x
-is_py30 = (is_py3 and _ver[1] == 0)
+is_py30 = is_py3 and _ver[1] == 0
#: Python 3.1.x
-is_py31 = (is_py3 and _ver[1] == 1)
+is_py31 = is_py3 and _ver[1] == 1
#: Python 3.2.x
-is_py32 = (is_py3 and _ver[1] == 2)
+is_py32 = is_py3 and _ver[1] == 2
#: Python 3.3.x
-is_py33 = (is_py3 and _ver[1] == 3)
+is_py33 = is_py3 and _ver[1] == 3
#: Python 3.4.x
-is_py34 = (is_py3 and _ver[1] == 4)
+is_py34 = is_py3 and _ver[1] == 4
#: Python 2.7.x
-is_py27 = (is_py2 and _ver[1] == 7)
+is_py27 = is_py2 and _ver[1] == 7
#: Python 2.6.x
-is_py26 = (is_py2 and _ver[1] == 6)
+is_py26 = is_py2 and _ver[1] == 6
#: Python 2.5.x
-is_py25 = (is_py2 and _ver[1] == 5)
+is_py25 = is_py2 and _ver[1] == 5
#: Python 2.4.x
-is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
+is_py24 = is_py2 and _ver[1] == 4 # I'm assuming this is not by choice.
# ---------
@@ -54,9 +54,9 @@ is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
# Syntax sugar.
_ver = sys.version.lower()
-is_pypy = ('pypy' in _ver)
-is_jython = ('jython' in _ver)
-is_ironpython = ('iron' in _ver)
+is_pypy = 'pypy' in _ver
+is_jython = 'jython' in _ver
+is_ironpython = 'iron' in _ver
# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))
@@ -65,10 +65,10 @@ is_cpython = not any((is_pypy, is_jython, is_ironpython))
is_windows = 'win32' in str(sys.platform).lower()
# Standard Linux 2+ system.
-is_linux = ('linux' in str(sys.platform).lower())
-is_osx = ('darwin' in str(sys.platform).lower())
-is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
-is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
+is_linux = 'linux' in str(sys.platform).lower()
+is_osx = 'darwin' in str(sys.platform).lower()
+is_hpux = 'hpux' in str(sys.platform).lower() # Complete guess.
+is_solaris = 'solar==' in str(sys.platform).lower() # Complete guess.
# ---------
@@ -78,22 +78,33 @@ is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
if is_py2:
from urllib import quote, unquote, urlencode
- from urlparse import urlparse, urlunparse, urljoin, urlsplit, parse_qsl
- from urllib2 import parse_http_list
+
import cookielib
from StringIO import StringIO
+ from urllib2 import parse_http_list
+ from urlparse import parse_qsl, urljoin, urlparse, urlsplit, urlunparse
+
bytes = str
str = unicode
basestring = basestring
elif is_py3:
- from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, parse_qsl
- from urllib.request import parse_http_list
from http import cookiejar as cookielib
from http.cookies import SimpleCookie
from io import StringIO
+ from urllib.parse import (
+ parse_qsl,
+ quote,
+ unquote,
+ urlencode,
+ urljoin,
+ urlparse,
+ urlsplit,
+ urlunparse,
+ )
+ from urllib.request import parse_http_list
str = str
bytes = bytes
- basestring = (str,bytes)
+ basestring = (str, bytes)
diff --git a/requests_cache/core.py b/requests_cache/core.py
index d2d1462..db6fc0e 100644
--- a/requests_cache/core.py
+++ b/requests_cache/core.py
@@ -29,13 +29,19 @@ else:
class CachedSession(OriginalSession):
- """ Requests ``Sessions`` with caching support.
- """
-
- def __init__(self, cache_name='cache', backend=None, expire_after=None,
- allowable_codes=(200,), allowable_methods=('GET',),
- filter_fn=lambda r: True, old_data_on_error=False,
- **backend_options):
+ """Requests ``Sessions`` with caching support."""
+
+ def __init__(
+ self,
+ cache_name='cache',
+ backend=None,
+ expire_after=None,
+ allowable_codes=(200,),
+ allowable_methods=('GET',),
+ filter_fn=lambda r: True,
+ old_data_on_error=False,
+ **backend_options
+ ):
"""
:param cache_name: for ``sqlite`` backend: cache file will start with this prefix,
e.g ``cache.sqlite``
@@ -85,8 +91,7 @@ class CachedSession(OriginalSession):
super(CachedSession, self).__init__()
def send(self, request, **kwargs):
- if (self._is_cache_disabled
- or request.method not in self._cache_allowable_methods):
+ if self._is_cache_disabled or request.method not in self._cache_allowable_methods:
response = super(CachedSession, self).send(request, **kwargs)
response.from_cache = False
response.cache_date = None
@@ -133,10 +138,7 @@ class CachedSession(OriginalSession):
def request(self, method, url, params=None, data=None, **kwargs):
response = super(CachedSession, self).request(
- method, url,
- _normalize_parameters(params),
- _normalize_parameters(data),
- **kwargs
+ method, url, _normalize_parameters(params), _normalize_parameters(data), **kwargs
)
if self._is_cache_disabled:
return response
@@ -145,15 +147,12 @@ class CachedSession(OriginalSession):
# If self._return_old_data_on_error is set,
# responses won't always have the from_cache attribute.
- if (hasattr(response, "from_cache") and not response.from_cache
- and self._filter_fn(response) is not True):
+ if hasattr(response, "from_cache") and not response.from_cache and self._filter_fn(response) is not True:
self.cache.delete(main_key)
return response
for r in response.history:
- self.cache.add_key_mapping(
- self.cache.create_key(r.request), main_key
- )
+ self.cache.add_key_mapping(self.cache.create_key(r.request), main_key)
return response
@contextmanager
@@ -173,26 +172,30 @@ class CachedSession(OriginalSession):
self._is_cache_disabled = False
def remove_expired_responses(self):
- """ Removes expired responses from storage
- """
+ """Removes expired responses from storage"""
if not self._cache_expire_after:
return
self.cache.remove_old_entries(datetime.utcnow() - self._cache_expire_after)
def __repr__(self):
- return (
- "<CachedSession(%s('%s', ...), expire_after=%s, "
- "allowable_methods=%s)>" % (
- self.cache.__class__.__name__, self._cache_name,
- self._cache_expire_after, self._cache_allowable_methods
- )
+ return "<CachedSession(%s('%s', ...), expire_after=%s, " "allowable_methods=%s)>" % (
+ self.cache.__class__.__name__,
+ self._cache_name,
+ self._cache_expire_after,
+ self._cache_allowable_methods,
)
-def install_cache(cache_name='cache', backend=None, expire_after=None,
- allowable_codes=(200,), allowable_methods=('GET',),
- filter_fn=lambda r: True, session_factory=CachedSession,
- **backend_options):
+def install_cache(
+ cache_name='cache',
+ backend=None,
+ expire_after=None,
+ allowable_codes=(200,),
+ allowable_methods=('GET',),
+ filter_fn=lambda r: True,
+ session_factory=CachedSession,
+ **backend_options
+):
"""
Installs cache for all ``Requests`` requests by monkey-patching ``Session``
@@ -223,8 +226,7 @@ configure = install_cache
def uninstall_cache():
- """ Restores ``requests.Session`` and disables cache
- """
+ """Restores ``requests.Session`` and disables cache"""
_patch_session_factory(OriginalSession)
@@ -273,20 +275,17 @@ def enabled(*args, **kwargs):
def get_cache():
- """ Returns internal cache object from globally installed ``CachedSession``
- """
+ """Returns internal cache object from globally installed ``CachedSession``"""
return requests.Session().cache
def clear():
- """ Clears globally installed cache
- """
+ """Clears globally installed cache"""
get_cache().clear()
def remove_expired_responses():
- """ Removes expired responses from storage
- """
+ """Removes expired responses from storage"""
return requests.Session().remove_expired_responses()
@@ -295,7 +294,7 @@ def _patch_session_factory(session_factory=CachedSession):
def _normalize_parameters(params):
- """ If builtin dict is passed as parameter, returns sorted list
+ """If builtin dict is passed as parameter, returns sorted list
of key-value pairs
"""
if type(params) is dict:
diff --git a/requirements-test.txt b/requirements-test.txt
index c240985..57a77cf 100644
--- a/requirements-test.txt
+++ b/requirements-test.txt
@@ -1,3 +1,5 @@
+black==20.8b1
+isort
pytest>=2.8,<3
pytest-cov>=2.0,<2.2
unittest2
diff --git a/sandbox.py b/sandbox.py
index b531967..a7c5168 100644
--- a/sandbox.py
+++ b/sandbox.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
+
from requests_cache import CachedSession
cs = CachedSession(allowable_methods=('GET', 'POST'))
@@ -9,4 +10,3 @@ for i in range(2):
r = cs.get("http://httpbin.org/get?p1=v1", params={'p2': 'v2', 'p3': 'cyrЯЯ'})
print r
print r.from_cache
-
diff --git a/setup.py b/setup.py
index 54271d4..5dd81e2 100644
--- a/setup.py
+++ b/setup.py
@@ -1,8 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-import sys
-import os
import glob
+import os
+import sys
+
try:
from setuptools import setup
except ImportError:
@@ -17,9 +18,7 @@ if sys.argv[-1] == 'test':
setup(
name='requests-cache',
- packages=['requests_cache',
- 'requests_cache.backends',
- 'requests_cache.backends.storage'],
+ packages=['requests_cache', 'requests_cache.backends', 'requests_cache.backends.storage'],
version='0.5.2',
description='Persistent cache for requests library',
author='Roman Haritonov',
@@ -38,6 +37,5 @@ setup(
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
- long_description=open('README.rst').read() + '\n\n' +
- open('HISTORY.rst').read(),
+ long_description=open('README.rst').read() + '\n\n' + open('HISTORY.rst').read(),
)
diff --git a/tests/__init__.py b/tests/__init__.py
index f243dbd..d782f66 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,3 +1,3 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-#date: 08.04.12
+# date: 08.04.12
diff --git a/tests/test_cache.py b/tests/test_cache.py
index 00b84de..0f7c5aa 100644
--- a/tests/test_cache.py
+++ b/tests/test_cache.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -9,9 +11,9 @@ try:
except ImportError:
import unittest
+import json
import pytest
import time
-import json
from collections import defaultdict
from datetime import datetime, timedelta
from unittest import mock
@@ -21,7 +23,7 @@ from requests import Request
import requests_cache
from requests_cache import CachedSession
-from requests_cache.compat import bytes, str, is_py3
+from requests_cache.compat import bytes, is_py3, str
CACHE_BACKEND = 'sqlite'
CACHE_NAME = 'requests_cache_test'
@@ -36,7 +38,6 @@ def httpbin(*suffix):
class CacheTestCase(unittest.TestCase):
-
def setUp(self):
self.s = CachedSession(CACHE_NAME, backend=CACHE_BACKEND, fast_save=FAST_SAVE)
self.s.cache.clear()
@@ -93,6 +94,7 @@ class CacheTestCase(unittest.TestCase):
def hook_func(r, *args, **kwargs):
state[hook] += 1
return r
+
n = 5
for i in range(n):
r = self.s.get(httpbin('get'), hooks={hook: hook_func})
@@ -107,6 +109,7 @@ class CacheTestCase(unittest.TestCase):
self.assert_(r.from_cache, True)
state[hook] += 1
return r
+
n = 5
for i in range(n):
r = self.s.get(httpbin('get'), hooks={hook: hook_func})
@@ -138,8 +141,7 @@ class CacheTestCase(unittest.TestCase):
def test_enabled(self):
url = httpbin('get')
- options = dict(cache_name=CACHE_NAME, backend=CACHE_BACKEND,
- fast_save=FAST_SAVE)
+ options = dict(cache_name=CACHE_NAME, backend=CACHE_BACKEND, fast_save=FAST_SAVE)
with requests_cache.enabled(**options):
r = requests.get(url)
self.assertFalse(getattr(r, 'from_cache', False))
@@ -152,8 +154,10 @@ class CacheTestCase(unittest.TestCase):
def test_content_and_cookies(self):
requests_cache.install_cache(CACHE_NAME, CACHE_BACKEND)
s = requests.session()
+
def js(url):
return json.loads(s.get(url).text)
+
r1 = js(httpbin('cookies/set/test1/test2'))
with requests_cache.disabled():
r2 = js(httpbin('cookies'))
@@ -171,11 +175,13 @@ class CacheTestCase(unittest.TestCase):
@pytest.mark.skip(reason='httpbin.org/relative-redirect no longer returns redirects')
def test_response_history(self):
r1 = self.s.get(httpbin('relative-redirect/3'))
+
def test_redirect_history(url):
r2 = self.s.get(url)
self.assertTrue(r2.from_cache)
for r11, r22 in zip(r1.history, r2.history):
self.assertEqual(r11.url, r22.url)
+
test_redirect_history(httpbin('relative-redirect/3'))
test_redirect_history(httpbin('relative-redirect/2'))
r3 = requests.get(httpbin('relative-redirect/1'))
@@ -193,8 +199,7 @@ class CacheTestCase(unittest.TestCase):
def test_post_params(self):
# issue #2
- self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
- allowable_methods=('GET', 'POST'))
+ self.s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('GET', 'POST'))
d = {'param1': 'test1'}
for _ in range(2):
@@ -208,8 +213,7 @@ class CacheTestCase(unittest.TestCase):
def test_post_data(self):
# issue #2, raw payload
- self.s = CachedSession(CACHE_NAME, CACHE_BACKEND,
- allowable_methods=('GET', 'POST'))
+ self.s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('GET', 'POST'))
d1 = json.dumps({'param1': 'test1'})
d2 = json.dumps({'param1': 'test1', 'param2': 'test2'})
d3 = str('some unicode data')
@@ -223,8 +227,7 @@ class CacheTestCase(unittest.TestCase):
r = self.s.post(httpbin('post'), data=d)
self.assert_(hasattr(r, 'from_cache'))
- self.assertEqual(self.post(bin_data)['data'],
- bin_data.decode('utf8'))
+ self.assertEqual(self.post(bin_data)['data'], bin_data.decode('utf8'))
r = self.s.post(httpbin('post'), data=bin_data)
self.assert_(hasattr(r, 'from_cache'))
@@ -232,7 +235,7 @@ class CacheTestCase(unittest.TestCase):
for _ in range(5):
p = {'arg1': 'value1'}
r = self.s.get(httpbin('get'), params=p)
- self.assert_(self.s.cache.has_url( httpbin('get?arg1=value1')))
+ self.assert_(self.s.cache.has_url(httpbin('get?arg1=value1')))
@unittest.skipIf(sys.version_info < (2, 7), "No https in 2.6")
def test_https_support(self):
@@ -284,8 +287,7 @@ class CacheTestCase(unittest.TestCase):
def test_post_parameters_normalization(self):
params = {"a": "a", "b": ["1", "2", "3"], "c": "4"}
url = httpbin("post")
- s = CachedSession(CACHE_NAME, CACHE_BACKEND,
- allowable_methods=('GET', 'POST'))
+ s = CachedSession(CACHE_NAME, CACHE_BACKEND, allowable_methods=('GET', 'POST'))
self.assertFalse(s.post(url, data=params).from_cache)
self.assertTrue(s.post(url, data=params).from_cache)
self.assertTrue(s.post(url, data=sorted(params.items())).from_cache)
@@ -374,8 +376,7 @@ class CacheTestCase(unittest.TestCase):
usual_param = "some"
params = {ignored_param: "1", usual_param: "1"}
- s = CachedSession(CACHE_NAME, CACHE_BACKEND,
- ignored_parameters=[ignored_param])
+ s = CachedSession(CACHE_NAME, CACHE_BACKEND, ignored_parameters=[ignored_param])
r = s.get(url, params=params)
self.assertIn(ignored_param, r.json()['args'].keys())
@@ -395,9 +396,12 @@ class CacheTestCase(unittest.TestCase):
usual_param = "some"
d = {ignored_param: "1", usual_param: "1"}
- s = CachedSession(CACHE_NAME, CACHE_BACKEND,
- allowable_methods=('POST'),
- ignored_parameters=[ignored_param])
+ s = CachedSession(
+ CACHE_NAME,
+ CACHE_BACKEND,
+ allowable_methods=('POST'),
+ ignored_parameters=[ignored_param],
+ )
r = s.post(url, data=d)
self.assertIn(ignored_param, r.json()['form'].keys())
@@ -417,9 +421,12 @@ class CacheTestCase(unittest.TestCase):
usual_param = "some"
d = {ignored_param: "1", usual_param: "1"}
- s = CachedSession(CACHE_NAME, CACHE_BACKEND,
- allowable_methods=('POST'),
- ignored_parameters=[ignored_param])
+ s = CachedSession(
+ CACHE_NAME,
+ CACHE_BACKEND,
+ allowable_methods=('POST'),
+ ignored_parameters=[ignored_param],
+ )
r = s.post(url, json=d)
self.assertIn(ignored_param, json.loads(r.json()['data']).keys())
@@ -438,9 +445,12 @@ class CacheTestCase(unittest.TestCase):
ignored_param = "ignored"
raw_data = "raw test data"
- s = CachedSession(CACHE_NAME, CACHE_BACKEND,
- allowable_methods=('POST'),
- ignored_parameters=[ignored_param])
+ s = CachedSession(
+ CACHE_NAME,
+ CACHE_BACKEND,
+ allowable_methods=('POST'),
+ ignored_parameters=[ignored_param],
+ )
self.assertFalse(s.post(url, data=raw_data).from_cache)
self.assertTrue(s.post(url, data=raw_data).from_cache)
@@ -483,7 +493,7 @@ class CacheTestCase(unittest.TestCase):
resp = self.s.get(url)
self.assertTrue(resp.from_cache)
self.assertEquals(resp.json()["args"]["q"], "1")
-
+
def test_cache_date(self):
url = httpbin('get')
response1 = self.s.get(url)
diff --git a/tests/test_custom_dict.py b/tests/test_custom_dict.py
index f146d79..76f3f82 100644
--- a/tests/test_custom_dict.py
+++ b/tests/test_custom_dict.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
from requests_cache.backends.storage.dbdict import DbDict, DbPickleDict
diff --git a/tests/test_dbdict.py b/tests/test_dbdict.py
index f77385a..5404c66 100644
--- a/tests/test_dbdict.py
+++ b/tests/test_dbdict.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -10,12 +12,12 @@ except ImportError:
import unittest
from threading import Thread
-from tests.test_custom_dict import BaseCustomDictTestCase
+
from requests_cache.backends.storage.dbdict import DbDict, DbPickleDict
+from tests.test_custom_dict import BaseCustomDictTestCase
class DbdictTestCase(BaseCustomDictTestCase, unittest.TestCase):
-
def test_bulk_commit(self):
d = DbDict(self.NAMESPACE, self.TABLES[0])
with d.bulk_commit():
@@ -54,10 +56,10 @@ class DbdictTestCase(BaseCustomDictTestCase, unittest.TestCase):
self.assertEqual(sorted(d2.values()), list(range(n)))
def test_usage_with_threads(self):
-
def do_test_for(d, n_threads=5):
d.clear()
fails = []
+
def do_inserts(values):
try:
for v in values:
@@ -69,8 +71,7 @@ class DbdictTestCase(BaseCustomDictTestCase, unittest.TestCase):
def values(x, n):
return [i * x for i in range(n)]
- threads = [Thread(target=do_inserts, args=(values(i, n_threads),))
- for i in range(n_threads)]
+ threads = [Thread(target=do_inserts, args=(values(i, n_threads),)) for i in range(n_threads)]
for t in threads:
t.start()
for t in threads:
diff --git a/tests/test_dynamodbdict.py b/tests/test_dynamodbdict.py
index d29c9c9..33d3a7d 100644
--- a/tests/test_dynamodbdict.py
+++ b/tests/test_dynamodbdict.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -10,6 +12,7 @@ except ImportError:
import unittest
from tests.test_custom_dict import BaseCustomDictTestCase
+
try:
from requests_cache.backends.storage.dynamodbdict import DynamoDbDict
except ImportError:
@@ -18,8 +21,10 @@ else:
class WrapDynamoDbDict(DynamoDbDict):
def __init__(self, namespace, collection_name='dynamodb_dict_data', **options):
- options['endpoint_url'] = os.environ['DYNAMODB_ENDPOINT_URL'] if 'DYNAMODB_ENDPOINT_URL' in os.environ else None
- super(WrapDynamoDbDict,self).__init__( namespace, collection_name, **options)
+ options['endpoint_url'] = (
+ os.environ['DYNAMODB_ENDPOINT_URL'] if 'DYNAMODB_ENDPOINT_URL' in os.environ else None
+ )
+ super(WrapDynamoDbDict, self).__init__(namespace, collection_name, **options)
class DynamoDbDictTestCase(BaseCustomDictTestCase, unittest.TestCase):
dict_class = WrapDynamoDbDict
diff --git a/tests/test_gridfsdict.py b/tests/test_gridfsdict.py
index 30880e3..31271f9 100644
--- a/tests/test_gridfsdict.py
+++ b/tests/test_gridfsdict.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -10,13 +12,15 @@ except ImportError:
import unittest
from tests.test_custom_dict import BaseCustomDictTestCase
+
try:
- from requests_cache.backends.storage.mongodict import MongoDict
from requests_cache.backends.storage.gridfspickledict import GridFSPickleDict
+ from requests_cache.backends.storage.mongodict import MongoDict
except ImportError:
print("pymongo not installed")
else:
+
class MongoDictTestCase(BaseCustomDictTestCase, unittest.TestCase):
dict_class = MongoDict
pickled_dict_class = GridFSPickleDict
diff --git a/tests/test_mongodict.py b/tests/test_mongodict.py
index bd7f49e..18cc10c 100644
--- a/tests/test_mongodict.py
+++ b/tests/test_mongodict.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -10,15 +12,16 @@ except ImportError:
import unittest
from tests.test_custom_dict import BaseCustomDictTestCase
+
try:
from requests_cache.backends.storage.mongodict import MongoDict, MongoPickleDict
except ImportError:
print("pymongo not installed")
else:
+
class MongoDictTestCase(BaseCustomDictTestCase, unittest.TestCase):
dict_class = MongoDict
pickled_dict_class = MongoPickleDict
-
if __name__ == '__main__':
unittest.main()
diff --git a/tests/test_monkey_patch.py b/tests/test_monkey_patch.py
index 596544b..9cd89fc 100644
--- a/tests/test_monkey_patch.py
+++ b/tests/test_monkey_patch.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -16,7 +18,6 @@ import requests_cache
from requests_cache import CachedSession
from requests_cache.backends import BaseCache
-
CACHE_NAME = 'requests_cache_test'
CACHE_BACKEND = 'sqlite'
FAST_SAVE = False
@@ -69,7 +70,6 @@ class MonkeyPatchTestCase(unittest.TestCase):
self.assertTrue(isinstance(s, CachedSession))
def test_passing_backend_instance_support(self):
-
class MyCache(BaseCache):
pass
diff --git a/tests/test_redisdict.py b/tests/test_redisdict.py
index 6dce788..360f717 100644
--- a/tests/test_redisdict.py
+++ b/tests/test_redisdict.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -10,6 +12,7 @@ except ImportError:
import unittest
from tests.test_custom_dict import BaseCustomDictTestCase
+
try:
from requests_cache.backends.storage.redisdict import RedisDict
except ImportError:
diff --git a/tests/test_thread_safety.py b/tests/test_thread_safety.py
index 3b90e67..b6ae9d8 100644
--- a/tests/test_thread_safety.py
+++ b/tests/test_thread_safety.py
@@ -1,7 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Path hack
-import os, sys
+import os
+import sys
+
sys.path.insert(0, os.path.abspath('..'))
try:
@@ -10,25 +12,25 @@ except ImportError:
import unittest
from threading import Thread
+
from requests_cache import CachedSession
CACHE_NAME = 'requests_cache_test'
class ThreadSafetyTestCase(unittest.TestCase):
-
def test_caching_with_threads(self):
-
def do_tests_for(backend):
s = CachedSession(CACHE_NAME, backend)
s.cache.clear()
n_threads = 10
url = 'http://httpbin.org/get'
+
def do_requests(url, params):
for i in range(10): # for testing write and read from cache
s.get(url, params=params)
- for _ in range(20): # stress test
+ for _ in range(20): # stress test
threads = [Thread(target=do_requests, args=(url, {'param': i})) for i in range(n_threads)]
for t in threads:
t.start()