author     Jordan Cook <jordan.cook@pioneer.com>  2022-06-11 19:25:55 -0500
committer  Jordan Cook <jordan.cook@pioneer.com>  2022-06-11 19:33:54 -0500
commit     c0fc3079c7db0b8d744062e79342613fc8be9367 (patch)
tree       2e2278845da741b34973324df54debf42ead5c1e
parent     d04cc094efb44586dd996e6a0554dec99f7d40c6 (diff)
download   requests-cache-c0fc3079c7db0b8d744062e79342613fc8be9367.tar.gz
Update tests and docs
-rw-r--r--  HISTORY.md                              |  31
-rw-r--r--  docs/user_guide/inspection.md           |  68
-rw-r--r--  docs/user_guide/matching.md             |   3
-rwxr-xr-x  examples/basic_patching.py              |   2
-rwxr-xr-x  examples/basic_sessions.py              |   2
-rwxr-xr-x  examples/generate_test_db.py            |   2
-rw-r--r--  noxfile.py                              |   4
-rwxr-xr-x  tests/generate_test_db.py               |   2
-rw-r--r--  tests/integration/base_cache_test.py    |  12
-rw-r--r--  tests/integration/base_storage_test.py  |   8
-rw-r--r--  tests/integration/test_dynamodb.py      |   5
-rw-r--r--  tests/integration/test_mongodb.py       |   8
-rw-r--r--  tests/integration/test_redis.py         |   2
-rw-r--r--  tests/unit/test_base_cache.py           | 268
-rw-r--r--  tests/unit/test_patcher.py              |  12
-rw-r--r--  tests/unit/test_session.py              |  25
16 files changed, 249 insertions, 205 deletions
diff --git a/HISTORY.md b/HISTORY.md
index f10f94f..8926679 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -55,12 +55,14 @@
* Always skip both cache read and write for requests excluded by `allowable_methods` (previously only skipped write)
* Ignore and redact common authentication headers and request parameters by default. This provides some default recommended values for `ignored_parameters`, to avoid accidentally storing common credentials (e.g., OAuth tokens) in the cache. This will have no effect if you are already setting `ignored_parameters`.
-**Other features:**
-* Split existing features of `BaseCache.remove_expired_responses()` into multiple methods and arguments:
- * Add `BaseCache.remove()` method with `expired` and `invalid` arguments
- * Add `BaseCache.reset_expiration()` method to reset expiration for existing responses
-* Add `older_than` argument to `BaseCache.remove()` to remove responses older than a given value
-* Add `BaseCache.items()` method
+**Cache convenience methods:**
+* Add `expired` and `invalid` arguments to `BaseCache.delete()` (to replace `remove_expired_responses()`)
+* Add `older_than` argument to `BaseCache.delete()` to delete responses older than a given value
+* Add `requests` argument to `BaseCache.delete()` to delete responses matching the given requests
+* Add `BaseCache.contains()` method to check for cached requests either by key or by `requests.Request` object
+* Add `BaseCache.filter()` method to get responses from the cache with various filters
+* Add `BaseCache.reset_expiration()` method to reset expiration for existing responses
+* Update `BaseCache.urls` into a method that takes optional filter params, and returns sorted unique URLs
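A rough sketch of how these additions might be used together, based only on the entries above (the URL and durations are placeholder values, not part of the changelog):

```python
from datetime import timedelta

from requests_cache import CachedSession

session = CachedSession('example_cache', expire_after=timedelta(days=1))
session.get('https://httpbin.org/get')

# Drop expired and invalid responses, plus anything older than 30 days
session.cache.delete(expired=True, invalid=True, older_than=timedelta(days=30))

# Reset expiration for the remaining responses
session.cache.reset_expiration(timedelta(days=7))

# urls() is now a method, returning sorted unique URLs
print(session.cache.urls())
```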
**Type hints:**
* Add `OriginalResponse` type, which adds type hints to `requests.Response` objects for extra attributes added by requests-cache:
@@ -77,19 +79,30 @@
* Replace `appdirs` with `platformdirs`
**Deprecations:**
-* `BaseCache.remove_expired_responses()` and `CachedSession.remove_expired_responses()` are deprecated in favor of `BaseCache.remove()`
+
+The following methods are deprecated, and will be removed in a future release. The recommended
+replacements are listed below:
+* `BaseCache.remove_expired_responses()`: `BaseCache.delete()`
+* `CachedSession.remove_expired_responses()`: `BaseCache.delete()`
+* `BaseCache.delete_url()`: `BaseCache.delete()`
+* `BaseCache.delete_urls()`: `BaseCache.delete()`
+* `BaseCache.has_key()`: `BaseCache.contains()`
+* `BaseCache.has_url()`: `BaseCache.contains()`
+* `BaseCache.keys()`: `BaseCache.filter()`
+* `BaseCache.values()`: `BaseCache.filter()`
+* `BaseCache.response_count()`: `BaseCache.filter()`
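For illustration, migrating a few of these calls might look roughly like this (the `requests=` usage mirrors the updated tests further down; treat the rest as a sketch):

```python
from requests import Request
from requests_cache import CachedSession

session = CachedSession()
url = 'https://httpbin.org/get'

# remove_expired_responses()  ->  delete(expired=True)
session.cache.delete(expired=True)

# has_url(url)  ->  contains(request=...)
session.cache.contains(request=Request('GET', url))

# delete_url(url) / delete_urls([url])  ->  delete(requests=[...])
session.cache.delete(requests=[Request('GET', url).prepare()])

# keys() / values() / response_count()  ->  filter()
print(len(list(session.cache.filter())))
```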
**Breaking changes:**
Some relatively minor breaking changes have been made that are not expected to affect most users.
-If you encounter a problem not listed here after updating to 1.0, please file a bug report!
+If you encounter a problem not listed here after updating to 1.0, please create a bug report!
* The following undocumented behaviors have been removed:
* The arguments `match_headers` and `ignored_parameters` must be passed to `CachedSession`. Previously, these could also be passed to a `BaseCache` instance.
* The `CachedSession` `backend` argument must be either an instance or string alias. Previously it would also accept a backend class.
* After initialization, cache settings can only be accessed and modified via
`CachedSession.settings`. Previously, some settings could be modified by setting them on either `CachedSession` or `BaseCache`. In some cases this could silently fail or otherwise have undefined behavior.
-* The following is relevant for users who have made custom backends that extend built-in storage classes:
+* The following is relevant for users who have made **custom backends** that extend built-in storage classes:
* All `BaseStorage` subclasses now have a `serializer` attribute, which will be unused if
set to `None`.
* All serializer-specific `BaseStorage` subclasses have been removed, and merged into their respective parent classes. This includes `SQLitePickleDict`, `MongoPickleDict`, and `GridFSPickleDict`.
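To make the new constraints concrete, here is a minimal sketch of session setup under 1.0 (the header/parameter names and the `expire_after` setting shown here are illustrative assumptions, not taken from the changelog):

```python
from requests_cache import CachedSession

# match_headers/ignored_parameters go to the session, and the backend is
# given as a string alias or an instance (a backend class is no longer accepted)
session = CachedSession(
    'example_cache',
    backend='sqlite',
    match_headers=['Accept'],
    ignored_parameters=['api_key'],
)

# After initialization, settings are read and modified via CachedSession.settings
session.settings.expire_after = 3600  # assumed attribute name, for illustration
```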
diff --git a/docs/user_guide/inspection.md b/docs/user_guide/inspection.md
index 26645b2..73f7c29 100644
--- a/docs/user_guide/inspection.md
+++ b/docs/user_guide/inspection.md
@@ -20,12 +20,12 @@ Examples:
>>> from requests_cache import CachedSession
>>> session = CachedSession(expire_after=timedelta(days=1))
->>> # Placeholders are added for non-cached responses
+>>> # Placeholder attributes are added for non-cached responses
>>> response = session.get('https://httpbin.org/get')
>>> print(response.from_cache, response.created_at, response.expires, response.is_expired)
False None None None
->>> # Values will be populated for cached responses
+>>> # These attributes will be populated for cached responses
>>> response = session.get('https://httpbin.org/get')
>>> print(response.from_cache, response.created_at, response.expires, response.is_expired)
True 2021-01-01 18:00:00 2021-01-02 18:00:00 False
@@ -37,49 +37,55 @@ True 2021-01-01 18:00:00 2021-01-02 18:00:00 False
:::
## Cache Contents
-You can use `CachedSession.cache.urls` to see all URLs currently in the cache:
+
+### Checking for responses
+Use {py:meth}`.BaseCache.contains` to check if a given request is cached.
+Either check with a {py:class}`~requests.models.Request` object:
```python
->>> session = CachedSession()
->>> print(session.cache.urls)
-['https://httpbin.org/get', 'https://httpbin.org/stream/100']
-```
+>>> from requests import Request
-If needed, you can get more details on cached responses via `CachedSession.cache.responses`, which
-is a dict-like interface to the cache backend. See {py:class}`.CachedResponse` for a full list of
-attributes available.
+>>> request = Request('GET', 'https://httpbin.org/get', params={'k': 'v'})
+>>> print(session.cache.contains(request=request))
+```
-For example, if you wanted to to see all URLs requested with a specific method:
+Or with a cache key:
```python
->>> post_urls = [
-... response.url for response in session.cache.responses.values()
-... if response.request.method == 'POST'
-... ]
+>>> print(session.cache.contains('d1e666e9fdfb3f86'))
```
-You can also inspect `CachedSession.cache.redirects`, which maps redirect URLs to keys of the
-responses they redirect to.
+### Filtering responses
+Use {py:meth}`.BaseCache.filter` to get responses with optional filters. By default, it returns all
+responses except any invalid ones that would raise an exception:
+```python
+>>> for response in session.cache.filter():
+>>> print(response)
+```
-Additional `keys()` and `values()` wrapper methods are available on {py:class}`.BaseCache` to get
-combined keys and responses.
+Get unexpired responses:
```python
->>> print('All responses:')
->>> for response in session.cache.values():
+>>> for response in session.cache.filter(expired=False):
>>> print(response)
+```
->>> print('All cache keys for redirects and responses combined:')
->>> print(list(session.cache.keys()))
+Get keys for **only** expired responses:
+```python
+>>> expired_responses = session.cache.filter(valid=False, expired=True)
+>>> keys = [response.cache_key for response in expired_responses]
```
-Both methods also take a `include_expired` argument. Set to `False` to exclude expired responses:
+### Response URLs
+You can use {py:meth}`.BaseCache.urls` to see all URLs currently in the cache:
```python
->>> print('All unexpired responses:')
->>> for response in session.cache.values(include_expired=False):
->>> print(response)
+>>> session = CachedSession()
+>>> print(session.cache.urls())
+['https://httpbin.org/get', 'https://httpbin.org/stream/100']
```
-Similarly, you can get a count of responses with {py:meth}`.BaseCache.response_count`, and optionally
-exclude expired responses:
+If needed, you can access all responses via `CachedSession.cache.responses`, which is a dict-like
+interface to the cache backend. For example, if you wanted to see all URLs requested with a specific method:
```python
->>> print(f'Total responses: {session.cache.response_count()}')
->>> print(f'Unexpired responses: {session.cache.response_count(include_expired=False)}')
+>>> post_urls = [
+... response.url for response in session.cache.responses.values()
+... if response.request.method == 'POST'
+... ]
```
diff --git a/docs/user_guide/matching.md b/docs/user_guide/matching.md
index 596298b..c368b7d 100644
--- a/docs/user_guide/matching.md
+++ b/docs/user_guide/matching.md
@@ -77,7 +77,8 @@ cached response:
```
If you want to implement your own request matching, you can provide a cache key function which will
-take a {py:class}`~requests.PreparedRequest` plus optional keyword args, and return a string:
+take a {py:class}`~requests.PreparedRequest` plus optional keyword args for
+{py:func}`~requests.request`, and return a string:
```python
def create_key(request: requests.PreparedRequest, **kwargs) -> str:
"""Generate a custom cache key for the given request"""
diff --git a/examples/basic_patching.py b/examples/basic_patching.py
index 66842f2..d7ef0d3 100755
--- a/examples/basic_patching.py
+++ b/examples/basic_patching.py
@@ -29,7 +29,7 @@ def main():
# Get some debugging info about the cache
print(requests_cache.get_cache())
print('Cached URLS:')
- print('\n'.join(requests_cache.get_cache().urls))
+ print('\n'.join(requests_cache.get_cache().urls()))
# Uninstall to remove caching from all requests functions
requests_cache.uninstall_cache()
diff --git a/examples/basic_sessions.py b/examples/basic_sessions.py
index 3c1e907..21c746e 100755
--- a/examples/basic_sessions.py
+++ b/examples/basic_sessions.py
@@ -26,7 +26,7 @@ def main():
# Get some debugging info about the cache
print(session.cache)
print('Cached URLS:')
- print('\n'.join(session.cache.urls))
+ print('\n'.join(session.cache.urls()))
if __name__ == "__main__":
diff --git a/examples/generate_test_db.py b/examples/generate_test_db.py
index 93822fd..5950688 100755
--- a/examples/generate_test_db.py
+++ b/examples/generate_test_db.py
@@ -92,7 +92,7 @@ def remove_expired_responses():
total_responses = len(session.cache.responses)
start = time()
- session.cache.remove(expired=True)
+ session.cache.delete(expired=True)
elapsed = time() - start
n_removed = total_responses - len(session.cache.responses)
logger.info(
diff --git a/noxfile.py b/noxfile.py
index 93f6cbc..e852294 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -35,9 +35,9 @@ XDIST_ARGS = '--numprocesses=auto --dist=loadfile'
def test(session):
"""Run tests in a separate virtualenv per python version"""
test_paths = session.posargs or [UNIT_TESTS, INTEGRATION_TESTS]
- session.install('.', 'pytest', 'pytest-xdist', 'requests-mock', 'timeout-decorator')
+ session.install('.', 'pytest', 'pytest-xdist', 'requests-mock', 'rich', 'timeout-decorator')
- cmd = f'pytest -rs -x {XDIST_ARGS}'
+ cmd = f'pytest -rs {XDIST_ARGS}'
session.run(*cmd.split(' '), *test_paths)
diff --git a/tests/generate_test_db.py b/tests/generate_test_db.py
index 2ad1438..bd7f09a 100755
--- a/tests/generate_test_db.py
+++ b/tests/generate_test_db.py
@@ -17,7 +17,7 @@ def make_sample_db():
for format in HTTPBIN_FORMATS:
session.get(f'https://httpbin.org/{format}')
- print(list(session.cache.urls))
+ print(session.cache.urls())
if __name__ == '__main__':
diff --git a/tests/integration/base_cache_test.py b/tests/integration/base_cache_test.py
index 6f63197..5d69ea8 100644
--- a/tests/integration/base_cache_test.py
+++ b/tests/integration/base_cache_test.py
@@ -285,7 +285,7 @@ class BaseCacheTest:
for i in range(5):
assert session.post(httpbin('post'), files={'file1': BytesIO(b'10' * 1024)}).from_cache
- def test_remove_expired(self):
+ def test_delete__expired(self):
session = self.init_session(expire_after=1)
# Populate the cache with several responses that should expire immediately
@@ -298,21 +298,13 @@ class BaseCacheTest:
session.get(httpbin('get'), expire_after=-1)
session.get(httpbin('redirect/3'), expire_after=-1)
assert len(session.cache.redirects.keys()) == 4
- session.cache.remove(expired=True)
+ session.cache.delete(expired=True)
assert len(session.cache.responses.keys()) == 2
assert len(session.cache.redirects.keys()) == 3
assert not session.cache.has_url(httpbin('redirect/1'))
assert not any([session.cache.has_url(httpbin(f)) for f in HTTPBIN_FORMATS])
- def test_bulk_delete__noop(self):
- """Just make sure bulk_delete doesn't do anything unexpected if no keys are provided"""
- session = self.init_session()
- for i in range(100):
- session.cache.responses[f'key_{i}'] = f'value_{i}'
- session.cache.bulk_delete([])
- assert len(session.cache.responses) == 100
-
@pytest.mark.parametrize('method', HTTPBIN_METHODS)
def test_filter_request_headers(self, method):
url = httpbin(method.lower())
diff --git a/tests/integration/base_storage_test.py b/tests/integration/base_storage_test.py
index ee3bf31..4e0f217 100644
--- a/tests/integration/base_storage_test.py
+++ b/tests/integration/base_storage_test.py
@@ -85,6 +85,14 @@ class BaseStorageTest:
assert set(cache.keys()) == {f'key_{i}' for i in range(5, 20)}
assert set(cache.values()) == {f'value_{i}' for i in range(5, 20)}
+ def test_bulk_delete__noop(self):
+ """Just make sure bulk_delete doesn't do anything unexpected if no keys are provided"""
+ cache = self.init_cache()
+ for i in range(20):
+ cache[f'key_{i}'] = f'value_{i}'
+ cache.bulk_delete([])
+ assert len(cache) == 20
+
def test_keyerrors(self):
"""Accessing or deleting a deleted item should raise a KeyError"""
cache = self.init_cache()
diff --git a/tests/integration/test_dynamodb.py b/tests/integration/test_dynamodb.py
index 8c5fe31..561c979 100644
--- a/tests/integration/test_dynamodb.py
+++ b/tests/integration/test_dynamodb.py
@@ -3,7 +3,6 @@ from decimal import Decimal
from unittest.mock import patch
import pytest
-from botocore.exceptions import ClientError
from requests_cache.backends import DynamoDbCache, DynamoDbDict
from tests.conftest import fail_if_no_connection
@@ -44,6 +43,8 @@ class TestDynamoDbDict(BaseStorageTest):
def test_create_table_error(self):
"""An error other than 'table already exists' should be reraised"""
+ from botocore.exceptions import ClientError
+
cache = self.init_cache()
error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'CreateTable')
with patch.object(cache.connection.meta.client, 'update_time_to_live', side_effect=error):
@@ -52,6 +53,8 @@ class TestDynamoDbDict(BaseStorageTest):
def test_enable_ttl_error(self):
"""An error other than 'ttl already enabled' should be reraised"""
+ from botocore.exceptions import ClientError
+
cache = self.init_cache()
error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'CreateTable')
with patch.object(cache.connection, 'create_table', side_effect=error):
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index b122880..856c0f4 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -3,8 +3,6 @@ from time import sleep
from unittest.mock import patch
import pytest
-from gridfs import GridFS
-from gridfs.errors import CorruptGridFile, FileExists
from requests_cache.backends import GridFSCache, GridFSDict, MongoCache, MongoDict
from requests_cache.policy import NEVER_EXPIRE
@@ -107,12 +105,18 @@ class TestGridFSDict(BaseStorageTest):
def test_corrupt_file(self):
"""A corrupted file should be handled and raise a KeyError instead"""
+ from gridfs import GridFS
+ from gridfs.errors import CorruptGridFile
+
cache = self.init_cache()
cache['key'] = 'value'
with pytest.raises(KeyError), patch.object(GridFS, 'find_one', side_effect=CorruptGridFile):
cache['key']
def test_file_exists(self):
+ from gridfs import GridFS
+ from gridfs.errors import FileExists
+
cache = self.init_cache()
# This write should just quietly fail
diff --git a/tests/integration/test_redis.py b/tests/integration/test_redis.py
index 08f1ee0..83a15d5 100644
--- a/tests/integration/test_redis.py
+++ b/tests/integration/test_redis.py
@@ -2,7 +2,7 @@ from unittest.mock import patch
import pytest
-from requests_cache.backends.redis import RedisCache, RedisDict, RedisHashDict
+from requests_cache.backends import RedisCache, RedisDict, RedisHashDict
from tests.conftest import fail_if_no_connection
from tests.integration.base_cache_test import BaseCacheTest
from tests.integration.base_storage_test import BaseStorageTest
diff --git a/tests/unit/test_base_cache.py b/tests/unit/test_base_cache.py
index 6e68ea5..f720cb8 100644
--- a/tests/unit/test_base_cache.py
+++ b/tests/unit/test_base_cache.py
@@ -6,6 +6,7 @@ from time import sleep
from unittest.mock import patch
import pytest
+from requests import Request
from requests_cache.backends import BaseCache, SQLiteDict
from requests_cache.models import CachedRequest, CachedResponse
@@ -21,7 +22,7 @@ YESTERDAY = datetime.utcnow() - timedelta(days=1)
logger = getLogger(__name__)
-class TimeBomb:
+class InvalidResponse:
"""Class that will raise an error when unpickled"""
def __init__(self):
@@ -31,64 +32,23 @@ class TimeBomb:
raise ValueError('Invalid response!')
-def test_urls__with_invalid_response(mock_session):
- responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
- responses[2] = AttributeError
- with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
- expected_urls = [MOCKED_URL, MOCKED_URL_JSON]
- assert set(mock_session.cache.urls) == set(expected_urls)
-
- # The invalid response should be skipped, but remain in the cache for now
- assert len(mock_session.cache.responses.keys()) == 3
-
-
-def test_keys(mock_session):
- for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]:
- mock_session.get(url)
-
- all_keys = set(mock_session.cache.responses.keys()) | set(mock_session.cache.redirects.keys())
- assert set(mock_session.cache.keys()) == all_keys
-
-
-def test_values(mock_session):
- for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
- mock_session.get(url)
-
- responses = list(mock_session.cache.values())
- assert len(responses) == 3
- assert all([isinstance(response, CachedResponse) for response in responses])
-
-
-@pytest.mark.parametrize('include_expired, expected_count', [(False, 1), (True, 2)])
-def test_values__with_invalid_responses(include_expired, expected_count, mock_session):
- """values() should always exclude invalid responses, and optionally exclude expired responses"""
- responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
- responses[1] = AttributeError
- responses[2] = CachedResponse(expires=YESTERDAY, url='test')
-
- with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
- values = mock_session.cache.values(include_expired=include_expired)
- assert len(list(values)) == expected_count
-
- # The invalid response should be skipped, but remain in the cache for now
- assert len(mock_session.cache.responses.keys()) == 3
-
+def test_contains__key(mock_session):
+ mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+ key = list(mock_session.cache.responses.keys())[0]
+ assert mock_session.cache.contains(key)
+ assert not mock_session.cache.contains(f'{key}_b')
-@pytest.mark.parametrize('include_expired, expected_count', [(False, 2), (True, 3)])
-def test_response_count(include_expired, expected_count, mock_session):
- """response_count() should always exclude invalid responses, and optionally exclude expired
- and invalid responses"""
- mock_session.get(MOCKED_URL)
- mock_session.get(MOCKED_URL_JSON)
- mock_session.cache.responses['expired_response'] = CachedResponse(expires=YESTERDAY)
- mock_session.cache.responses['invalid_response'] = TimeBomb()
- assert mock_session.cache.response_count(include_expired=include_expired) == expected_count
+def test_contains__request(mock_session):
+ mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+ request = Request('GET', MOCKED_URL, params={'foo': 'bar'})
+ assert mock_session.cache.contains(request=request)
+ request.params = None
+ assert not mock_session.cache.contains(request=request)
@patch_normalize_url
-def test_remove__expired(mock_normalize_url, mock_session):
- """Test BaseCache.remove_expired_responses()"""
+def test_delete__expired(mock_normalize_url, mock_session):
unexpired_url = f'{MOCKED_URL}?x=1'
mock_session.mock_adapter.register_uri(
'GET', unexpired_url, status_code=200, text='mock response'
@@ -103,37 +63,18 @@ def test_remove__expired(mock_normalize_url, mock_session):
assert len(mock_session.cache.responses) == 3
# Use the generic BaseCache implementation, not the SQLite-specific one
- BaseCache.remove(mock_session.cache, expired=True)
+ BaseCache.delete(mock_session.cache, expired=True)
assert len(mock_session.cache.responses) == 1
cached_response = list(mock_session.cache.responses.values())[0]
assert cached_response.url == unexpired_url
# Now the last response should be expired as well
sleep(1)
- BaseCache.remove(mock_session.cache, expired=True)
+ BaseCache.delete(mock_session.cache, expired=True)
assert len(mock_session.cache.responses) == 0
-def test_remove__error(mock_session):
- # Start with two cached responses, one of which will raise an error
- response_1 = mock_session.get(MOCKED_URL)
- response_2 = mock_session.get(MOCKED_URL_JSON)
-
- def error_on_key(key):
- if key == response_2.cache_key:
- raise PickleError
- return CachedResponse.from_response(response_1)
-
- # Use the generic BaseCache implementation, not the SQLite-specific one
- with patch.object(SQLiteDict, '__getitem__', side_effect=error_on_key):
- BaseCache.remove(mock_session.cache, expired=True)
-
- assert len(mock_session.cache.responses) == 1
- assert mock_session.get(MOCKED_URL).from_cache is True
- assert mock_session.get(MOCKED_URL_JSON).from_cache is False
-
-
-def test_remove__expired__per_request(mock_session):
+def test_delete__expired__per_request(mock_session):
# Cache 3 responses with different expiration times
second_url = f'{MOCKED_URL}/endpoint_2'
third_url = f'{MOCKED_URL}/endpoint_3'
@@ -144,21 +85,40 @@ def test_remove__expired__per_request(mock_session):
mock_session.get(third_url, expire_after=4)
# All 3 responses should still be cached
- mock_session.cache.remove(expired=True)
+ mock_session.cache.delete(expired=True)
for response in mock_session.cache.responses.values():
logger.info(f'Expires in {response.expires_delta} seconds')
assert len(mock_session.cache.responses) == 3
# One should be expired after 2s, and another should be expired after 4s
sleep(2)
- mock_session.cache.remove(expired=True)
+ mock_session.cache.delete(expired=True)
assert len(mock_session.cache.responses) == 2
sleep(2)
- mock_session.cache.remove(expired=True)
+ mock_session.cache.delete(expired=True)
assert len(mock_session.cache.responses) == 1
-def test_remove__older_than(mock_session):
+def test_delete__invalid(mock_session):
+ # Start with two cached responses, one of which will raise an error
+ response_1 = mock_session.get(MOCKED_URL)
+ response_2 = mock_session.get(MOCKED_URL_JSON)
+
+ def error_on_key(key):
+ if key == response_2.cache_key:
+ raise PickleError
+ return CachedResponse.from_response(response_1)
+
+ # Use the generic BaseCache implementation, not the SQLite-specific one
+ with patch.object(SQLiteDict, '__getitem__', side_effect=error_on_key):
+ BaseCache.delete(mock_session.cache, expired=True, invalid=True)
+
+ assert len(mock_session.cache.responses) == 1
+ assert mock_session.get(MOCKED_URL).from_cache is True
+ assert mock_session.get(MOCKED_URL_JSON).from_cache is False
+
+
+def test_delete__older_than(mock_session):
# Cache 4 responses with different creation times
response_0 = CachedResponse(request=CachedRequest(method='GET', url='https://test.com/test_0'))
mock_session.cache.save_response(response_0)
@@ -174,30 +134,29 @@ def test_remove__older_than(mock_session):
# Incrementally remove responses older than 3, 2, and 1 seconds
assert len(mock_session.cache.responses) == 4
- mock_session.cache.remove(older_than=timedelta(seconds=3))
+ mock_session.cache.delete(older_than=timedelta(seconds=3))
assert len(mock_session.cache.responses) == 3
- mock_session.cache.remove(older_than=timedelta(seconds=2))
+ mock_session.cache.delete(older_than=timedelta(seconds=2))
assert len(mock_session.cache.responses) == 2
- mock_session.cache.remove(older_than=timedelta(seconds=1))
+ mock_session.cache.delete(older_than=timedelta(seconds=1))
assert len(mock_session.cache.responses) == 1
# Remove the last response after it's 1 second old
sleep(1)
- mock_session.cache.remove(older_than=timedelta(seconds=1))
+ mock_session.cache.delete(older_than=timedelta(seconds=1))
assert len(mock_session.cache.responses) == 0
-def test_remove_expired_responses(mock_session):
- """Test for backwards-compatibility"""
- with patch.object(mock_session.cache, 'remove') as mock_remove, patch.object(
- mock_session.cache, 'reset_expiration'
- ) as mock_reset:
- mock_session.cache.remove_expired_responses(expire_after=1)
- mock_remove.assert_called_once_with(expired=True, invalid=True)
- mock_reset.assert_called_once_with(1)
+def test_delete__requests(mock_session):
+ urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
+ for url in urls:
+ mock_session.get(url)
- mock_session.cache.remove_expired_responses()
- assert mock_remove.call_count == 2 and mock_reset.call_count == 1
+ requests = [Request('GET', url).prepare() for url in urls]
+ mock_session.cache.delete(requests=requests)
+
+ for request in requests:
+ assert not mock_session.cache.contains(request=request)
def test_reset_expiration__extend_expiration(mock_session):
@@ -231,13 +190,48 @@ def test_clear(mock_session):
assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-def test_has_url(mock_session):
- mock_session.get(MOCKED_URL)
- assert mock_session.cache.has_url(MOCKED_URL)
- assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
+def test_save_response__manual(mock_session):
+ response = mock_session.get(MOCKED_URL)
+ mock_session.cache.clear()
+ mock_session.cache.save_response(response)
+
+def test_update(mock_session):
+ src_cache = BaseCache()
+ for i in range(20):
+ src_cache.responses[f'key_{i}'] = f'value_{i}'
+ src_cache.redirects[f'key_{i}'] = f'value_{i}'
-def test_has_url__request_args(mock_session):
+ mock_session.cache.update(src_cache)
+ assert len(mock_session.cache.responses) == 20
+ assert len(mock_session.cache.redirects) == 20
+
+
+@patch_normalize_url
+def test_urls(mock_normalize_url, mock_session):
+ for url in [MOCKED_URL, MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
+ mock_session.get(url)
+
+ expected_urls = [MOCKED_URL_JSON, MOCKED_URL, MOCKED_URL_HTTPS]
+ assert mock_session.cache.urls() == expected_urls
+
+
+def test_urls__error(mock_session):
+ responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
+ responses[2] = AttributeError
+ with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
+ expected_urls = [MOCKED_URL_JSON, MOCKED_URL]
+ assert mock_session.cache.urls() == expected_urls
+
+ # The invalid response should be skipped, but remain in the cache
+ assert len(mock_session.cache.responses.keys()) == 3
+
+
+# Deprecated methods
+# --------------------
+
+
+def test_has_url(mock_session):
mock_session.get(MOCKED_URL, params={'foo': 'bar'})
assert mock_session.cache.has_url(MOCKED_URL, params={'foo': 'bar'})
assert not mock_session.cache.has_url(MOCKED_URL)
@@ -265,36 +259,64 @@ def test_delete_url__nonexistent_response(mock_session):
mock_session.cache.delete_url(MOCKED_URL) # Should fail silently
-def test_delete_url__redirect(mock_session):
- mock_session.get(MOCKED_URL_REDIRECT)
- assert mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-
- mock_session.cache.delete_url(MOCKED_URL_REDIRECT)
- assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
+def test_delete_urls(mock_session):
+ mock_session.get(MOCKED_URL)
+ mock_session.cache.delete_urls([MOCKED_URL])
+ assert not mock_session.cache.has_url(MOCKED_URL)
-def test_delete_urls(mock_session):
- urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
- for url in urls:
+def test_keys(mock_session):
+ for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]:
mock_session.get(url)
- mock_session.cache.delete_urls(urls)
- for url in urls:
- assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
+ all_keys = set(mock_session.cache.responses.keys()) | set(mock_session.cache.redirects.keys())
+ assert set(mock_session.cache.keys()) == all_keys
-def test_save_response__manual(mock_session):
- response = mock_session.get(MOCKED_URL)
- mock_session.cache.clear()
- mock_session.cache.save_response(response)
+def test_remove_expired_responses(mock_session):
+ """Test for backwards-compatibility"""
+ with patch.object(mock_session.cache, 'delete') as mock_delete, patch.object(
+ mock_session.cache, 'reset_expiration'
+ ) as mock_reset:
+ mock_session.cache.remove_expired_responses(expire_after=1)
+ mock_delete.assert_called_once_with(expired=True, invalid=True)
+ mock_reset.assert_called_once_with(1)
+ mock_session.cache.remove_expired_responses()
+ assert mock_delete.call_count == 2 and mock_reset.call_count == 1
-def test_update(mock_session):
- src_cache = BaseCache()
- for i in range(20):
- src_cache.responses[f'key_{i}'] = f'value_{i}'
- src_cache.redirects[f'key_{i}'] = f'value_{i}'
- mock_session.cache.update(src_cache)
- assert len(mock_session.cache.responses) == 20
- assert len(mock_session.cache.redirects) == 20
+@pytest.mark.parametrize('check_expiry, expected_count', [(True, 2), (False, 3)])
+def test_response_count(check_expiry, expected_count, mock_session):
+ """response_count() should always exclude invalid responses, and optionally exclude expired
+ responses"""
+ mock_session.get(MOCKED_URL)
+ mock_session.get(MOCKED_URL_JSON)
+
+ mock_session.cache.responses['expired_response'] = CachedResponse(expires=YESTERDAY)
+ mock_session.cache.responses['invalid_response'] = InvalidResponse()
+ assert mock_session.cache.response_count(check_expiry=check_expiry) == expected_count
+
+
+def test_values(mock_session):
+ for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
+ mock_session.get(url)
+
+ responses = list(mock_session.cache.values())
+ assert len(responses) == 3
+ assert all([isinstance(response, CachedResponse) for response in responses])
+
+
+@pytest.mark.parametrize('check_expiry, expected_count', [(True, 1), (False, 2)])
+def test_values__with_invalid_responses(check_expiry, expected_count, mock_session):
+ """values() should always exclude invalid responses, and optionally exclude expired responses"""
+ responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
+ responses[1] = AttributeError
+ responses[2] = CachedResponse(expires=YESTERDAY, url='test')
+
+ with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
+ values = mock_session.cache.values(check_expiry=check_expiry)
+ assert len(list(values)) == expected_count
+
+ # The invalid response should be skipped, but remain in the cache for now
+ assert len(mock_session.cache.responses.keys()) == 3
diff --git a/tests/unit/test_patcher.py b/tests/unit/test_patcher.py
index d656b19..5a99d6a 100644
--- a/tests/unit/test_patcher.py
+++ b/tests/unit/test_patcher.py
@@ -81,15 +81,15 @@ def test_is_installed():
assert requests_cache.is_installed() is False
-@patch.object(BaseCache, 'remove')
-def test_remove_expired_responses(mock_remove):
+@patch.object(BaseCache, 'delete')
+def test_remove_expired_responses(mock_delete):
requests_cache.install_cache(backend='memory', expire_after=360)
requests_cache.remove_expired_responses()
- assert mock_remove.called is True
+ assert mock_delete.called is True
requests_cache.uninstall_cache()
-@patch.object(BaseCache, 'remove')
-def test_remove_expired_responses__cache_not_installed(mock_remove):
+@patch.object(BaseCache, 'delete')
+def test_remove_expired_responses__cache_not_installed(mock_delete):
requests_cache.remove_expired_responses()
- assert mock_remove.called is False
+ assert mock_delete.called is False
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 2354ea3..9a2e0dc 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -185,15 +185,6 @@ def test_response_history(mock_session):
assert len(mock_session.cache.redirects) == 1
-@patch_normalize_url
-def test_urls(mock_normalize_url, mock_session):
- for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
- mock_session.get(url)
-
- expected_urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]
- assert set(mock_session.cache.urls) == set(expected_urls)
-
-
# Request matching
# -----------------------------------------------------
@@ -630,12 +621,6 @@ def test_url_allowlist(mock_session):
assert not mock_session.cache.has_url(MOCKED_URL)
-def test_remove_expired_responses(mock_session):
- with patch.object(mock_session.cache, 'remove') as mock_remove:
- mock_session.remove_expired_responses()
- mock_remove.assert_called_once_with(expired=True, invalid=True)
-
-
def test_stale_while_revalidate(mock_session):
# Start with expired responses
mocked_url_2 = f'{MOCKED_URL_ETAG}?k=v'
@@ -862,3 +847,13 @@ def test_request_force_refresh__prepared_request(mock_session):
assert response_2.from_cache is False
assert response_3.from_cache is True
assert response_3.expires is not None
+
+
+# Deprecated methods
+# --------------------
+
+
+def test_remove_expired_responses(mock_session):
+ with patch.object(mock_session.cache, 'delete') as mock_delete:
+ mock_session.remove_expired_responses()
+ mock_delete.assert_called_once_with(expired=True, invalid=True)