author     Jordan Cook <jordan.cook@pioneer.com>  2022-06-11 19:25:55 -0500
committer  Jordan Cook <jordan.cook@pioneer.com>  2022-06-11 19:33:54 -0500
commit     c0fc3079c7db0b8d744062e79342613fc8be9367 (patch)
tree       2e2278845da741b34973324df54debf42ead5c1e /tests
parent     d04cc094efb44586dd996e6a0554dec99f7d40c6 (diff)
download   requests-cache-c0fc3079c7db0b8d744062e79342613fc8be9367.tar.gz
Update tests and docs
Diffstat (limited to 'tests')
-rwxr-xr-x  tests/generate_test_db.py                 2
-rw-r--r--  tests/integration/base_cache_test.py     12
-rw-r--r--  tests/integration/base_storage_test.py    8
-rw-r--r--  tests/integration/test_dynamodb.py        5
-rw-r--r--  tests/integration/test_mongodb.py         8
-rw-r--r--  tests/integration/test_redis.py           2
-rw-r--r--  tests/unit/test_base_cache.py           268
-rw-r--r--  tests/unit/test_patcher.py               12
-rw-r--r--  tests/unit/test_session.py               25
9 files changed, 183 insertions, 159 deletions
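
Note: the test changes below track a rename in the BaseCache API (remove() -> delete()), the change of cache.urls from a property to a method, and a new contains() check. A minimal sketch of the updated usage, as implied by the tests below (assuming a standard CachedSession; exact signatures may differ):

    from requests_cache import CachedSession

    session = CachedSession('demo_cache', expire_after=360)
    session.get('https://httpbin.org/get')

    # urls() is now a method rather than a property
    print(session.cache.urls())

    # remove(expired=True) is renamed to delete(expired=True)
    session.cache.delete(expired=True)

    # New membership check, by cache key or by request
    key = next(iter(session.cache.responses.keys()))
    assert session.cache.contains(key)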
diff --git a/tests/generate_test_db.py b/tests/generate_test_db.py
index 2ad1438..bd7f09a 100755
--- a/tests/generate_test_db.py
+++ b/tests/generate_test_db.py
@@ -17,7 +17,7 @@ def make_sample_db():
for format in HTTPBIN_FORMATS:
session.get(f'https://httpbin.org/{format}')
- print(list(session.cache.urls))
+ print(session.cache.urls())
if __name__ == '__main__':
diff --git a/tests/integration/base_cache_test.py b/tests/integration/base_cache_test.py
index 6f63197..5d69ea8 100644
--- a/tests/integration/base_cache_test.py
+++ b/tests/integration/base_cache_test.py
@@ -285,7 +285,7 @@ class BaseCacheTest:
for i in range(5):
assert session.post(httpbin('post'), files={'file1': BytesIO(b'10' * 1024)}).from_cache
- def test_remove_expired(self):
+ def test_delete__expired(self):
session = self.init_session(expire_after=1)
# Populate the cache with several responses that should expire immediately
@@ -298,21 +298,13 @@ class BaseCacheTest:
session.get(httpbin('get'), expire_after=-1)
session.get(httpbin('redirect/3'), expire_after=-1)
assert len(session.cache.redirects.keys()) == 4
- session.cache.remove(expired=True)
+ session.cache.delete(expired=True)
assert len(session.cache.responses.keys()) == 2
assert len(session.cache.redirects.keys()) == 3
assert not session.cache.has_url(httpbin('redirect/1'))
assert not any([session.cache.has_url(httpbin(f)) for f in HTTPBIN_FORMATS])
- def test_bulk_delete__noop(self):
- """Just make sure bulk_delete doesn't do anything unexpected if no keys are provided"""
- session = self.init_session()
- for i in range(100):
- session.cache.responses[f'key_{i}'] = f'value_{i}'
- session.cache.bulk_delete([])
- assert len(session.cache.responses) == 100
-
@pytest.mark.parametrize('method', HTTPBIN_METHODS)
def test_filter_request_headers(self, method):
url = httpbin(method.lower())
diff --git a/tests/integration/base_storage_test.py b/tests/integration/base_storage_test.py
index ee3bf31..4e0f217 100644
--- a/tests/integration/base_storage_test.py
+++ b/tests/integration/base_storage_test.py
@@ -85,6 +85,14 @@ class BaseStorageTest:
assert set(cache.keys()) == {f'key_{i}' for i in range(5, 20)}
assert set(cache.values()) == {f'value_{i}' for i in range(5, 20)}
+ def test_bulk_delete__noop(self):
+ """Just make sure bulk_delete doesn't do anything unexpected if no keys are provided"""
+ cache = self.init_cache()
+ for i in range(20):
+ cache[f'key_{i}'] = f'value_{i}'
+ cache.bulk_delete([])
+ assert len(cache) == 20
+
def test_keyerrors(self):
"""Accessing or deleting a deleted item should raise a KeyError"""
cache = self.init_cache()
diff --git a/tests/integration/test_dynamodb.py b/tests/integration/test_dynamodb.py
index 8c5fe31..561c979 100644
--- a/tests/integration/test_dynamodb.py
+++ b/tests/integration/test_dynamodb.py
@@ -3,7 +3,6 @@ from decimal import Decimal
from unittest.mock import patch
import pytest
-from botocore.exceptions import ClientError
from requests_cache.backends import DynamoDbCache, DynamoDbDict
from tests.conftest import fail_if_no_connection
@@ -44,6 +43,8 @@ class TestDynamoDbDict(BaseStorageTest):
def test_create_table_error(self):
"""An error other than 'table already exists' should be reraised"""
+ from botocore.exceptions import ClientError
+
cache = self.init_cache()
error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'CreateTable')
with patch.object(cache.connection.meta.client, 'update_time_to_live', side_effect=error):
@@ -52,6 +53,8 @@ class TestDynamoDbDict(BaseStorageTest):
def test_enable_ttl_error(self):
"""An error other than 'ttl already enabled' should be reraised"""
+ from botocore.exceptions import ClientError
+
cache = self.init_cache()
error = ClientError({'Error': {'Code': 'NullPointerException'}}, 'CreateTable')
with patch.object(cache.connection, 'create_table', side_effect=error):
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index b122880..856c0f4 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -3,8 +3,6 @@ from time import sleep
from unittest.mock import patch
import pytest
-from gridfs import GridFS
-from gridfs.errors import CorruptGridFile, FileExists
from requests_cache.backends import GridFSCache, GridFSDict, MongoCache, MongoDict
from requests_cache.policy import NEVER_EXPIRE
@@ -107,12 +105,18 @@ class TestGridFSDict(BaseStorageTest):
def test_corrupt_file(self):
"""A corrupted file should be handled and raise a KeyError instead"""
+ from gridfs import GridFS
+ from gridfs.errors import CorruptGridFile
+
cache = self.init_cache()
cache['key'] = 'value'
with pytest.raises(KeyError), patch.object(GridFS, 'find_one', side_effect=CorruptGridFile):
cache['key']
def test_file_exists(self):
+ from gridfs import GridFS
+ from gridfs.errors import FileExists
+
cache = self.init_cache()
    # This write should just quietly fail
diff --git a/tests/integration/test_redis.py b/tests/integration/test_redis.py
index 08f1ee0..83a15d5 100644
--- a/tests/integration/test_redis.py
+++ b/tests/integration/test_redis.py
@@ -2,7 +2,7 @@ from unittest.mock import patch
import pytest
-from requests_cache.backends.redis import RedisCache, RedisDict, RedisHashDict
+from requests_cache.backends import RedisCache, RedisDict, RedisHashDict
from tests.conftest import fail_if_no_connection
from tests.integration.base_cache_test import BaseCacheTest
from tests.integration.base_storage_test import BaseStorageTest
diff --git a/tests/unit/test_base_cache.py b/tests/unit/test_base_cache.py
index 6e68ea5..f720cb8 100644
--- a/tests/unit/test_base_cache.py
+++ b/tests/unit/test_base_cache.py
@@ -6,6 +6,7 @@ from time import sleep
from unittest.mock import patch
import pytest
+from requests import Request
from requests_cache.backends import BaseCache, SQLiteDict
from requests_cache.models import CachedRequest, CachedResponse
@@ -21,7 +22,7 @@ YESTERDAY = datetime.utcnow() - timedelta(days=1)
logger = getLogger(__name__)
-class TimeBomb:
+class InvalidResponse:
"""Class that will raise an error when unpickled"""
def __init__(self):
@@ -31,64 +32,23 @@ class TimeBomb:
raise ValueError('Invalid response!')
-def test_urls__with_invalid_response(mock_session):
- responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
- responses[2] = AttributeError
- with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
- expected_urls = [MOCKED_URL, MOCKED_URL_JSON]
- assert set(mock_session.cache.urls) == set(expected_urls)
-
- # The invalid response should be skipped, but remain in the cache for now
- assert len(mock_session.cache.responses.keys()) == 3
-
-
-def test_keys(mock_session):
- for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]:
- mock_session.get(url)
-
- all_keys = set(mock_session.cache.responses.keys()) | set(mock_session.cache.redirects.keys())
- assert set(mock_session.cache.keys()) == all_keys
-
-
-def test_values(mock_session):
- for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
- mock_session.get(url)
-
- responses = list(mock_session.cache.values())
- assert len(responses) == 3
- assert all([isinstance(response, CachedResponse) for response in responses])
-
-
-@pytest.mark.parametrize('include_expired, expected_count', [(False, 1), (True, 2)])
-def test_values__with_invalid_responses(include_expired, expected_count, mock_session):
- """values() should always exclude invalid responses, and optionally exclude expired responses"""
- responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
- responses[1] = AttributeError
- responses[2] = CachedResponse(expires=YESTERDAY, url='test')
-
- with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
- values = mock_session.cache.values(include_expired=include_expired)
- assert len(list(values)) == expected_count
-
- # The invalid response should be skipped, but remain in the cache for now
- assert len(mock_session.cache.responses.keys()) == 3
-
+def test_contains__key(mock_session):
+ mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+ key = list(mock_session.cache.responses.keys())[0]
+ assert mock_session.cache.contains(key)
+ assert not mock_session.cache.contains(f'{key}_b')
-@pytest.mark.parametrize('include_expired, expected_count', [(False, 2), (True, 3)])
-def test_response_count(include_expired, expected_count, mock_session):
- """response_count() should always exclude invalid responses, and optionally exclude expired
- and invalid responses"""
- mock_session.get(MOCKED_URL)
- mock_session.get(MOCKED_URL_JSON)
- mock_session.cache.responses['expired_response'] = CachedResponse(expires=YESTERDAY)
- mock_session.cache.responses['invalid_response'] = TimeBomb()
- assert mock_session.cache.response_count(include_expired=include_expired) == expected_count
+def test_contains__request(mock_session):
+ mock_session.get(MOCKED_URL, params={'foo': 'bar'})
+ request = Request('GET', MOCKED_URL, params={'foo': 'bar'})
+ assert mock_session.cache.contains(request=request)
+ request.params = None
+ assert not mock_session.cache.contains(request=request)
@patch_normalize_url
-def test_remove__expired(mock_normalize_url, mock_session):
- """Test BaseCache.remove_expired_responses()"""
+def test_delete__expired(mock_normalize_url, mock_session):
unexpired_url = f'{MOCKED_URL}?x=1'
mock_session.mock_adapter.register_uri(
'GET', unexpired_url, status_code=200, text='mock response'
@@ -103,37 +63,18 @@ def test_remove__expired(mock_normalize_url, mock_session):
assert len(mock_session.cache.responses) == 3
# Use the generic BaseCache implementation, not the SQLite-specific one
- BaseCache.remove(mock_session.cache, expired=True)
+ BaseCache.delete(mock_session.cache, expired=True)
assert len(mock_session.cache.responses) == 1
cached_response = list(mock_session.cache.responses.values())[0]
assert cached_response.url == unexpired_url
# Now the last response should be expired as well
sleep(1)
- BaseCache.remove(mock_session.cache, expired=True)
+ BaseCache.delete(mock_session.cache, expired=True)
assert len(mock_session.cache.responses) == 0
-def test_remove__error(mock_session):
- # Start with two cached responses, one of which will raise an error
- response_1 = mock_session.get(MOCKED_URL)
- response_2 = mock_session.get(MOCKED_URL_JSON)
-
- def error_on_key(key):
- if key == response_2.cache_key:
- raise PickleError
- return CachedResponse.from_response(response_1)
-
- # Use the generic BaseCache implementation, not the SQLite-specific one
- with patch.object(SQLiteDict, '__getitem__', side_effect=error_on_key):
- BaseCache.remove(mock_session.cache, expired=True)
-
- assert len(mock_session.cache.responses) == 1
- assert mock_session.get(MOCKED_URL).from_cache is True
- assert mock_session.get(MOCKED_URL_JSON).from_cache is False
-
-
-def test_remove__expired__per_request(mock_session):
+def test_delete__expired__per_request(mock_session):
# Cache 3 responses with different expiration times
second_url = f'{MOCKED_URL}/endpoint_2'
third_url = f'{MOCKED_URL}/endpoint_3'
@@ -144,21 +85,40 @@ def test_remove__expired__per_request(mock_session):
mock_session.get(third_url, expire_after=4)
# All 3 responses should still be cached
- mock_session.cache.remove(expired=True)
+ mock_session.cache.delete(expired=True)
for response in mock_session.cache.responses.values():
logger.info(f'Expires in {response.expires_delta} seconds')
assert len(mock_session.cache.responses) == 3
# One should be expired after 2s, and another should be expired after 4s
sleep(2)
- mock_session.cache.remove(expired=True)
+ mock_session.cache.delete(expired=True)
assert len(mock_session.cache.responses) == 2
sleep(2)
- mock_session.cache.remove(expired=True)
+ mock_session.cache.delete(expired=True)
assert len(mock_session.cache.responses) == 1
-def test_remove__older_than(mock_session):
+def test_delete__invalid(mock_session):
+ # Start with two cached responses, one of which will raise an error
+ response_1 = mock_session.get(MOCKED_URL)
+ response_2 = mock_session.get(MOCKED_URL_JSON)
+
+ def error_on_key(key):
+ if key == response_2.cache_key:
+ raise PickleError
+ return CachedResponse.from_response(response_1)
+
+ # Use the generic BaseCache implementation, not the SQLite-specific one
+ with patch.object(SQLiteDict, '__getitem__', side_effect=error_on_key):
+ BaseCache.delete(mock_session.cache, expired=True, invalid=True)
+
+ assert len(mock_session.cache.responses) == 1
+ assert mock_session.get(MOCKED_URL).from_cache is True
+ assert mock_session.get(MOCKED_URL_JSON).from_cache is False
+
+
+def test_delete__older_than(mock_session):
# Cache 4 responses with different creation times
response_0 = CachedResponse(request=CachedRequest(method='GET', url='https://test.com/test_0'))
mock_session.cache.save_response(response_0)
@@ -174,30 +134,29 @@ def test_remove__older_than(mock_session):
# Incrementally remove responses older than 3, 2, and 1 seconds
assert len(mock_session.cache.responses) == 4
- mock_session.cache.remove(older_than=timedelta(seconds=3))
+ mock_session.cache.delete(older_than=timedelta(seconds=3))
assert len(mock_session.cache.responses) == 3
- mock_session.cache.remove(older_than=timedelta(seconds=2))
+ mock_session.cache.delete(older_than=timedelta(seconds=2))
assert len(mock_session.cache.responses) == 2
- mock_session.cache.remove(older_than=timedelta(seconds=1))
+ mock_session.cache.delete(older_than=timedelta(seconds=1))
assert len(mock_session.cache.responses) == 1
# Remove the last response after it's 1 second old
sleep(1)
- mock_session.cache.remove(older_than=timedelta(seconds=1))
+ mock_session.cache.delete(older_than=timedelta(seconds=1))
assert len(mock_session.cache.responses) == 0
-def test_remove_expired_responses(mock_session):
- """Test for backwards-compatibility"""
- with patch.object(mock_session.cache, 'remove') as mock_remove, patch.object(
- mock_session.cache, 'reset_expiration'
- ) as mock_reset:
- mock_session.cache.remove_expired_responses(expire_after=1)
- mock_remove.assert_called_once_with(expired=True, invalid=True)
- mock_reset.assert_called_once_with(1)
+def test_delete__requests(mock_session):
+ urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
+ for url in urls:
+ mock_session.get(url)
- mock_session.cache.remove_expired_responses()
- assert mock_remove.call_count == 2 and mock_reset.call_count == 1
+ requests = [Request('GET', url).prepare() for url in urls]
+ mock_session.cache.delete(requests=requests)
+
+ for request in requests:
+ assert not mock_session.cache.contains(request=request)
def test_reset_expiration__extend_expiration(mock_session):
@@ -231,13 +190,48 @@ def test_clear(mock_session):
assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-def test_has_url(mock_session):
- mock_session.get(MOCKED_URL)
- assert mock_session.cache.has_url(MOCKED_URL)
- assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
+def test_save_response__manual(mock_session):
+ response = mock_session.get(MOCKED_URL)
+ mock_session.cache.clear()
+ mock_session.cache.save_response(response)
+
+def test_update(mock_session):
+ src_cache = BaseCache()
+ for i in range(20):
+ src_cache.responses[f'key_{i}'] = f'value_{i}'
+ src_cache.redirects[f'key_{i}'] = f'value_{i}'
-def test_has_url__request_args(mock_session):
+ mock_session.cache.update(src_cache)
+ assert len(mock_session.cache.responses) == 20
+ assert len(mock_session.cache.redirects) == 20
+
+
+@patch_normalize_url
+def test_urls(mock_normalize_url, mock_session):
+ for url in [MOCKED_URL, MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
+ mock_session.get(url)
+
+ expected_urls = [MOCKED_URL_JSON, MOCKED_URL, MOCKED_URL_HTTPS]
+ assert mock_session.cache.urls() == expected_urls
+
+
+def test_urls__error(mock_session):
+ responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
+ responses[2] = AttributeError
+ with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
+ expected_urls = [MOCKED_URL_JSON, MOCKED_URL]
+ assert mock_session.cache.urls() == expected_urls
+
+ # The invalid response should be skipped, but remain in the cache
+ assert len(mock_session.cache.responses.keys()) == 3
+
+
+# Deprecated methods
+# --------------------
+
+
+def test_has_url(mock_session):
mock_session.get(MOCKED_URL, params={'foo': 'bar'})
assert mock_session.cache.has_url(MOCKED_URL, params={'foo': 'bar'})
assert not mock_session.cache.has_url(MOCKED_URL)
@@ -265,36 +259,64 @@ def test_delete_url__nonexistent_response(mock_session):
mock_session.cache.delete_url(MOCKED_URL) # Should fail silently
-def test_delete_url__redirect(mock_session):
- mock_session.get(MOCKED_URL_REDIRECT)
- assert mock_session.cache.has_url(MOCKED_URL_REDIRECT)
-
- mock_session.cache.delete_url(MOCKED_URL_REDIRECT)
- assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
+def test_delete_urls(mock_session):
+ mock_session.get(MOCKED_URL)
+ mock_session.cache.delete_urls([MOCKED_URL])
+ assert not mock_session.cache.has_url(MOCKED_URL)
-def test_delete_urls(mock_session):
- urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
- for url in urls:
+def test_keys(mock_session):
+ for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]:
mock_session.get(url)
- mock_session.cache.delete_urls(urls)
- for url in urls:
- assert not mock_session.cache.has_url(MOCKED_URL_REDIRECT)
+ all_keys = set(mock_session.cache.responses.keys()) | set(mock_session.cache.redirects.keys())
+ assert set(mock_session.cache.keys()) == all_keys
-def test_save_response__manual(mock_session):
- response = mock_session.get(MOCKED_URL)
- mock_session.cache.clear()
- mock_session.cache.save_response(response)
+def test_remove_expired_responses(mock_session):
+ """Test for backwards-compatibility"""
+ with patch.object(mock_session.cache, 'delete') as mock_delete, patch.object(
+ mock_session.cache, 'reset_expiration'
+ ) as mock_reset:
+ mock_session.cache.remove_expired_responses(expire_after=1)
+ mock_delete.assert_called_once_with(expired=True, invalid=True)
+ mock_reset.assert_called_once_with(1)
+ mock_session.cache.remove_expired_responses()
+ assert mock_delete.call_count == 2 and mock_reset.call_count == 1
-def test_update(mock_session):
- src_cache = BaseCache()
- for i in range(20):
- src_cache.responses[f'key_{i}'] = f'value_{i}'
- src_cache.redirects[f'key_{i}'] = f'value_{i}'
- mock_session.cache.update(src_cache)
- assert len(mock_session.cache.responses) == 20
- assert len(mock_session.cache.redirects) == 20
+@pytest.mark.parametrize('check_expiry, expected_count', [(True, 2), (False, 3)])
+def test_response_count(check_expiry, expected_count, mock_session):
+ """response_count() should always exclude invalid responses, and optionally exclude expired
+ responses"""
+ mock_session.get(MOCKED_URL)
+ mock_session.get(MOCKED_URL_JSON)
+
+ mock_session.cache.responses['expired_response'] = CachedResponse(expires=YESTERDAY)
+ mock_session.cache.responses['invalid_response'] = InvalidResponse()
+ assert mock_session.cache.response_count(check_expiry=check_expiry) == expected_count
+
+
+def test_values(mock_session):
+ for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
+ mock_session.get(url)
+
+ responses = list(mock_session.cache.values())
+ assert len(responses) == 3
+ assert all([isinstance(response, CachedResponse) for response in responses])
+
+
+@pytest.mark.parametrize('check_expiry, expected_count', [(True, 1), (False, 2)])
+def test_values__with_invalid_responses(check_expiry, expected_count, mock_session):
+ """values() should always exclude invalid responses, and optionally exclude expired responses"""
+ responses = [mock_session.get(url) for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]]
+ responses[1] = AttributeError
+ responses[2] = CachedResponse(expires=YESTERDAY, url='test')
+
+ with patch.object(SQLiteDict, '__getitem__', side_effect=responses):
+ values = mock_session.cache.values(check_expiry=check_expiry)
+ assert len(list(values)) == expected_count
+
+ # The invalid response should be skipped, but remain in the cache for now
+ assert len(mock_session.cache.responses.keys()) == 3
diff --git a/tests/unit/test_patcher.py b/tests/unit/test_patcher.py
index d656b19..5a99d6a 100644
--- a/tests/unit/test_patcher.py
+++ b/tests/unit/test_patcher.py
@@ -81,15 +81,15 @@ def test_is_installed():
assert requests_cache.is_installed() is False
-@patch.object(BaseCache, 'remove')
-def test_remove_expired_responses(mock_remove):
+@patch.object(BaseCache, 'delete')
+def test_remove_expired_responses(mock_delete):
requests_cache.install_cache(backend='memory', expire_after=360)
requests_cache.remove_expired_responses()
- assert mock_remove.called is True
+ assert mock_delete.called is True
requests_cache.uninstall_cache()
-@patch.object(BaseCache, 'remove')
-def test_remove_expired_responses__cache_not_installed(mock_remove):
+@patch.object(BaseCache, 'delete')
+def test_remove_expired_responses__cache_not_installed(mock_delete):
requests_cache.remove_expired_responses()
- assert mock_remove.called is False
+ assert mock_delete.called is False
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 2354ea3..9a2e0dc 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -185,15 +185,6 @@ def test_response_history(mock_session):
assert len(mock_session.cache.redirects) == 1
-@patch_normalize_url
-def test_urls(mock_normalize_url, mock_session):
- for url in [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]:
- mock_session.get(url)
-
- expected_urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_HTTPS]
- assert set(mock_session.cache.urls) == set(expected_urls)
-
-
# Request matching
# -----------------------------------------------------
@@ -630,12 +621,6 @@ def test_url_allowlist(mock_session):
assert not mock_session.cache.has_url(MOCKED_URL)
-def test_remove_expired_responses(mock_session):
- with patch.object(mock_session.cache, 'remove') as mock_remove:
- mock_session.remove_expired_responses()
- mock_remove.assert_called_once_with(expired=True, invalid=True)
-
-
def test_stale_while_revalidate(mock_session):
# Start with expired responses
mocked_url_2 = f'{MOCKED_URL_ETAG}?k=v'
@@ -862,3 +847,13 @@ def test_request_force_refresh__prepared_request(mock_session):
assert response_2.from_cache is False
assert response_3.from_cache is True
assert response_3.expires is not None
+
+
+# Deprecated methods
+# --------------------
+
+
+def test_remove_expired_responses(mock_session):
+ with patch.object(mock_session.cache, 'delete') as mock_delete:
+ mock_session.remove_expired_responses()
+ mock_delete.assert_called_once_with(expired=True, invalid=True)
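
Note: per the deprecated-method tests above, remove_expired_responses() is kept for backwards compatibility and is expected to delegate to delete(expired=True, invalid=True). A minimal sketch of the patched-session path exercised in test_patcher.py (assuming an installed memory backend):

    import requests_cache

    requests_cache.install_cache(backend='memory', expire_after=360)
    # Deprecated helper; should call BaseCache.delete() under the hood
    requests_cache.remove_expired_responses()
    requests_cache.uninstall_cache()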