author    Jordan Cook <jordan.cook.git@proton.me>  2022-09-30 19:22:51 -0500
committer Jordan Cook <jordan.cook.git@proton.me>  2022-09-30 19:45:58 -0500
commit    7d7c9fb9f637ce99a274742eab67aa59b871ad53 (patch)
tree      bac69b60cf19497d84a802a7c3152997a4af973e
parent    f7e029ee27562634d42b609f6484216dc7960a57 (diff)
parent    8be0f51a1e310342814e5ec1d19830aae857b39e (diff)
download  requests-cache-7d7c9fb9f637ce99a274742eab67aa59b871ad53.tar.gz
Merge pull request #700 from requests-cache/delete-urls
Add 'url' argument to BaseCache.contains() and delete()
-rw-r--r--  HISTORY.md                              18
-rw-r--r--  docs/user_guide/expiration.md           15
-rw-r--r--  docs/user_guide/inspection.md           11
-rw-r--r--  requests_cache/backends/base.py         14
-rw-r--r--  tests/integration/base_cache_test.py     4
-rw-r--r--  tests/integration/test_mongodb.py        4
-rw-r--r--  tests/sample_data/sample.db.1.0.0-beta (renamed from tests/sample_data/sample.db.1.0.0-alpha)  bin 122880 -> 122880 bytes
-rw-r--r--  tests/unit/test_base_cache.py           21
-rw-r--r--  tests/unit/test_session.py              39
9 files changed, 82 insertions, 44 deletions
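In practice, the arguments added by this PR can be used as in the following minimal sketch (the cache name `'demo_cache'` and the httpbin URL are placeholders; `contains(url=...)` and `delete(urls=...)` are the new arguments shown in the diff below):

```python
from requests_cache import CachedSession

session = CachedSession('demo_cache')
session.get('https://httpbin.org/get')

# New in this PR: check for a cached GET request by URL
assert session.cache.contains(url='https://httpbin.org/get')

# New in this PR: delete cached GET requests by URL
session.cache.delete(urls=['https://httpbin.org/get'])
assert not session.cache.contains(url='https://httpbin.org/get')
```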
diff --git a/HISTORY.md b/HISTORY.md
index ed8c6c5..8fd923b 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -17,8 +17,8 @@
**Session settings:**
* All settings that affect cache behavior can now be accessed and modified via `CachedSession.settings`
-* Add `always_revalidate` session setting to always revalidate before using a cached response (if a validator) is available.
-* Add `only_if_cached` settion setting to return only cached results without sending real requests
+* Add `always_revalidate` session setting to always revalidate before using a cached response (if a validator is available).
+* Add `only_if_cached` session setting to return only cached results without sending real requests
* Add `stale_while_revalidate` session setting to return a stale response initially, while a non-blocking request is sent to refresh the response
* Make behavior for `stale_if_error` partially consistent with `Cache-Control: stale-if-error`: Add support for time values (int, timedelta, etc.) in addition to `True/False`
@@ -65,9 +65,11 @@
**Cache convenience methods:**
* Add `expired` and `invalid` arguments to `BaseCache.delete()` (to replace `remove_expired_responses()`)
+* Add `urls` and `requests` arguments to `BaseCache.delete()` (to replace `delete_url()`)
* Add `older_than` argument to `BaseCache.delete()` to delete responses older than a given value
* Add `requests` argument to `BaseCache.delete()` to delete responses matching the given requests
* Add `BaseCache.contains()` method to check for cached requests either by key or by `requests.Request` object
+* Add `url` argument to `BaseCache.contains()` method (to replace `has_url()`)
* Add `BaseCache.filter()` method to get responses from the cache with various filters
* Add `BaseCache.reset_expiration()` method to reset expiration for existing responses
* Add `BaseCache.recreate_keys()` method to recreate cache keys for all previously cached responses
@@ -87,9 +89,9 @@
**Bugfixes:**
* Fix usage of memory backend with `install_cache()`
+* Fix issue on Windows with occasional missing `CachedResponse.created_at` timestamp
* Add `CachedRequest.path_url` property for compatibility with `RequestEncodingMixin`
* Add compatibility with cattrs 22.1+
-* Fix issue on Windows with occasional missing `CachedResponse.created_at` timestamp
**Dependencies:**
* Replace `appdirs` with `platformdirs`
@@ -98,12 +100,12 @@
The following methods are deprecated, and will be removed in a future release. The recommended
replacements are listed below:
-* `BaseCache.remove_expired_responses()`: `BaseCache.delete()`
-* `CachedSession.remove_expired_responses()`: `BaseCache.delete()`
-* `BaseCache.delete_url()`: `BaseCache.delete()`
-* `BaseCache.delete_urls()`: `BaseCache.delete()`
+* `BaseCache.remove_expired_responses()`: `BaseCache.delete(expired=True)`
+* `CachedSession.remove_expired_responses()`: `BaseCache.delete(expired=True)`
+* `BaseCache.delete_url()`: `BaseCache.delete(urls=[...])`
+* `BaseCache.delete_urls()`: `BaseCache.delete(urls=[...])`
* `BaseCache.has_key()`: `BaseCache.contains()`
-* `BaseCache.has_url()`: `BaseCache.contains()`
+* `BaseCache.has_url()`: `BaseCache.contains(url=...)`
* `BaseCache.keys()`: `BaseCache.filter()`
* `BaseCache.values()`: `BaseCache.filter()`
* `BaseCache.response_count()`: `BaseCache.filter()`
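Put together, the deprecations listed above translate to a migration along these lines (a sketch assuming `session` is an existing `CachedSession`; URLs are placeholders, and the `url`/`urls` shortcuts only match GET requests):

```python
# Before (deprecated)
session.cache.delete_url('https://httpbin.org/get')
session.cache.has_url('https://httpbin.org/get')
session.cache.remove_expired_responses()

# After
session.cache.delete(urls=['https://httpbin.org/get'])
session.cache.contains(url='https://httpbin.org/get')
session.cache.delete(expired=True)
```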
diff --git a/docs/user_guide/expiration.md b/docs/user_guide/expiration.md
index ef1d9e1..4c9fc88 100644
--- a/docs/user_guide/expiration.md
+++ b/docs/user_guide/expiration.md
@@ -181,15 +181,20 @@ Or apply a new expiration value to previously cached responses:
>>> session.cache.reset_expiration(timedelta(days=30))
```
-Finally, you can delete responses matching specific requests or {ref}`cache keys <custom-matching>`:
+Finally, you can delete individual responses matching specific requests or
+{ref}`cache keys <custom-matching>`:
```python
>>> from requests import Request
->>> request_1 = Request('GET', 'https://httpbin.org/get')
->>> request_2 = Request('GET', 'https://httpbin.org/get', params={'key': 'value'})
+
+# Delete a simple GET request by URL
+>>> session.cache.delete(urls=['https://httpbin.org/json'])
+
+# Delete by additional request values
+>>> request_1 = Request('GET', 'https://httpbin.org/get', params={'key': 'value'})
+>>> request_2 = Request('GET', 'https://httpbin.org/get', headers={'header': 'value'})
>>> session.cache.delete(requests=[request_1, request_2])
-```
-```python
+# Delete by cache key
>>> session.cache.delete('e25f7e6326966e82')
```
diff --git a/docs/user_guide/inspection.md b/docs/user_guide/inspection.md
index 873f6a3..9f9991e 100644
--- a/docs/user_guide/inspection.md
+++ b/docs/user_guide/inspection.md
@@ -42,7 +42,14 @@ True 2021-01-01 18:00:00 2021-01-02 18:00:00 False
### Checking for responses
Use {py:meth}`.BaseCache.contains` to check if a given request is cached.
-Either check with a {py:class}`~requests.models.Request` object:
+
+Check if a specific URL is cached:
+```python
+>>> print(session.cache.contains(url='https://httpbin.org/get'))
+```
+
+To match additional request values (parameters, headers, etc), you can pass a
+{py:class}`~requests.models.Request` object instead:
```python
>>> from requests import Request
@@ -50,7 +57,7 @@ Either check with a {py:class}`~requests.models.Request` object:
>>> print(session.cache.contains(request=request))
```
-Or with a cache key:
+You can also check for a specific cache key:
```python
>>> print(session.cache.contains('d1e666e9fdfb3f86'))
```
diff --git a/requests_cache/backends/base.py b/requests_cache/backends/base.py
index cd1dd44..3afe4e7 100644
--- a/requests_cache/backends/base.py
+++ b/requests_cache/backends/base.py
@@ -124,13 +124,17 @@ class BaseCache:
self,
key: str = None,
request: AnyRequest = None,
+ url: str = None,
):
"""Check if the specified request is cached
Args:
key: Check for a specific cache key
request: Check for a matching request, according to current request matching settings
+ url: Check for a matching GET request with the specified URL
"""
+ if url:
+ request = Request('GET', url)
if request and not key:
key = self.create_key(request)
return key in self.responses or key in self.redirects
@@ -142,6 +146,7 @@ class BaseCache:
invalid: bool = False,
older_than: ExpirationTime = None,
requests: Iterable[AnyRequest] = None,
+ urls: Iterable[str] = None,
):
"""Remove responses from the cache according one or more conditions.
@@ -151,8 +156,11 @@ class BaseCache:
invalid: Remove all invalid responses (that can't be deserialized with current settings)
older_than: Remove responses older than this value, relative to ``response.created_at``
requests: Remove matching responses, according to current request matching settings
+ urls: Remove matching GET requests for the specified URL(s)
"""
delete_keys: List[str] = list(keys) if keys else []
+ if urls:
+ requests = list(requests or []) + [Request('GET', url).prepare() for url in urls]
if requests:
delete_keys += [self.create_key(request) for request in requests]
@@ -247,21 +255,21 @@ class BaseCache:
def delete_url(self, url: str, method: str = 'GET', **kwargs):
warn(
- 'BaseCache.delete_url() is deprecated; please use .delete() instead',
+ 'BaseCache.delete_url() is deprecated; please use .delete(urls=...) instead',
DeprecationWarning,
)
self.delete(requests=[Request(method, url, **kwargs)])
def delete_urls(self, urls: Iterable[str], method: str = 'GET', **kwargs):
warn(
- 'BaseCache.delete_urls() is deprecated; please use .delete() instead',
+ 'BaseCache.delete_urls() is deprecated; please use .delete(urls=...) instead',
DeprecationWarning,
)
self.delete(requests=[Request(method, url, **kwargs) for url in urls])
def has_url(self, url: str, method: str = 'GET', **kwargs) -> bool:
warn(
- 'BaseCache.has_url() is deprecated; please use .contains() instead',
+ 'BaseCache.has_url() is deprecated; please use .contains(url=...) instead',
DeprecationWarning,
)
return self.contains(request=Request(method, url, **kwargs))
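Roughly, the new `url` argument is shorthand for building a GET request and checking its cache key, as in this simplified sketch (the `contains_url()` helper is hypothetical; the real logic lives inside `BaseCache.contains()` and `delete()` above):

```python
from requests import Request

def contains_url(cache, url: str) -> bool:
    # A bare URL is treated as a GET request and keyed with the same
    # request-matching settings used when the response was cached
    key = cache.create_key(Request('GET', url))
    return key in cache.responses or key in cache.redirects
```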
diff --git a/tests/integration/base_cache_test.py b/tests/integration/base_cache_test.py
index 546f6c5..4a55504 100644
--- a/tests/integration/base_cache_test.py
+++ b/tests/integration/base_cache_test.py
@@ -302,8 +302,8 @@ class BaseCacheTest:
assert len(session.cache.responses.keys()) == 2
assert len(session.cache.redirects.keys()) == 3
- assert not session.cache.has_url(httpbin('redirect/1'))
- assert not any([session.cache.has_url(httpbin(f)) for f in HTTPBIN_FORMATS])
+ assert not session.cache.contains(url=httpbin('redirect/1'))
+ assert not any([session.cache.contains(url=httpbin(f)) for f in HTTPBIN_FORMATS])
@pytest.mark.parametrize('method', HTTPBIN_METHODS)
def test_filter_request_headers(self, method):
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index fb56820..4a3a114 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -61,9 +61,9 @@ class TestMongoCache(BaseCacheTest):
response = session.get(httpbin('get'))
assert response.from_cache is True
- # Wait up to 60 seconds for removal background process to run
+ # Wait for removal background process to run
# Unfortunately there doesn't seem to be a way to manually trigger it
- for i in range(60):
+ for i in range(70):
if response.cache_key not in session.cache.responses:
logger.debug(f'Removed {response.cache_key} after {i} seconds')
break
diff --git a/tests/sample_data/sample.db.1.0.0-alpha b/tests/sample_data/sample.db.1.0.0-beta
index b727b22..148979a 100644
--- a/tests/sample_data/sample.db.1.0.0-alpha
+++ b/tests/sample_data/sample.db.1.0.0-beta
Binary files differ
diff --git a/tests/unit/test_base_cache.py b/tests/unit/test_base_cache.py
index 0a0b265..fe25337 100644
--- a/tests/unit/test_base_cache.py
+++ b/tests/unit/test_base_cache.py
@@ -50,6 +50,12 @@ def test_contains__request(mock_session):
assert not mock_session.cache.contains(request=request)
+def test_contains__url(mock_session):
+ mock_session.get(MOCKED_URL)
+ assert mock_session.cache.contains(url=MOCKED_URL)
+ assert not mock_session.cache.contains(url=f'{MOCKED_URL}?foo=bar')
+
+
@patch_normalize_url
def test_delete__expired(mock_normalize_url, mock_session):
unexpired_url = f'{MOCKED_URL}?x=1'
@@ -151,6 +157,17 @@ def test_delete__older_than(mock_session):
assert len(mock_session.cache.responses) == 0
+def test_delete__urls(mock_session):
+ urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
+ for url in urls:
+ mock_session.get(url)
+
+ mock_session.cache.delete(urls=urls)
+
+ for url in urls:
+ assert not mock_session.cache.contains(url=url)
+
+
def test_delete__requests(mock_session):
urls = [MOCKED_URL, MOCKED_URL_JSON, MOCKED_URL_REDIRECT]
for url in urls:
@@ -228,8 +245,8 @@ def test_clear(mock_session):
mock_session.get(MOCKED_URL)
mock_session.get(MOCKED_URL_REDIRECT)
mock_session.cache.clear()
- assert not mock_session.cache.contains(request=Request('GET', MOCKED_URL))
- assert not mock_session.cache.contains(request=Request('GET', MOCKED_URL_REDIRECT))
+ assert not mock_session.cache.contains(url=MOCKED_URL)
+ assert not mock_session.cache.contains(url=MOCKED_URL_REDIRECT)
def test_save_response__manual(mock_session):
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 60f2d42..53e368c 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -453,12 +453,12 @@ def test_allowable_codes(mock_session):
# This request should be cached
mock_session.get(MOCKED_URL_404)
- assert mock_session.cache.has_url(MOCKED_URL_404)
+ assert mock_session.cache.contains(url=MOCKED_URL_404)
assert mock_session.get(MOCKED_URL_404).from_cache is True
# This request should be filtered out on both read and write
mock_session.get(MOCKED_URL_500)
- assert not mock_session.cache.has_url(MOCKED_URL_500)
+ assert not mock_session.cache.contains(url=MOCKED_URL_500)
assert mock_session.get(MOCKED_URL_500).from_cache is False
@@ -467,20 +467,20 @@ def test_allowable_methods(mock_session):
# This request should be cached
mock_session.options(MOCKED_URL)
- assert mock_session.cache.has_url(MOCKED_URL, method='OPTIONS')
+ assert mock_session.cache.contains(request=Request('OPTIONS', MOCKED_URL))
assert mock_session.options(MOCKED_URL).from_cache is True
# These requests should be filtered out on both read and write
mock_session.put(MOCKED_URL)
- assert not mock_session.cache.has_url(MOCKED_URL, method='PUT')
+ assert not mock_session.cache.contains(request=Request('PUT', MOCKED_URL))
assert mock_session.put(MOCKED_URL).from_cache is False
mock_session.patch(MOCKED_URL)
- assert not mock_session.cache.has_url(MOCKED_URL, method='PATCH')
+ assert not mock_session.cache.contains(request=Request('PATCH', MOCKED_URL))
assert mock_session.patch(MOCKED_URL).from_cache is False
mock_session.delete(MOCKED_URL)
- assert not mock_session.cache.has_url(MOCKED_URL, method='DELETE')
+ assert not mock_session.cache.contains(request=Request('DELETE', MOCKED_URL))
assert mock_session.delete(MOCKED_URL).from_cache is False
@@ -532,12 +532,12 @@ def test_filter_fn(mock_normalize_url, mock_session):
# This request should be cached
mock_session.get(MOCKED_URL)
- assert mock_session.cache.has_url(MOCKED_URL)
+ assert mock_session.cache.contains(url=MOCKED_URL)
assert mock_session.get(MOCKED_URL).from_cache is True
# This request should be filtered out on both read and write
mock_session.get(MOCKED_URL_JSON)
- assert not mock_session.cache.has_url(MOCKED_URL_JSON)
+ assert not mock_session.cache.contains(url=MOCKED_URL_JSON)
assert mock_session.get(MOCKED_URL_JSON).from_cache is False
@@ -547,7 +547,7 @@ def test_filter_fn__retroactive(mock_normalize_url, mock_session):
mock_session.get(MOCKED_URL_JSON)
mock_session.settings.filter_fn = lambda r: r.request.url != MOCKED_URL_JSON
mock_session.get(MOCKED_URL_JSON)
- assert not mock_session.cache.has_url(MOCKED_URL_JSON)
+ assert not mock_session.cache.contains(url=MOCKED_URL_JSON)
def test_key_fn(mock_session):
@@ -586,7 +586,7 @@ def test_expire_after_alias(mock_session):
def test_do_not_cache(mock_session):
"""DO_NOT_CACHE should bypass the cache on both read and write"""
mock_session.get(MOCKED_URL)
- assert mock_session.cache.has_url(MOCKED_URL)
+ assert mock_session.cache.contains(url=MOCKED_URL)
# Skip read
response = mock_session.get(MOCKED_URL, expire_after=DO_NOT_CACHE)
@@ -595,7 +595,7 @@ def test_do_not_cache(mock_session):
# Skip write
mock_session.settings.expire_after = DO_NOT_CACHE
mock_session.get(MOCKED_URL_JSON)
- assert not mock_session.cache.has_url(MOCKED_URL_JSON)
+ assert not mock_session.cache.contains(url=MOCKED_URL_JSON)
def test_expire_immediately(mock_session):
@@ -604,7 +604,7 @@ def test_expire_immediately(mock_session):
mock_session.settings.expire_after = EXPIRE_IMMEDIATELY
mock_session.get(MOCKED_URL)
response = mock_session.get(MOCKED_URL)
- assert not mock_session.cache.has_url(MOCKED_URL)
+ assert not mock_session.cache.contains(url=MOCKED_URL)
assert response.from_cache is False
# With validator
@@ -650,7 +650,7 @@ def test_url_allowlist(mock_session):
assert mock_session.get(MOCKED_URL_JSON).from_cache is True
mock_session.get(MOCKED_URL)
assert mock_session.get(MOCKED_URL).from_cache is False
- assert not mock_session.cache.has_url(MOCKED_URL)
+ assert not mock_session.cache.contains(url=MOCKED_URL)
def test_stale_while_revalidate(mock_session):
@@ -659,7 +659,7 @@ def test_stale_while_revalidate(mock_session):
mock_session.settings.stale_while_revalidate = True
mock_session.get(MOCKED_URL_ETAG, expire_after=timedelta(seconds=-2))
mock_session.get(mocked_url_2, expire_after=timedelta(seconds=-2))
- assert mock_session.cache.has_url(MOCKED_URL_ETAG)
+ assert mock_session.cache.contains(url=MOCKED_URL_ETAG)
# First, let's just make sure the correct method is called
mock_session.mock_adapter.register_uri('GET', MOCKED_URL_ETAG, status_code=304)
@@ -677,11 +677,10 @@ def test_stale_while_revalidate(mock_session):
response = mock_session.get(mocked_url_2, expire_after=60)
assert response.from_cache is True and response.is_expired is True
assert time() - start < 0.1
- sleep(0.1)
+ sleep(1) # Background thread may be a bit slow on CI runner
mock_send.assert_called()
# Finally, check that the cached response has been refreshed
- sleep(0.2) # Background thread may be a bit slow on CI runner
response = mock_session.get(mocked_url_2)
assert response.from_cache is True and response.is_expired is False
@@ -744,7 +743,7 @@ def test_request_expire_after__disable_expiration(mock_session):
def test_request_expire_after__prepared_request(mock_session):
"""Pre-request expiration should also work for PreparedRequests with CachedSession.send()"""
mock_session.settings.expire_after = None
- request = Request(method='GET', url=MOCKED_URL, headers={}, data=None).prepare()
+ request = Request('GET', MOCKED_URL, headers={}, data=None).prepare()
response = mock_session.send(request, expire_after=1)
assert response.from_cache is False
assert mock_session.send(request).from_cache is True
@@ -803,7 +802,7 @@ def test_request_only_if_cached__skips_revalidate(mock_session):
def test_request_only_if_cached__prepared_request(mock_session):
"""The only_if_cached option should also work for PreparedRequests with CachedSession.send()"""
- request = Request(method='GET', url=MOCKED_URL, headers={}).prepare()
+ request = Request('GET', MOCKED_URL, headers={}).prepare()
response = mock_session.send(request, only_if_cached=True)
assert response.status_code == 504
with pytest.raises(HTTPError):
@@ -848,7 +847,7 @@ def test_request_refresh__no_validator(mock_session):
def test_request_refresh__prepared_request(mock_session):
"""The refresh option should also work for PreparedRequests with CachedSession.send()"""
mock_session.settings.expire_after = 60
- request = Request(method='GET', url=MOCKED_URL_ETAG, headers={}, data=None).prepare()
+ request = Request('GET', MOCKED_URL_ETAG, headers={}, data=None).prepare()
response_1 = mock_session.send(request)
response_2 = mock_session.send(request)
mock_session.mock_adapter.register_uri('GET', MOCKED_URL_ETAG, status_code=304)
@@ -880,7 +879,7 @@ def test_request_force_refresh(mock_session):
def test_request_force_refresh__prepared_request(mock_session):
"""The force_refresh option should also work for PreparedRequests with CachedSession.send()"""
mock_session.settings.expire_after = 60
- request = Request(method='GET', url=MOCKED_URL, headers={}, data=None)
+ request = Request('GET', MOCKED_URL, headers={}, data=None)
response_1 = mock_session.send(request.prepare())
response_2 = mock_session.send(request.prepare(), force_refresh=True)
response_3 = mock_session.send(request.prepare())