From 023b9bf36a4305f5edceece7ac6eabbd16e2e40d Mon Sep 17 00:00:00 2001
From: Jordan Cook
Date: Sat, 1 Jan 2022 12:02:10 -0600
Subject: Format using a more typical line length of 100

---
 noxfile.py                            |  4 +++-
 pyproject.toml                        |  4 ++--
 requests_cache/backends/dynamodb.py   |  8 ++++++--
 requests_cache/cache_keys.py          |  8 ++++++--
 requests_cache/models/response.py     |  5 ++++-
 requests_cache/serializers/cattrs.py  |  4 +++-
 requests_cache/serializers/preconf.py |  8 ++++++--
 requests_cache/session.py             | 16 +++++++++++++---
 tests/benchmark_serializers.py        |  7 ++++++-
 tests/integration/base_cache_test.py  | 12 +++++++++---
 tests/integration/test_mongodb.py     |  8 +++++++-
 tests/unit/test_cache_control.py      | 12 +++++++++---
 tests/unit/test_cache_keys.py         |  4 +++-
 tests/unit/test_session.py            | 20 ++++++++++++++++----
 14 files changed, 93 insertions(+), 27 deletions(-)

diff --git a/noxfile.py b/noxfile.py
index 941b945..d57980d 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -19,7 +19,9 @@ CLEAN_DIRS = ['dist', 'build', join('docs', '_build'), join('docs', 'modules')]
 
 UNIT_TESTS = join('tests', 'unit')
 INTEGRATION_TESTS = join('tests', 'integration')
-COVERAGE_ARGS = '--cov --cov-report=term --cov-report=html'  # Generate HTML + stdout coverage report
+COVERAGE_ARGS = (
+    '--cov --cov-report=term --cov-report=html'  # Generate HTML + stdout coverage report
+)
 XDIST_ARGS = '--numprocesses=auto --dist=loadfile'  # Run tests in parallel, grouped by test module
 
 
diff --git a/pyproject.toml b/pyproject.toml
index 8509bd6..cd6158f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -109,7 +109,7 @@ requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
 
 [tool.black]
-line-length = 105
+line-length = 100
 skip-string-normalization = true
 
 [tool.coverage.html]
@@ -136,7 +136,7 @@ exclude_lines = [
 
 [tool.isort]
 profile = 'black'
-line_length = 105
+line_length = 100
 skip_gitignore = true
 skip = [
     'examples/',
diff --git a/requests_cache/backends/dynamodb.py b/requests_cache/backends/dynamodb.py
index e1a6089..689b43f 100644
--- a/requests_cache/backends/dynamodb.py
+++ b/requests_cache/backends/dynamodb.py
@@ -64,7 +64,9 @@ class DynamoDbCache(BaseCache):
         kwargs: Additional keyword arguments for :py:meth:`~boto3.session.Session.resource`
     """
 
-    def __init__(self, table_name: str = 'http_cache', connection: ServiceResource = None, **kwargs):
+    def __init__(
+        self, table_name: str = 'http_cache', connection: ServiceResource = None, **kwargs
+    ):
         super().__init__(**kwargs)
         self.responses = DynamoDbDict(table_name, 'responses', connection=connection, **kwargs)
         self.redirects = DynamoDbDict(
@@ -148,7 +150,9 @@ class DynamoDbDict(BaseStorage):
 
         # Depending on the serializer, the value may be either a string or Binary object
         raw_value = result['Item']['value']
-        return self.serializer.loads(raw_value.value if isinstance(raw_value, Binary) else raw_value)
+        return self.serializer.loads(
+            raw_value.value if isinstance(raw_value, Binary) else raw_value
+        )
 
     def __setitem__(self, key, value):
         item = {**self.composite_key(key), 'value': self.serializer.dumps(value)}
diff --git a/requests_cache/cache_keys.py b/requests_cache/cache_keys.py
index 7029c7f..5b86a1a 100644
--- a/requests_cache/cache_keys.py
+++ b/requests_cache/cache_keys.py
@@ -82,7 +82,9 @@ def get_matched_headers(
         included = set(headers) - DEFAULT_EXCLUDE_HEADERS
 
     return [
-        f'{k.lower()}={headers[k]}' for k in sorted(included, key=lambda x: x.lower()) if k in headers
+        f'{k.lower()}={headers[k]}'
+        for k in sorted(included, key=lambda x: x.lower())
+        if k in headers
     ]
 
 
@@ -110,7 +112,9 @@ def normalize_request(request: AnyRequest, ignored_parameters: ParamList) -> Any
     return norm_request
 
 
-def normalize_headers(headers: Mapping[str, str], ignored_parameters: ParamList) -> CaseInsensitiveDict:
+def normalize_headers(
+    headers: Mapping[str, str], ignored_parameters: ParamList
+) -> CaseInsensitiveDict:
     """Sort and filter request headers"""
     if ignored_parameters:
         headers = filter_sort_dict(headers, ignored_parameters)
diff --git a/requests_cache/models/response.py b/requests_cache/models/response.py
index 561d6c8..73b09fd 100755
--- a/requests_cache/models/response.py
+++ b/requests_cache/models/response.py
@@ -48,7 +48,10 @@ class CachedResponse(Response):
 
     @classmethod
     def from_response(
-        cls, original_response: Union[Response, 'CachedResponse'], expires: datetime = None, **kwargs
+        cls,
+        original_response: Union[Response, 'CachedResponse'],
+        expires: datetime = None,
+        **kwargs,
     ):
         """Create a CachedResponse based on an original Response or another CachedResponse object"""
         if isinstance(original_response, CachedResponse):
diff --git a/requests_cache/serializers/cattrs.py b/requests_cache/serializers/cattrs.py
index b28acd0..7b483d7 100644
--- a/requests_cache/serializers/cattrs.py
+++ b/requests_cache/serializers/cattrs.py
@@ -59,7 +59,9 @@ def init_converter(factory: Callable[..., GenConverter] = None):
     converter.register_unstructure_hook(RequestsCookieJar, lambda obj: dict(obj.items()))  # type: ignore
     converter.register_structure_hook(RequestsCookieJar, lambda obj, cls: cookiejar_from_dict(obj))
     converter.register_unstructure_hook(CaseInsensitiveDict, dict)
-    converter.register_structure_hook(CaseInsensitiveDict, lambda obj, cls: CaseInsensitiveDict(obj))
+    converter.register_structure_hook(
+        CaseInsensitiveDict, lambda obj, cls: CaseInsensitiveDict(obj)
+    )
     converter.register_unstructure_hook(HTTPHeaderDict, dict)
     converter.register_structure_hook(HTTPHeaderDict, lambda obj, cls: HTTPHeaderDict(obj))
 
diff --git a/requests_cache/serializers/preconf.py b/requests_cache/serializers/preconf.py
index dce7e60..67d1453 100644
--- a/requests_cache/serializers/preconf.py
+++ b/requests_cache/serializers/preconf.py
@@ -23,7 +23,9 @@ from .._utils import get_placeholder_class
 from .cattrs import CattrStage
 from .pipeline import SerializerPipeline, Stage
 
-base_stage = CattrStage()  #: Base stage for all serializer pipelines (or standalone dict serializer)
+base_stage = (
+    CattrStage()
+)  #: Base stage for all serializer pipelines (or standalone dict serializer)
 dict_serializer = base_stage  #: Partial serializer that unstructures responses into dicts
 bson_preconf_stage = CattrStage(bson_preconf.make_converter)  #: Pre-serialization steps for BSON
 json_preconf_stage = CattrStage(json_preconf.make_converter)  #: Pre-serialization steps for JSON
@@ -47,7 +49,9 @@ try:
         """
         return Stage(Signer(secret_key=secret_key, salt=salt), dumps='sign', loads='unsign')
 
-    def safe_pickle_serializer(secret_key=None, salt='requests-cache', **kwargs) -> SerializerPipeline:
+    def safe_pickle_serializer(
+        secret_key=None, salt='requests-cache', **kwargs
+    ) -> SerializerPipeline:
         """Create a serializer that uses ``pickle`` + ``itsdangerous`` to add a signature to
         responses on write, and validate that signature with a secret key on read.
""" diff --git a/requests_cache/session.py b/requests_cache/session.py index 578e7c8..26c06e5 100644 --- a/requests_cache/session.py +++ b/requests_cache/session.py @@ -198,7 +198,11 @@ class CacheMixin(MIXIN_BASE): return set_response_defaults(response, actions.cache_key) def _resend( - self, request: PreparedRequest, actions: CacheActions, cached_response: CachedResponse, **kwargs + self, + request: PreparedRequest, + actions: CacheActions, + cached_response: CachedResponse, + **kwargs, ) -> AnyResponse: """Attempt to resend the request and cache the new response. If the request fails, delete the stale cache item. @@ -211,7 +215,11 @@ class CacheMixin(MIXIN_BASE): raise def _resend_and_ignore( - self, request: PreparedRequest, actions: CacheActions, cached_response: CachedResponse, **kwargs + self, + request: PreparedRequest, + actions: CacheActions, + cached_response: CachedResponse, + **kwargs, ) -> AnyResponse: """Attempt to resend the request and cache the new response. If there are any errors, ignore them and and return the stale cache item. @@ -223,7 +231,9 @@ class CacheMixin(MIXIN_BASE): response.raise_for_status() return response except Exception: - logger.warning(f'Request for URL {request.url} failed; using cached response', exc_info=True) + logger.warning( + f'Request for URL {request.url} failed; using cached response', exc_info=True + ) return cached_response def _update_revalidated_response( diff --git a/tests/benchmark_serializers.py b/tests/benchmark_serializers.py index 4b786fd..96950a9 100644 --- a/tests/benchmark_serializers.py +++ b/tests/benchmark_serializers.py @@ -42,7 +42,12 @@ except ImportError: sys.path.insert(0, os.path.abspath('..')) from requests_cache import CachedSession -from requests_cache.serializers import CattrStage, bson_serializer, json_serializer, pickle_serializer +from requests_cache.serializers import ( + CattrStage, + bson_serializer, + json_serializer, + pickle_serializer, +) ITERATIONS = 10000 diff --git a/tests/integration/base_cache_test.py b/tests/integration/base_cache_test.py index e35af90..800cebd 100644 --- a/tests/integration/base_cache_test.py +++ b/tests/integration/base_cache_test.py @@ -200,7 +200,9 @@ class BaseCacheTest: response = session.get(httpbin('cache')) assert response.from_cache == expected_from_cache - @pytest.mark.parametrize('validator_headers', [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}]) + @pytest.mark.parametrize( + 'validator_headers', [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}] + ) @pytest.mark.parametrize('cache_headers', [{'Cache-Control': 'max-age=0'}, {'Expires': '0'}]) def test_conditional_request__max_age_0(self, cache_headers, validator_headers): """With both max-age=0 and a validator, the response should be saved and revalidated on next @@ -220,7 +222,9 @@ class BaseCacheTest: assert response.from_cache is True assert response.is_expired is True - @pytest.mark.parametrize('validator_headers', [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}]) + @pytest.mark.parametrize( + 'validator_headers', [{'ETag': ETAG}, {'Last-Modified': LAST_MODIFIED}] + ) @pytest.mark.parametrize('cache_headers', [{'Cache-Control': 'max-age=0'}]) def test_conditional_request_refreshenes_expire_date(self, cache_headers, validator_headers): """Test that revalidation attempt with 304 responses causes stale entry to become fresh again considering @@ -235,7 +239,9 @@ class BaseCacheTest: # Add different Response Header to mocked return value of the session.send() function. 
         updated_response_headers = {**first_response_headers, 'Cache-Control': 'max-age=60'}
         with patch.object(
-            Session, 'send', return_value=MagicMock(status_code=304, headers=updated_response_headers)
+            Session,
+            'send',
+            return_value=MagicMock(status_code=304, headers=updated_response_headers),
         ):
             response = session.get(url, params=first_response_headers)
             assert response.from_cache is True
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index 77ab3ed..d32b0ea 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -4,7 +4,13 @@ import pytest
 from pymongo import MongoClient
 
 from requests_cache._utils import get_valid_kwargs
-from requests_cache.backends import GridFSCache, GridFSPickleDict, MongoCache, MongoDict, MongoPickleDict
+from requests_cache.backends import (
+    GridFSCache,
+    GridFSPickleDict,
+    MongoCache,
+    MongoDict,
+    MongoPickleDict,
+)
 from tests.conftest import fail_if_no_connection
 from tests.integration.base_cache_test import BaseCacheTest
 from tests.integration.base_storage_test import BaseStorageTest
diff --git a/tests/unit/test_cache_control.py b/tests/unit/test_cache_control.py
index 54182e3..d9140d8 100644
--- a/tests/unit/test_cache_control.py
+++ b/tests/unit/test_cache_control.py
@@ -177,7 +177,9 @@ def test_update_from_cached_response(response_headers, expected_validation_heade
         cache_key='key',
         request=MagicMock(url='https://img.site.com/base/img.jpg'),
     )
-    cached_response = CachedResponse(headers=response_headers, expires=datetime.now() - timedelta(1))
+    cached_response = CachedResponse(
+        headers=response_headers, expires=datetime.now() - timedelta(1)
+    )
     actions.update_from_cached_response(cached_response)
 
     assert actions.validation_headers == expected_validation_headers
@@ -232,7 +234,9 @@ def test_update_from_response(headers, expected_expiration):
 
 def test_update_from_response__ignored():
     url = 'https://img.site.com/base/img.jpg'
-    actions = CacheActions.from_request(cache_key='key', request=MagicMock(url=url), cache_control=False)
+    actions = CacheActions.from_request(
+        cache_key='key', request=MagicMock(url=url), cache_control=False
+    )
     actions.update_from_response(MagicMock(url=url, headers={'Cache-Control': 'max-age=5'}))
 
     assert actions.expire_after is None
@@ -246,7 +250,9 @@ def test_update_from_response__revalidate(mock_datetime, cache_headers, validato
     """
     url = 'https://img.site.com/base/img.jpg'
     headers = {**cache_headers, **validator_headers}
-    actions = CacheActions.from_request(cache_key='key', request=MagicMock(url=url), cache_control=True)
+    actions = CacheActions.from_request(
+        cache_key='key', request=MagicMock(url=url), cache_control=True
+    )
     actions.update_from_response(MagicMock(url=url, headers=headers))
     assert actions.expires == mock_datetime.utcnow()
     assert actions.skip_write is False
diff --git a/tests/unit/test_cache_keys.py b/tests/unit/test_cache_keys.py
index c685d2b..f2d8c26 100644
--- a/tests/unit/test_cache_keys.py
+++ b/tests/unit/test_cache_keys.py
@@ -41,7 +41,9 @@ def test_normalize_request__json_body():
         data=b'{"param_1": "value_1", "param_2": "value_2"}',
         headers={'Content-Type': 'application/json'},
     )
-    assert normalize_request(request, ignored_parameters=['param_2']).body == b'{"param_1": "value_1"}'
+    assert (
+        normalize_request(request, ignored_parameters=['param_2']).body == b'{"param_1": "value_1"}'
+    )
 
 
 def test_normalize_request__invalid_json_body():
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index d225e57..41883c0 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -290,7 +290,10 @@ def test_raw_data(method, mock_session):
     """POST and PUT requests with different data (raw) should be cached under different keys"""
    assert mock_session.request(method, MOCKED_URL, data='raw data').from_cache is False
     assert mock_session.request(method, MOCKED_URL, data='raw data').from_cache is True
-    assert mock_session.request(method, MOCKED_URL, data='{"data": "new raw data"}').from_cache is False
+    assert (
+        mock_session.request(method, MOCKED_URL, data='{"data": "new raw data"}').from_cache
+        is False
+    )
 
 
 @pytest.mark.parametrize('field', ['params', 'data', 'json'])
@@ -455,7 +458,12 @@ def test_response_defaults(mock_session):
 def test_match_headers(mock_session):
     """With match_headers, requests with different headers should have different cache keys"""
     mock_session.cache.match_headers = True
-    headers_list = [{'Accept': 'application/json'}, {'Accept': 'text/xml'}, {'Accept': 'custom'}, None]
+    headers_list = [
+        {'Accept': 'application/json'},
+        {'Accept': 'text/xml'},
+        {'Accept': 'custom'},
+        None,
+    ]
     for headers in headers_list:
         assert mock_session.get(MOCKED_URL, headers=headers).from_cache is False
         assert mock_session.get(MOCKED_URL, headers=headers).from_cache is True
@@ -587,7 +595,9 @@ def test_do_not_cache(mock_session):
         (304, True, True, True),
     ],
 )
-def test_304_not_modified(response_code, cache_hit, cache_expired, expected_from_cache, mock_session):
+def test_304_not_modified(
+    response_code, cache_hit, cache_expired, expected_from_cache, mock_session
+):
     url = f'{MOCKED_URL}/endpoint_2'
     if cache_expired:
         mock_session.expire_after = datetime.now() - timedelta(1)
@@ -614,7 +624,9 @@ def test_url_allowlist(mock_session):
 
 def test_remove_expired_responses(mock_session):
     unexpired_url = f'{MOCKED_URL}?x=1'
-    mock_session.mock_adapter.register_uri('GET', unexpired_url, status_code=200, text='mock response')
+    mock_session.mock_adapter.register_uri(
+        'GET', unexpired_url, status_code=200, text='mock response'
+    )
     mock_session.expire_after = timedelta(seconds=0.2)
     mock_session.get(MOCKED_URL)
     mock_session.get(MOCKED_URL_JSON)
-- 
cgit v1.2.1