author     Jordan Cook <jordan.cook@pioneer.com>    2021-04-21 20:18:02 -0500
committer  Jordan Cook <jordan.cook@pioneer.com>    2021-04-22 00:21:49 -0500
commit     c29569892f3d91c3ad6686a5cb7a0a1e2d39c5ec (patch)
tree       06c00d229dd42e996fc1b02ffdf9a9c0a5a025e3
parent     dc204a8d7df9c627fcfb2b098a28f2f27fbb31c8 (diff)
download   requests-cache-c29569892f3d91c3ad6686a5cb7a0a1e2d39c5ec.tar.gz
Reorganize backend integration tests and add some more thorough tests
-rwxr-xr-x  runtests.sh                                4
-rw-r--r--  tests/conftest.py                         12
-rw-r--r--  tests/integration/test_backends.py       151
-rw-r--r--  tests/integration/test_dynamodb.py        16
-rw-r--r--  tests/integration/test_filesystem.py      24
-rw-r--r--  tests/integration/test_gridfs.py          11
-rw-r--r--  tests/integration/test_sqlite.py          92
-rw-r--r--  tests/integration/test_thread_safety.py    7
8 files changed, 168 insertions, 149 deletions
diff --git a/runtests.sh b/runtests.sh
index f3be637..e4cd976 100755
--- a/runtests.sh
+++ b/runtests.sh
@@ -4,5 +4,5 @@ COVERAGE_ARGS='--cov --cov-report=term --cov-report=html'
export STRESS_TEST_MULTIPLIER=2
# Run unit tests first (and with multiprocessing) to fail quickly if there are issues
-pytest tests/unit --numprocesses=auto $COVERAGE_ARGS
-pytest tests/integration --cov-append $COVERAGE_ARGS
+pytest tests/unit -x --numprocesses=auto $COVERAGE_ARGS
+pytest tests/integration -x --cov-append $COVERAGE_ARGS
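The new -x flag is pytest's fail-fast option (--exitfirst): the unit test run now aborts on the first failure, so the slower integration suite (and its backend containers) is only exercised once unit tests pass. For heavier stress testing, the multiplier can be raised per run, e.g. STRESS_TEST_MULTIPLIER=10 ./runtests.sh.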
diff --git a/tests/conftest.py b/tests/conftest.py
index 577cb78..a918bc7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -22,6 +22,13 @@ from timeout_decorator import timeout
import requests_cache
from requests_cache.session import ALL_METHODS, CachedSession
+CACHE_NAME = 'pytest_cache'
+
+# Allow running longer stress tests with an environment variable
+STRESS_TEST_MULTIPLIER = int(os.getenv('STRESS_TEST_MULTIPLIER', '1'))
+N_THREADS = 2 * STRESS_TEST_MULTIPLIER
+N_ITERATIONS = 4 * STRESS_TEST_MULTIPLIER
+
MOCKED_URL = 'http+mock://requests-cache.com/text'
MOCKED_URL_HTTPS = 'https+mock://requests-cache.com/text'
MOCKED_URL_JSON = 'http+mock://requests-cache.com/json'
@@ -36,9 +43,10 @@ AWS_OPTIONS = {
'aws_secret_access_key': 'placeholder',
}
-# Configure logging to show debug output when tests fail (or with pytest -s)
+
+# Configure logging to show log output when tests fail (or with pytest -s)
basicConfig(level='INFO')
-getLogger('requests_cache').setLevel('DEBUG')
+# getLogger('requests_cache').setLevel('DEBUG')
logger = getLogger(__name__)
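With the default multiplier of 1 this yields N_THREADS = 2 and N_ITERATIONS = 4; runtests.sh exports STRESS_TEST_MULTIPLIER=2, doubling both. A minimal sketch of how a test module consumes the shared constants (the test and its helper are hypothetical, not part of this commit):

    from tests.conftest import N_ITERATIONS, N_THREADS

    def test_stress():
        # Workload scales with STRESS_TEST_MULTIPLIER via the shared constants
        for _ in range(N_ITERATIONS):
            run_with_threads(n_threads=N_THREADS)  # hypothetical helper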
diff --git a/tests/integration/test_backends.py b/tests/integration/test_backends.py
index ca9c6e2..b24a653 100644
--- a/tests/integration/test_backends.py
+++ b/tests/integration/test_backends.py
@@ -1,100 +1,127 @@
-# TODO: Refactor with pytest fixtures
import pytest
-from typing import Type
+from threading import Thread
+from time import time
+from typing import Dict, Type
-from requests_cache.backends.base import BaseStorage
+from requests_cache.backends.base import BaseCache, BaseStorage
+from requests_cache.session import CachedSession
+from tests.conftest import AWS_OPTIONS, CACHE_NAME, N_ITERATIONS, N_THREADS, httpbin
class BaseStorageTestCase:
- """Base class for testing backends"""
+ """Base class for testing cache storage dict-like interfaces"""
def __init__(
self,
*args,
storage_class: Type[BaseStorage],
+ init_kwargs: Dict = None,
picklable: bool = False,
**kwargs,
):
+ super().__init__(*args, **kwargs)
self.storage_class = storage_class
+ self.init_kwargs = init_kwargs or {}
self.picklable = picklable
- super().__init__(*args, **kwargs)
+ self.num_instances = 10 # Max number of cache instances to test
- NAMESPACE = 'pytest-temp'
- TABLES = ['table%s' % i for i in range(5)]
+ def init_cache(self, index=0, clear=True, **kwargs):
+ kwargs['suppress_warnings'] = True
+ cache = self.storage_class(CACHE_NAME, f'table_{index}', **self.init_kwargs, **kwargs)
+ if clear:
+ cache.clear()
+ return cache
def tearDown(self):
- for table in self.TABLES:
- self.storage_class(self.NAMESPACE, table).clear()
+ for i in range(self.num_instances):
+ self.init_cache(i, clear=True)
super().tearDown()
- def test_set_get(self):
- d1 = self.storage_class(self.NAMESPACE, self.TABLES[0])
- d2 = self.storage_class(self.NAMESPACE, self.TABLES[1])
- d3 = self.storage_class(self.NAMESPACE, self.TABLES[2])
- d1['key_1'] = 1
- d2['key_2'] = 2
- d3['key_3'] = 3
- assert list(d1.keys()) == ['key_1']
- assert list(d2.keys()) == ['key_2']
- assert list(d3.keys()) == ['key_3']
-
- with pytest.raises(KeyError):
- d1[4]
-
- def test_str(self):
- d = self.storage_class(self.NAMESPACE)
- d.clear()
- d['key_1'] = 'value_1'
- d['key_2'] = 'value_2'
- assert dict(d) == {'key_1': 'value_1', 'key_2': 'value_2'}
+ def test_basic_methods(self):
+ """Test basic dict methods with multiple cache instances:
+ ``getitem, setitem, delitem, len, contains``
+ """
+        caches = [self.init_cache(i) for i in range(self.num_instances)]
+ for i in range(self.num_instances):
+ caches[i][f'key_{i}'] = f'value_{i}'
+ caches[i][f'key_{i+1}'] = f'value_{i+1}'
+
+ for i in range(self.num_instances):
+ cache = caches[i]
+            assert cache[f'key_{i}'] == f'value_{i}'
+ assert len(cache) == 2
+ assert f'key_{i}' in cache and f'key_{i+1}' in cache
+
+ del cache[f'key_{i}']
+ assert f'key_{i}' not in cache
+
+ def test_iterable_methods(self):
+ """Test iterable dict methods with multiple cache instances:
+ ``iter, keys, values, items``
+ """
+ caches = [self.init_cache(i) for i in range(self.num_instances)]
+ for i in range(self.num_instances):
+ caches[i][f'key_{i}'] = f'value_{i}'
+
+ for i in range(self.num_instances):
+ cache = caches[i]
+ assert list(cache) == [f'key_{i}']
+ assert list(cache.keys()) == [f'key_{i}']
+ assert list(cache.values()) == [f'value_{i}']
+ assert list(cache.items()) == [(f'key_{i}', f'value_{i}')]
+ assert dict(cache) == {f'key_{i}': f'value_{i}'}
def test_del(self):
- d = self.storage_class(self.NAMESPACE)
- d.clear()
+ """Some more tests to ensure ``delitem`` deletes only the expected items"""
+ cache = self.init_cache()
+ for i in range(20):
+ cache[f'key_{i}'] = f'value_{i}'
for i in range(5):
- d[f'key_{i}'] = i
- del d['key_0']
- del d['key_1']
- del d['key_2']
- assert set(d.keys()) == {f'key_{i}' for i in range(3, 5)}
- assert set(d.values()) == set(range(3, 5))
+ del cache[f'key_{i}']
+
+ assert len(cache) == 15
+ assert set(cache.keys()) == {f'key_{i}' for i in range(5, 20)}
+ assert set(cache.values()) == {f'value_{i}' for i in range(5, 20)}
+ def test_keyerrors(self):
+ """Accessing or deleting a deleted item should raise a KeyError"""
+ cache = self.init_cache()
+ cache['key'] = 'value'
+ del cache['key']
+
+ with pytest.raises(KeyError):
+ del cache['key']
with pytest.raises(KeyError):
- del d['key_0']
+ cache['key']
def test_picklable_dict(self):
if self.picklable:
- d = self.storage_class(self.NAMESPACE)
- d['key_1'] = Picklable()
- d = self.storage_class(self.NAMESPACE)
- assert d['key_1'].a == 1
- assert d['key_1'].b == 2
+ cache = self.init_cache()
+ cache['key_1'] = Picklable()
+ assert cache['key_1'].attr_1 == 'value_1'
+ assert cache['key_1'].attr_2 == 'value_2'
def test_clear_and_work_again(self):
- d1 = self.storage_class(self.NAMESPACE)
- d2 = self.storage_class(self.NAMESPACE, connection=getattr(d1, 'connection', None))
- d1.clear()
- d2.clear()
+ cache_1 = self.init_cache()
+ cache_2 = self.init_cache(connection=getattr(cache_1, 'connection', None))
for i in range(5):
- d1[i] = i
- d2[i] = i
+ cache_1[i] = i
+ cache_2[i] = i
- assert len(d1) == len(d2) == 5
- d1.clear()
- d2.clear()
- assert len(d1) == len(d2) == 0
+ assert len(cache_1) == len(cache_2) == 5
+ cache_1.clear()
+ cache_2.clear()
+ assert len(cache_1) == len(cache_2) == 0
def test_same_settings(self):
- d1 = self.storage_class(self.NAMESPACE)
- d2 = self.storage_class(self.NAMESPACE, connection=getattr(d1, 'connection', None))
- d1.clear()
- d2.clear()
- d1['key_1'] = 1
- d2['key_2'] = 2
- assert d1 == d2
+ cache_1 = self.init_cache()
+ cache_2 = self.init_cache(connection=getattr(cache_1, 'connection', None))
+ cache_1['key_1'] = 1
+ cache_2['key_2'] = 2
+ assert cache_1 == cache_2
class Picklable:
- a = 1
- b = 2
+ attr_1 = 'value_1'
+ attr_2 = 'value_2'
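BaseStorageTestCase is written as a mixin: it receives storage_class and related options through __init__, and each backend module pairs it with unittest.TestCase. A minimal sketch of a concrete subclass (a hypothetical example mirroring the SQLite and filesystem test cases below):

    import unittest

    from requests_cache.backends.sqlite import DbDict
    from tests.integration.test_backends import BaseStorageTestCase

    class ExampleDbDictTestCase(BaseStorageTestCase, unittest.TestCase):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, storage_class=DbDict, picklable=False, **kwargs)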
diff --git a/tests/integration/test_dynamodb.py b/tests/integration/test_dynamodb.py
index 3a4bd23..c78baad 100644
--- a/tests/integration/test_dynamodb.py
+++ b/tests/integration/test_dynamodb.py
@@ -4,7 +4,7 @@ from unittest.mock import patch
from requests_cache.backends import DynamoDbDict
from tests.conftest import AWS_OPTIONS, fail_if_no_connection
-from tests.integration.test_backends import BaseStorageTestCase
+from tests.integration.test_backends import CACHE_NAME, BaseStorageTestCase
# Run this test module last, since the DynamoDB container takes the longest to initialize
pytestmark = pytest.mark.order(-1)
@@ -20,16 +20,18 @@ def ensure_connection():
client.describe_limits()
-class DynamoDbDictWrapper(DynamoDbDict):
- def __init__(self, namespace, collection_name='dynamodb_dict_data', **options):
- super().__init__(namespace, collection_name, **options, **AWS_OPTIONS)
-
-
class DynamoDbTestCase(BaseStorageTestCase, unittest.TestCase):
+ def init_cache(self, index=0, clear=True, **kwargs):
+ kwargs['suppress_warnings'] = True
+ cache = self.storage_class(CACHE_NAME, f'table_{index}', **kwargs, **AWS_OPTIONS)
+ if clear:
+ cache.clear()
+ return cache
+
def __init__(self, *args, **kwargs):
super().__init__(
*args,
- storage_class=DynamoDbDictWrapper,
+ storage_class=DynamoDbDict,
picklable=True,
**kwargs,
)
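The ensure_connection check (shown in part above) pings the local DynamoDB container via client.describe_limits() before the tests run. Roughly, that amounts to the following; the endpoint and region values here are assumptions based on AWS_OPTIONS in conftest.py:

    import boto3

    client = boto3.client(
        'dynamodb',
        endpoint_url='http://localhost:8000',  # assumed local container endpoint
        region_name='us-east-1',               # assumed
        aws_access_key_id='placeholder',
        aws_secret_access_key='placeholder',
    )
    client.describe_limits()  # raises a botocore error if the container is down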
diff --git a/tests/integration/test_filesystem.py b/tests/integration/test_filesystem.py
index 72301c9..4224de0 100644
--- a/tests/integration/test_filesystem.py
+++ b/tests/integration/test_filesystem.py
@@ -1,32 +1,28 @@
-import pytest
import unittest
from os.path import isfile
from shutil import rmtree
from requests_cache.backends import FileDict
-from tests.integration.test_backends import BaseStorageTestCase
+from tests.integration.test_backends import CACHE_NAME, BaseStorageTestCase
-class FilesystemTestCase(BaseStorageTestCase, unittest.TestCase):
+class FileDictTestCase(BaseStorageTestCase, unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, storage_class=FileDict, picklable=True, **kwargs)
def tearDown(self):
- rmtree(self.NAMESPACE)
+ rmtree(CACHE_NAME, ignore_errors=True)
- def test_set_get(self):
- cache = self.storage_class(self.NAMESPACE)
- cache['key'] = 'value'
- assert list(cache.keys()) == ['key']
- assert list(cache.values()) == ['value']
-
- with pytest.raises(KeyError):
- cache[4]
+ def init_cache(self, index=0, **kwargs):
+ cache = self.storage_class(f'{CACHE_NAME}_{index}', use_temp=True, **kwargs)
+ cache.clear()
+ return cache
def test_paths(self):
- cache = self.storage_class(self.NAMESPACE)
- for i in range(10):
+ cache = self.storage_class(CACHE_NAME)
+ for i in range(self.num_instances):
cache[f'key_{i}'] = f'value_{i}'
+ assert len(list(cache.paths())) == self.num_instances
for path in cache.paths():
assert isfile(path)
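FileDict stores one file per cached item, which is what the strengthened test_paths now asserts. A short sketch of the interface the test exercises (use_temp=True places the cache under the system temp directory):

    from os.path import isfile

    from requests_cache.backends import FileDict

    cache = FileDict('pytest_cache', use_temp=True)
    cache['key_1'] = 'value_1'
    for path in cache.paths():  # one path per cached item
        assert isfile(path)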
diff --git a/tests/integration/test_gridfs.py b/tests/integration/test_gridfs.py
index aae9948..5931abe 100644
--- a/tests/integration/test_gridfs.py
+++ b/tests/integration/test_gridfs.py
@@ -22,16 +22,7 @@ def ensure_connection():
class GridFSPickleDictTestCase(BaseStorageTestCase, unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, storage_class=GridFSPickleDict, picklable=True, **kwargs)
-
- def test_set_get(self):
-        """Override base test to test a single collection instead of multiple"""
- d1 = self.storage_class(self.NAMESPACE, self.TABLES[0])
- d1[1] = 1
- d1[2] = 2
- assert list(d1.keys()) == [1, 2]
-
- with pytest.raises(KeyError):
- d1[4]
+        self.num_instances = 1  # Only test a single collection instead of multiple
@patch('requests_cache.backends.gridfs.GridFS')
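num_instances is capped at 1 here because, as the replaced comment notes, the GridFS backend works against a single collection per database, so there is no equivalent of multiple tables to iterate over. For reference, a sketch of the underlying pymongo calls (assuming a local MongoDB; this approximates, but is not, the backend's actual implementation):

    from gridfs import GridFS
    from pymongo import MongoClient

    fs = GridFS(MongoClient()['pytest_cache'])
    file_id = fs.put(b'value_1', filename='key_1')  # roughly __setitem__
    assert fs.get(file_id).read() == b'value_1'     # roughly __getitem__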
diff --git a/tests/integration/test_sqlite.py b/tests/integration/test_sqlite.py
index a6fd50d..1924909 100644
--- a/tests/integration/test_sqlite.py
+++ b/tests/integration/test_sqlite.py
@@ -4,78 +4,78 @@ from threading import Thread
from unittest.mock import patch
from requests_cache.backends.sqlite import DbDict, DbPickleDict
-from tests.integration.test_backends import BaseStorageTestCase
+from tests.integration.test_backends import CACHE_NAME, BaseStorageTestCase
class SQLiteTestCase(BaseStorageTestCase):
def tearDown(self):
try:
- os.unlink(self.NAMESPACE)
+ os.unlink(CACHE_NAME)
except Exception:
pass
def test_bulk_commit(self):
- d = self.storage_class(self.NAMESPACE, self.TABLES[0])
- with d.bulk_commit():
+ cache = self.init_cache()
+ with cache.bulk_commit():
pass
- d.clear()
+
n = 1000
- with d.bulk_commit():
+ with cache.bulk_commit():
for i in range(n):
- d[i] = i
- assert list(d.keys()) == list(range(n))
+ cache[i] = i
+ assert list(cache.keys()) == list(range(n))
def test_switch_commit(self):
- d = self.storage_class(self.NAMESPACE)
- d.clear()
- d[1] = 1
- d = self.storage_class(self.NAMESPACE)
- assert 1 in d
+ cache = self.init_cache()
+ cache.clear()
+ cache['key_1'] = 'value_1'
+ cache = self.init_cache(clear=False)
+ assert 'key_1' in cache
- d._can_commit = False
- d[2] = 2
+ cache._can_commit = False
+ cache['key_2'] = 'value_2'
- d = self.storage_class(self.NAMESPACE)
- assert 2 not in d
- assert d._can_commit is True
+ cache = self.init_cache(clear=False)
+        assert 'key_2' not in cache
+ assert cache._can_commit is True
def test_fast_save(self):
- d1 = self.storage_class(self.NAMESPACE, fast_save=True)
- d2 = self.storage_class(self.NAMESPACE, self.TABLES[1], fast_save=True)
- d1.clear()
+ cache_1 = self.init_cache(1, fast_save=True)
+ cache_2 = self.init_cache(2, fast_save=True)
+
n = 1000
for i in range(n):
- d1[i] = i
- d2[i * 2] = i
- # HACK if we will not sort, fast save can produce different order of records
- assert sorted(d1.keys()) == list(range(n))
- assert sorted(d2.values()) == list(range(n))
+ cache_1[i] = i
+ cache_2[i * 2] = i
+
+ assert set(cache_1.keys()) == set(range(n))
+ assert set(cache_2.values()) == set(range(n))
def test_usage_with_threads(self):
- def do_test_for(d, n_threads=5):
- d.clear()
+ def do_test_for(cache, n_threads=5):
+ cache.clear()
def do_inserts(values):
for v in values:
- d[v] = v
+ cache[v] = v
def values(x, n):
return [i * x for i in range(n)]
threads = [Thread(target=do_inserts, args=(values(i, n_threads),)) for i in range(n_threads)]
- for t in threads:
- t.start()
- for t in threads:
- t.join()
+ for thread in threads:
+ thread.start()
+ for thread in threads:
+ thread.join()
for i in range(n_threads):
for x in values(i, n_threads):
- assert d[x] == x
+ assert cache[x] == x
- do_test_for(self.storage_class(self.NAMESPACE))
- do_test_for(self.storage_class(self.NAMESPACE, fast_save=True), 20)
- do_test_for(self.storage_class(self.NAMESPACE, fast_save=True))
- do_test_for(self.storage_class(self.NAMESPACE, self.TABLES[1], fast_save=True))
+ do_test_for(self.init_cache())
+ do_test_for(self.init_cache(fast_save=True), 20)
+ do_test_for(self.init_cache(fast_save=True))
+        do_test_for(self.init_cache(2, fast_save=True))
def test_noop(self):
def do_noop_bulk(d):
@@ -83,14 +83,14 @@ class SQLiteTestCase(BaseStorageTestCase):
pass
del d
- d = self.storage_class(self.NAMESPACE)
- t = Thread(target=do_noop_bulk, args=(d,))
- t.start()
- t.join()
+ cache = self.init_cache()
+ thread = Thread(target=do_noop_bulk, args=(cache,))
+ thread.start()
+ thread.join()
# make sure connection is not closed by the thread
- d[0] = 0
- assert str(d) == "{0: 0}"
+ cache[0] = 0
+ assert str(cache) == "{0: 0}"
class DbDictTestCase(SQLiteTestCase, unittest.TestCase):
@@ -106,5 +106,5 @@ class DbPickleDictTestCase(SQLiteTestCase, unittest.TestCase):
@patch('requests_cache.backends.sqlite.sqlite3')
def test_connection_kwargs(mock_sqlite):
"""A spot check to make sure optional connection kwargs gets passed to connection"""
- DbDict('test', timeout=0.5, invalid_kwarg='???')
- mock_sqlite.connect.assert_called_with('test', timeout=0.5)
+ cache = DbDict('test', timeout=0.5, invalid_kwarg='???')
+ mock_sqlite.connect.assert_called_with(cache.db_path, timeout=0.5)
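bulk_commit, exercised in test_bulk_commit above, batches many writes into a single SQLite transaction instead of committing per item. A minimal usage sketch:

    from requests_cache.backends.sqlite import DbDict

    cache = DbDict('example_cache')
    with cache.bulk_commit():  # one transaction for all 1000 writes
        for i in range(1000):
            cache[f'key_{i}'] = f'value_{i}'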
diff --git a/tests/integration/test_thread_safety.py b/tests/integration/test_thread_safety.py
index a62133a..b84235f 100644
--- a/tests/integration/test_thread_safety.py
+++ b/tests/integration/test_thread_safety.py
@@ -5,12 +5,7 @@ from time import time
from requests_cache.backends import BACKEND_CLASSES
from requests_cache.session import CachedSession
-from tests.conftest import AWS_OPTIONS, httpbin
-
-# Allow running longer stress tests with an environment variable
-MULTIPLIER = int(getenv('STRESS_TEST_MULTIPLIER', '1'))
-N_THREADS = 2 * MULTIPLIER
-N_ITERATIONS = 4 * MULTIPLIER
+from tests.conftest import AWS_OPTIONS, N_ITERATIONS, N_THREADS, httpbin
@pytest.mark.parametrize('iteration', range(N_ITERATIONS))
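For context, a sketch of the kind of test this parametrization drives, now using the constants shared through conftest (the body is an illustration, not this file's actual test):

    from threading import Thread

    from requests_cache.session import CachedSession
    from tests.conftest import N_THREADS, httpbin

    def test_caching_with_threads(iteration):
        session = CachedSession(backend='sqlite')
        threads = [Thread(target=session.get, args=(httpbin('get'),)) for _ in range(N_THREADS)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()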