summaryrefslogtreecommitdiff
path: root/tests/conftest.py
diff options
context:
space:
mode:
author	Alex Grönholm <alex.gronholm@nextday.fi>	2021-08-29 01:02:10 +0300
committer	Alex Grönholm <alex.gronholm@nextday.fi>	2021-08-29 01:38:09 +0300
commit	b4d4724e95583b9f075a814319c3d5e8e5514a3e (patch)
tree	dd77fb25ded2ceb5a4f29221de69f19f469cfac0 /tests/conftest.py
parent	cf77aec5326e42af7b89e4ab2712daf9694ebad9 (diff)
download	apscheduler-b4d4724e95583b9f075a814319c3d5e8e5514a3e.tar.gz
Overhauled the data store and event dispatch systems
Diffstat (limited to 'tests/conftest.py')
-rw-r--r--	tests/conftest.py	200
1 file changed, 121 insertions, 79 deletions
diff --git a/tests/conftest.py b/tests/conftest.py
index bf6b975..c242b8d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,10 +1,11 @@
import sys
-from contextlib import AsyncExitStack, ExitStack
-from functools import partial
+from contextlib import asynccontextmanager, contextmanager
+from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator, Optional
import pytest
-from anyio import start_blocking_portal
-from apscheduler.datastores.memory import MemoryDataStore
+from apscheduler.abc import AsyncDataStore, DataStore, Serializer
+from apscheduler.adapters import AsyncDataStoreAdapter
+from apscheduler.datastores.sync.memory import MemoryDataStore
from apscheduler.serializers.cbor import CBORSerializer
from apscheduler.serializers.json import JSONSerializer
from apscheduler.serializers.pickle import PickleSerializer
@@ -14,94 +15,135 @@ if sys.version_info >= (3, 9):
else:
from backports.zoneinfo import ZoneInfo
-try:
- from apscheduler.datastores.mongodb import MongoDBDataStore
- from motor.motor_asyncio import AsyncIOMotorClient
-except ImportError:
- MongoDBDataStore = None
-
-try:
- from apscheduler.datastores.postgresql import PostgresqlDataStore
- from asyncpg import create_pool
-except ImportError:
- PostgresqlDataStore = None
-
-store_params = [
- pytest.param(MemoryDataStore, id='memory'),
- pytest.param(PostgresqlDataStore, id='postgresql'),
- pytest.param(MongoDBDataStore, id='mongodb')
-]
-
@pytest.fixture(scope='session')
-def timezone():
+def timezone() -> ZoneInfo:
return ZoneInfo('Europe/Berlin')
-@pytest.fixture(params=[None, PickleSerializer, CBORSerializer, JSONSerializer],
- ids=['none', 'pickle', 'cbor', 'json'])
-def serializer(request):
+@pytest.fixture(params=[
+ pytest.param(None, id='none'),
+ pytest.param(PickleSerializer, id='pickle'),
+ pytest.param(CBORSerializer, id='cbor'),
+ pytest.param(JSONSerializer, id='json')
+])
+def serializer(request) -> Optional[Serializer]:
return request.param() if request.param else None
@pytest.fixture
-def anyio_backend():
+def anyio_backend() -> 'str':
return 'asyncio'
-@pytest.fixture(params=store_params)
-async def store(request):
- async with AsyncExitStack() as stack:
- if request.param is PostgresqlDataStore:
- if PostgresqlDataStore is None:
- pytest.skip('asyncpg not installed')
-
- pool = await create_pool('postgresql://postgres:secret@localhost/testdb',
- min_size=1, max_size=2)
- await stack.enter_async_context(pool)
- store = PostgresqlDataStore(pool, start_from_scratch=True)
- elif request.param is MongoDBDataStore:
- if MongoDBDataStore is None:
- pytest.skip('motor not installed')
-
- client = AsyncIOMotorClient(tz_aware=True)
- stack.push(lambda *args: client.close())
- store = MongoDBDataStore(client, start_from_scratch=True)
- else:
- store = MemoryDataStore()
-
- await stack.enter_async_context(store)
+@contextmanager
+def setup_mongodb_store() -> Generator[DataStore, None, None]:
+ from apscheduler.datastores.sync.mongodb import MongoDBDataStore
+ from pymongo import MongoClient
+ from pymongo.errors import ConnectionFailure
+
+ client = MongoClient(tz_aware=True, serverSelectionTimeoutMS=1000)
+ try:
+ client.admin.command('ismaster')
+ except ConnectionFailure:
+ pytest.skip('MongoDB server not available')
+ raise
+
+ store = MongoDBDataStore(client, start_from_scratch=True)
+ with client, store:
yield store
-@pytest.fixture
-def portal():
- with start_blocking_portal() as portal:
- yield portal
-
-
-@pytest.fixture(params=store_params)
-def sync_store(request, portal):
- with ExitStack() as stack:
- if request.param is PostgresqlDataStore:
- if PostgresqlDataStore is None:
- pytest.skip('asyncpg not installed')
-
- pool = portal.call(
- partial(create_pool, 'postgresql://postgres:secret@localhost/testdb',
- min_size=1, max_size=2)
- )
- stack.enter_context(portal.wrap_async_context_manager(pool))
- store = PostgresqlDataStore(pool, start_from_scratch=True)
- elif request.param is MongoDBDataStore:
- if MongoDBDataStore is None:
- pytest.skip('motor not installed')
-
- client = portal.call(partial(AsyncIOMotorClient, tz_aware=True))
- stack.push(lambda *args: portal.call(client.close))
- store = MongoDBDataStore(client, start_from_scratch=True)
- else:
- store = MemoryDataStore()
-
- stack.enter_context(portal.wrap_async_context_manager(store))
+@contextmanager
+def setup_memory_store() -> Generator[DataStore, None, None]:
+ with MemoryDataStore() as store:
yield store
+
+
+@asynccontextmanager
+async def setup_postgresql_store() -> AsyncGenerator[AsyncDataStore, None]:
+ try:
+ from apscheduler.datastores.async_.postgresql import PostgresqlDataStore
+ from asyncpg import create_pool
+ except ModuleNotFoundError:
+ pytest.skip('asyncpg not installed')
+ raise
+
+ pool = await create_pool('postgresql://postgres:secret@localhost/testdb',
+ min_size=1, max_size=2)
+ store = PostgresqlDataStore(pool, start_from_scratch=True)
+ async with pool, store:
+ yield store
+
+
+@contextmanager
+def setup_sqlalchemy_store() -> Generator[DataStore, None, None]:
+ try:
+ from apscheduler.datastores.sync.sqlalchemy import SQLAlchemyDataStore
+ from sqlalchemy import create_engine
+ except ModuleNotFoundError:
+ pytest.skip('sqlalchemy not installed')
+ raise
+
+ engine = create_engine('postgresql+psycopg2://postgres:secret@localhost/testdb', future=True)
+ store = SQLAlchemyDataStore(engine, start_from_scratch=True)
+ try:
+ with store:
+ yield store
+ finally:
+ engine.dispose()
+
+
+@asynccontextmanager
+async def setup_async_sqlalchemy_store() -> AsyncGenerator[AsyncDataStore, None]:
+ try:
+ from apscheduler.datastores.async_.sqlalchemy import SQLAlchemyDataStore
+ from sqlalchemy.ext.asyncio import create_async_engine
+ except ModuleNotFoundError:
+ pytest.skip('sqlalchemy not installed')
+ raise
+
+ engine = create_async_engine('postgresql+asyncpg://postgres:secret@localhost/testdb',
+ future=True)
+ store = SQLAlchemyDataStore(engine, start_from_scratch=True)
+ try:
+ async with store:
+ yield store
+ finally:
+ await engine.dispose()
+
+
+@pytest.fixture(params=[
+ pytest.param(setup_memory_store, id='memory'),
+ pytest.param(setup_mongodb_store, id='mongodb')
+])
+def setup_sync_store(request) -> ContextManager[DataStore]:
+ return request.param
+
+
+@pytest.fixture(params=[
+ pytest.param(setup_postgresql_store, id='postgresql'),
+ pytest.param(setup_async_sqlalchemy_store, id='async_sqlalchemy')
+])
+def setup_async_store(request) -> AsyncContextManager[AsyncDataStore]:
+ return request.param
+
+
+@pytest.fixture(params=[
+ pytest.param(setup_memory_store, id='memory'),
+ pytest.param(setup_mongodb_store, id='mongodb'),
+ pytest.param(setup_postgresql_store, id='postgresql'),
+ pytest.param(setup_async_sqlalchemy_store, id='async_sqlalchemy')
+])
+def datastore_cm(request):
+ cm = request.param()
+ if isinstance(cm, AsyncContextManager):
+ return cm
+
+ @asynccontextmanager
+ async def wrapper():
+ with cm as store:
+ async with AsyncDataStoreAdapter(store) as adapter:
+ yield adapter
+
+ return wrapper()