path: root/tests/test_schedulers.py
author    Alex Grönholm <alex.gronholm@nextday.fi>    2021-02-15 00:32:33 +0200
committer    Alex Grönholm <alex.gronholm@nextday.fi>    2021-02-24 23:44:32 +0200
commit    f4e8c3a0242b082fa1ca6ed5c78094f8de5ba439 (patch)
tree    20573886078f60b59ba2811b3fc156c877ff386d /tests/test_schedulers.py
parent    5bdf7bd34300ec764b1c5c979deec1e468b0b822 (diff)
download    apscheduler-f4e8c3a0242b082fa1ca6ed5c78094f8de5ba439.tar.gz
Implemented data store sharing and proper async support
Diffstat (limited to 'tests/test_schedulers.py')
-rw-r--r--    tests/test_schedulers.py    59
1 file changed, 59 insertions, 0 deletions
diff --git a/tests/test_schedulers.py b/tests/test_schedulers.py
new file mode 100644
index 0000000..b155f6c
--- /dev/null
+++ b/tests/test_schedulers.py
@@ -0,0 +1,59 @@
+import logging
+from datetime import datetime, timezone
+
+import pytest
+
+from apscheduler.events import JobSuccessful
+from apscheduler.schedulers.async_ import AsyncScheduler
+from apscheduler.schedulers.sync import SyncScheduler
+from apscheduler.triggers.date import DateTrigger
+
+pytestmark = pytest.mark.anyio
+
+
+async def dummy_async_job():
+ return 'returnvalue'
+
+
+def dummy_sync_job():
+ return 'returnvalue'
+
+
+class TestAsyncScheduler:
+ async def test_schedule_job(self, caplog, store):
+ async def listener(event):
+ events.append(event)
+ if isinstance(event, JobSuccessful):
+ await scheduler.stop()
+
+ caplog.set_level(logging.DEBUG)
+ trigger = DateTrigger(datetime.now(timezone.utc))
+ events = []
+ async with AsyncScheduler(store) as scheduler:
+ scheduler.worker.subscribe(listener)
+ await scheduler.add_schedule(dummy_async_job, trigger)
+ await scheduler.wait_until_stopped()
+
+ assert len(events) == 2
+ assert isinstance(events[1], JobSuccessful)
+ assert events[1].return_value == 'returnvalue'
+
+
+class TestSyncScheduler:
+ @pytest.mark.parametrize('anyio_backend', ['asyncio'])
+ def test_schedule_job(self, caplog, anyio_backend, sync_store, portal):
+ def listener(event):
+ events.append(event)
+ if isinstance(event, JobSuccessful):
+ scheduler.stop()
+
+ caplog.set_level(logging.DEBUG)
+ events = []
+ with SyncScheduler(sync_store, portal=portal) as scheduler:
+ scheduler.worker.subscribe(listener)
+ scheduler.add_schedule(dummy_sync_job, DateTrigger(datetime.now(timezone.utc)))
+ scheduler.wait_until_stopped()
+
+ assert len(events) == 2
+ assert isinstance(events[1], JobSuccessful)
+ assert events[1].return_value == 'returnvalue'
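
The tests above rely on fixtures (store, sync_store, portal, and the parametrized anyio_backend) that are defined in the test suite's conftest.py, which is not part of this diff. Below is a minimal sketch of what such fixtures might look like, assuming anyio's blocking portal API; the fixture bodies and import paths are illustrative assumptions, not taken from the commit.

# Hypothetical conftest.py sketch (not part of this commit); the real fixture
# definitions live elsewhere in the test suite, and the names below are
# assumptions for illustration only.
import pytest
from anyio.from_thread import start_blocking_portal


@pytest.fixture
def anyio_backend():
    # Backend used by the anyio pytest plugin for async tests; the sync test
    # above narrows it via @pytest.mark.parametrize('anyio_backend', ...).
    return 'asyncio'


@pytest.fixture
def portal():
    # A blocking portal runs an event loop in a worker thread so the
    # synchronous SyncScheduler can call into async code; the import path of
    # start_blocking_portal differs between anyio versions.
    with start_blocking_portal() as portal:
        yield portal


@pytest.fixture
def store():
    # Placeholder: would return an async-capable data store instance; the
    # concrete class depends on the data store modules added in this commit.
    ...


@pytest.fixture
def sync_store():
    # Placeholder: synchronous counterpart of the store fixture above.
    ...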