summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
authorAlex Grönholm <alex.gronholm@nextday.fi>2022-04-20 01:00:57 +0300
committerAlex Grönholm <alex.gronholm@nextday.fi>2022-04-20 01:11:20 +0300
commitb20f62d929eed84ad18020bb82dd43d8cb70da4d (patch)
treec42bf1877dd54755c55c649269e1254995bdf0c9 /tests
parent82992cd427a9ab2351d8e0719b82d826dff5a521 (diff)
downloadapscheduler-b20f62d929eed84ad18020bb82dd43d8cb70da4d.tar.gz
Switched to Black for code formatting
Diffstat (limited to 'tests')
-rw-r--r--tests/conftest.py18
-rw-r--r--tests/test_datastores.py331
-rw-r--r--tests/test_eventbrokers.py111
-rw-r--r--tests/test_marshalling.py81
-rw-r--r--tests/test_schedulers.py170
-rw-r--r--tests/test_workers.py77
-rw-r--r--tests/triggers/test_calendarinterval.py53
-rw-r--r--tests/triggers/test_combining.py46
-rw-r--r--tests/triggers/test_cron.py407
-rw-r--r--tests/triggers/test_interval.py33
10 files changed, 851 insertions, 476 deletions
diff --git a/tests/conftest.py b/tests/conftest.py
index 37c363f..31ea9b0 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -15,20 +15,22 @@ else:
from backports.zoneinfo import ZoneInfo
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def timezone() -> ZoneInfo:
- return ZoneInfo('Europe/Berlin')
+ return ZoneInfo("Europe/Berlin")
-@pytest.fixture(params=[
- pytest.param(PickleSerializer, id='pickle'),
- pytest.param(CBORSerializer, id='cbor'),
- pytest.param(JSONSerializer, id='json')
-])
+@pytest.fixture(
+ params=[
+ pytest.param(PickleSerializer, id="pickle"),
+ pytest.param(CBORSerializer, id="cbor"),
+ pytest.param(JSONSerializer, id="json"),
+ ]
+)
def serializer(request) -> Serializer | None:
return request.param() if request.param else None
@pytest.fixture
def anyio_backend() -> str:
- return 'asyncio'
+ return "asyncio"
diff --git a/tests/test_datastores.py b/tests/test_datastores.py
index e8584a3..2b8faea 100644
--- a/tests/test_datastores.py
+++ b/tests/test_datastores.py
@@ -15,7 +15,13 @@ from apscheduler.datastores.async_adapter import AsyncDataStoreAdapter
from apscheduler.datastores.memory import MemoryDataStore
from apscheduler.enums import CoalescePolicy, ConflictPolicy, JobOutcome
from apscheduler.events import (
- Event, ScheduleAdded, ScheduleRemoved, ScheduleUpdated, TaskAdded, TaskUpdated)
+ Event,
+ ScheduleAdded,
+ ScheduleRemoved,
+ ScheduleUpdated,
+ TaskAdded,
+ TaskUpdated,
+)
from apscheduler.structures import JobResult, Task
from apscheduler.triggers.date import DateTrigger
@@ -41,8 +47,8 @@ def sqlite_store() -> DataStore:
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
- with TemporaryDirectory('sqlite_') as tempdir:
- engine = create_engine(f'sqlite:///{tempdir}/test.db')
+ with TemporaryDirectory("sqlite_") as tempdir:
+ engine = create_engine(f"sqlite:///{tempdir}/test.db")
try:
yield SQLAlchemyDataStore(engine)
finally:
@@ -55,7 +61,7 @@ def psycopg2_store() -> DataStore:
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
- engine = create_engine('postgresql+psycopg2://postgres:secret@localhost/testdb')
+ engine = create_engine("postgresql+psycopg2://postgres:secret@localhost/testdb")
try:
yield SQLAlchemyDataStore(engine, start_from_scratch=True)
finally:
@@ -68,7 +74,7 @@ def mysql_store() -> DataStore:
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
- engine = create_engine('mysql+pymysql://root:secret@localhost/testdb')
+ engine = create_engine("mysql+pymysql://root:secret@localhost/testdb")
try:
yield SQLAlchemyDataStore(engine, start_from_scratch=True)
finally:
@@ -81,46 +87,74 @@ async def asyncpg_store() -> AsyncDataStore:
from apscheduler.datastores.async_sqlalchemy import AsyncSQLAlchemyDataStore
- engine = create_async_engine('postgresql+asyncpg://postgres:secret@localhost/testdb',
- future=True)
+ engine = create_async_engine(
+ "postgresql+asyncpg://postgres:secret@localhost/testdb", future=True
+ )
try:
yield AsyncSQLAlchemyDataStore(engine, start_from_scratch=True)
finally:
await engine.dispose()
-@pytest.fixture(params=[
- pytest.param(lazy_fixture('memory_store'), id='memory'),
- pytest.param(lazy_fixture('sqlite'), id='sqlite'),
- pytest.param(lazy_fixture('mongodb_store'), id='mongodb',
- marks=[pytest.mark.external_service]),
- pytest.param(lazy_fixture('psycopg2_store'), id='psycopg2',
- marks=[pytest.mark.external_service]),
- pytest.param(lazy_fixture('mysql_store'), id='mysql',
- marks=[pytest.mark.external_service])
-])
+@pytest.fixture(
+ params=[
+ pytest.param(lazy_fixture("memory_store"), id="memory"),
+ pytest.param(lazy_fixture("sqlite"), id="sqlite"),
+ pytest.param(
+ lazy_fixture("mongodb_store"),
+ id="mongodb",
+ marks=[pytest.mark.external_service],
+ ),
+ pytest.param(
+ lazy_fixture("psycopg2_store"),
+ id="psycopg2",
+ marks=[pytest.mark.external_service],
+ ),
+ pytest.param(
+ lazy_fixture("mysql_store"),
+ id="mysql",
+ marks=[pytest.mark.external_service],
+ ),
+ ]
+)
def sync_store(request) -> DataStore:
return request.param
-@pytest.fixture(params=[
- pytest.param(lazy_fixture('asyncpg_store'), id='asyncpg',
- marks=[pytest.mark.external_service])
-])
+@pytest.fixture(
+ params=[
+ pytest.param(
+ lazy_fixture("asyncpg_store"),
+ id="asyncpg",
+ marks=[pytest.mark.external_service],
+ )
+ ]
+)
def async_store(request) -> AsyncDataStore:
return request.param
-@pytest.fixture(params=[
- pytest.param(lazy_fixture('memory_store'), id='memory'),
- pytest.param(lazy_fixture('sqlite_store'), id='sqlite'),
- pytest.param(lazy_fixture('mongodb_store'), id='mongodb',
- marks=[pytest.mark.external_service]),
- pytest.param(lazy_fixture('psycopg2_store'), id='psycopg2',
- marks=[pytest.mark.external_service]),
- pytest.param(lazy_fixture('mysql_store'), id='mysql',
- marks=[pytest.mark.external_service])
-])
+@pytest.fixture(
+ params=[
+ pytest.param(lazy_fixture("memory_store"), id="memory"),
+ pytest.param(lazy_fixture("sqlite_store"), id="sqlite"),
+ pytest.param(
+ lazy_fixture("mongodb_store"),
+ id="mongodb",
+ marks=[pytest.mark.external_service],
+ ),
+ pytest.param(
+ lazy_fixture("psycopg2_store"),
+ id="psycopg2",
+ marks=[pytest.mark.external_service],
+ ),
+ pytest.param(
+ lazy_fixture("mysql_store"),
+ id="mysql",
+ marks=[pytest.mark.external_service],
+ ),
+ ]
+)
async def datastore(request):
if isinstance(request.param, DataStore):
return AsyncDataStoreAdapter(request.param)
@@ -131,22 +165,21 @@ async def datastore(request):
@pytest.fixture
def schedules() -> list[Schedule]:
trigger = DateTrigger(datetime(2020, 9, 13, tzinfo=timezone.utc))
- schedule1 = Schedule(id='s1', task_id='task1', trigger=trigger)
+ schedule1 = Schedule(id="s1", task_id="task1", trigger=trigger)
schedule1.next_fire_time = trigger.next()
trigger = DateTrigger(datetime(2020, 9, 14, tzinfo=timezone.utc))
- schedule2 = Schedule(id='s2', task_id='task2', trigger=trigger)
+ schedule2 = Schedule(id="s2", task_id="task2", trigger=trigger)
schedule2.next_fire_time = trigger.next()
trigger = DateTrigger(datetime(2020, 9, 15, tzinfo=timezone.utc))
- schedule3 = Schedule(id='s3', task_id='task1', trigger=trigger)
+ schedule3 = Schedule(id="s3", task_id="task1", trigger=trigger)
return [schedule1, schedule2, schedule3]
@asynccontextmanager
async def capture_events(
- datastore: AsyncDataStore, limit: int,
- event_types: set[type[Event]] | None = None
+ datastore: AsyncDataStore, limit: int, event_types: set[type[Event]] | None = None
) -> AsyncGenerator[list[Event], None]:
def listener(event: Event) -> None:
events.append(event)
@@ -170,62 +203,71 @@ class TestAsyncStores:
event_types = {TaskAdded, TaskUpdated}
async with datastore, capture_events(datastore, 3, event_types) as events:
- await datastore.add_task(Task(id='test_task', func=print))
- await datastore.add_task(Task(id='test_task2', func=math.ceil))
- await datastore.add_task(Task(id='test_task', func=repr))
+ await datastore.add_task(Task(id="test_task", func=print))
+ await datastore.add_task(Task(id="test_task2", func=math.ceil))
+ await datastore.add_task(Task(id="test_task", func=repr))
tasks = await datastore.get_tasks()
assert len(tasks) == 2
- assert tasks[0].id == 'test_task'
+ assert tasks[0].id == "test_task"
assert tasks[0].func is repr
- assert tasks[1].id == 'test_task2'
+ assert tasks[1].id == "test_task2"
assert tasks[1].func is math.ceil
received_event = events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'test_task'
+ assert received_event.task_id == "test_task"
received_event = events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'test_task2'
+ assert received_event.task_id == "test_task2"
received_event = events.pop(0)
assert isinstance(received_event, TaskUpdated)
- assert received_event.task_id == 'test_task'
+ assert received_event.task_id == "test_task"
assert not events
- async def test_add_schedules(self, datastore: AsyncDataStore,
- schedules: list[Schedule]) -> None:
+ async def test_add_schedules(
+ self, datastore: AsyncDataStore, schedules: list[Schedule]
+ ) -> None:
async with datastore, capture_events(datastore, 3, {ScheduleAdded}) as events:
for schedule in schedules:
await datastore.add_schedule(schedule, ConflictPolicy.exception)
assert await datastore.get_schedules() == schedules
- assert await datastore.get_schedules({'s1', 's2', 's3'}) == schedules
- assert await datastore.get_schedules({'s1'}) == [schedules[0]]
- assert await datastore.get_schedules({'s2'}) == [schedules[1]]
- assert await datastore.get_schedules({'s3'}) == [schedules[2]]
+ assert await datastore.get_schedules({"s1", "s2", "s3"}) == schedules
+ assert await datastore.get_schedules({"s1"}) == [schedules[0]]
+ assert await datastore.get_schedules({"s2"}) == [schedules[1]]
+ assert await datastore.get_schedules({"s3"}) == [schedules[2]]
for event, schedule in zip(events, schedules):
assert event.schedule_id == schedule.id
assert event.next_fire_time == schedule.next_fire_time
- async def test_replace_schedules(self, datastore: AsyncDataStore,
- schedules: list[Schedule]) -> None:
+ async def test_replace_schedules(
+ self, datastore: AsyncDataStore, schedules: list[Schedule]
+ ) -> None:
async with datastore, capture_events(datastore, 1, {ScheduleUpdated}) as events:
for schedule in schedules:
await datastore.add_schedule(schedule, ConflictPolicy.exception)
next_fire_time = schedules[2].trigger.next()
- schedule = Schedule(id='s3', task_id='foo', trigger=schedules[2].trigger, args=(),
- kwargs={}, coalesce=CoalescePolicy.earliest,
- misfire_grace_time=None, tags=frozenset())
+ schedule = Schedule(
+ id="s3",
+ task_id="foo",
+ trigger=schedules[2].trigger,
+ args=(),
+ kwargs={},
+ coalesce=CoalescePolicy.earliest,
+ misfire_grace_time=None,
+ tags=frozenset(),
+ )
schedule.next_fire_time = next_fire_time
await datastore.add_schedule(schedule, ConflictPolicy.replace)
schedules = await datastore.get_schedules({schedule.id})
- assert schedules[0].task_id == 'foo'
+ assert schedules[0].task_id == "foo"
assert schedules[0].next_fire_time == next_fire_time
assert schedules[0].args == ()
assert schedules[0].kwargs == {}
@@ -234,47 +276,51 @@ class TestAsyncStores:
assert schedules[0].tags == frozenset()
received_event = events.pop(0)
- assert received_event.schedule_id == 's3'
- assert received_event.next_fire_time == datetime(2020, 9, 15, tzinfo=timezone.utc)
+ assert received_event.schedule_id == "s3"
+ assert received_event.next_fire_time == datetime(
+ 2020, 9, 15, tzinfo=timezone.utc
+ )
assert not events
- async def test_remove_schedules(self, datastore: AsyncDataStore,
- schedules: list[Schedule]) -> None:
+ async def test_remove_schedules(
+ self, datastore: AsyncDataStore, schedules: list[Schedule]
+ ) -> None:
async with datastore, capture_events(datastore, 2, {ScheduleRemoved}) as events:
for schedule in schedules:
await datastore.add_schedule(schedule, ConflictPolicy.exception)
- await datastore.remove_schedules(['s1', 's2'])
+ await datastore.remove_schedules(["s1", "s2"])
assert await datastore.get_schedules() == [schedules[2]]
received_event = events.pop(0)
- assert received_event.schedule_id == 's1'
+ assert received_event.schedule_id == "s1"
received_event = events.pop(0)
- assert received_event.schedule_id == 's2'
+ assert received_event.schedule_id == "s2"
assert not events
@pytest.mark.freeze_time(datetime(2020, 9, 14, tzinfo=timezone.utc))
async def test_acquire_release_schedules(
- self, datastore: AsyncDataStore, schedules: list[Schedule]) -> None:
+ self, datastore: AsyncDataStore, schedules: list[Schedule]
+ ) -> None:
event_types = {ScheduleRemoved, ScheduleUpdated}
async with datastore, capture_events(datastore, 2, event_types) as events:
for schedule in schedules:
await datastore.add_schedule(schedule, ConflictPolicy.exception)
# The first scheduler gets the first due schedule
- schedules1 = await datastore.acquire_schedules('dummy-id1', 1)
+ schedules1 = await datastore.acquire_schedules("dummy-id1", 1)
assert len(schedules1) == 1
- assert schedules1[0].id == 's1'
+ assert schedules1[0].id == "s1"
# The second scheduler gets the second due schedule
- schedules2 = await datastore.acquire_schedules('dummy-id2', 1)
+ schedules2 = await datastore.acquire_schedules("dummy-id2", 1)
assert len(schedules2) == 1
- assert schedules2[0].id == 's2'
+ assert schedules2[0].id == "s2"
# The third scheduler gets nothing
- schedules3 = await datastore.acquire_schedules('dummy-id3', 1)
+ schedules3 = await datastore.acquire_schedules("dummy-id3", 1)
assert not schedules3
# Update the schedules and check that the job store actually deletes the first
@@ -283,24 +329,26 @@ class TestAsyncStores:
schedules2[0].next_fire_time = datetime(2020, 9, 15, tzinfo=timezone.utc)
# Release all the schedules
- await datastore.release_schedules('dummy-id1', schedules1)
- await datastore.release_schedules('dummy-id2', schedules2)
+ await datastore.release_schedules("dummy-id1", schedules1)
+ await datastore.release_schedules("dummy-id2", schedules2)
# Check that the first schedule is gone
schedules = await datastore.get_schedules()
assert len(schedules) == 2
- assert schedules[0].id == 's2'
- assert schedules[1].id == 's3'
+ assert schedules[0].id == "s2"
+ assert schedules[1].id == "s3"
# Check for the appropriate update and delete events
received_event = events.pop(0)
assert isinstance(received_event, ScheduleRemoved)
- assert received_event.schedule_id == 's1'
+ assert received_event.schedule_id == "s1"
received_event = events.pop(0)
assert isinstance(received_event, ScheduleUpdated)
- assert received_event.schedule_id == 's2'
- assert received_event.next_fire_time == datetime(2020, 9, 15, tzinfo=timezone.utc)
+ assert received_event.schedule_id == "s2"
+ assert received_event.next_fire_time == datetime(
+ 2020, 9, 15, tzinfo=timezone.utc
+ )
assert not events
@@ -311,20 +359,21 @@ class TestAsyncStores:
async with datastore:
for i in range(1, 3):
trigger = DateTrigger(datetime(2020, 9, 13, tzinfo=timezone.utc))
- schedule = Schedule(id=f's{i}', task_id='task1', trigger=trigger)
+ schedule = Schedule(id=f"s{i}", task_id="task1", trigger=trigger)
schedule.next_fire_time = trigger.next()
await datastore.add_schedule(schedule, ConflictPolicy.exception)
- schedules = await datastore.acquire_schedules('foo', 3)
+ schedules = await datastore.acquire_schedules("foo", 3)
schedules[0].next_fire_time = None
- await datastore.release_schedules('foo', schedules)
+ await datastore.release_schedules("foo", schedules)
remaining = await datastore.get_schedules({s.id for s in schedules})
assert len(remaining) == 1
assert remaining[0].id == schedules[1].id
async def test_acquire_schedules_lock_timeout(
- self, datastore: AsyncDataStore, schedules: list[Schedule], freezer) -> None:
+ self, datastore: AsyncDataStore, schedules: list[Schedule], freezer
+ ) -> None:
"""
Test that a scheduler can acquire schedules that were acquired by another scheduler but
not released within the lock timeout period.
@@ -334,60 +383,66 @@ class TestAsyncStores:
await datastore.add_schedule(schedules[0], ConflictPolicy.exception)
# First, one scheduler acquires the first available schedule
- acquired1 = await datastore.acquire_schedules('dummy-id1', 1)
+ acquired1 = await datastore.acquire_schedules("dummy-id1", 1)
assert len(acquired1) == 1
- assert acquired1[0].id == 's1'
+ assert acquired1[0].id == "s1"
# Try to acquire the schedule just at the threshold (now == acquired_until).
# This should not yield any schedules.
freezer.tick(30)
- acquired2 = await datastore.acquire_schedules('dummy-id2', 1)
+ acquired2 = await datastore.acquire_schedules("dummy-id2", 1)
assert not acquired2
# Right after that, the schedule should be available
freezer.tick(1)
- acquired3 = await datastore.acquire_schedules('dummy-id2', 1)
+ acquired3 = await datastore.acquire_schedules("dummy-id2", 1)
assert len(acquired3) == 1
- assert acquired3[0].id == 's1'
+ assert acquired3[0].id == "s1"
async def test_acquire_multiple_workers(self, datastore: AsyncDataStore) -> None:
async with datastore:
- await datastore.add_task(Task(id='task1', func=asynccontextmanager))
- jobs = [Job(task_id='task1') for _ in range(2)]
+ await datastore.add_task(Task(id="task1", func=asynccontextmanager))
+ jobs = [Job(task_id="task1") for _ in range(2)]
for job in jobs:
await datastore.add_job(job)
# The first worker gets the first job in the queue
- jobs1 = await datastore.acquire_jobs('worker1', 1)
+ jobs1 = await datastore.acquire_jobs("worker1", 1)
assert len(jobs1) == 1
assert jobs1[0].id == jobs[0].id
# The second worker gets the second job
- jobs2 = await datastore.acquire_jobs('worker2', 1)
+ jobs2 = await datastore.acquire_jobs("worker2", 1)
assert len(jobs2) == 1
assert jobs2[0].id == jobs[1].id
# The third worker gets nothing
- jobs3 = await datastore.acquire_jobs('worker3', 1)
+ jobs3 = await datastore.acquire_jobs("worker3", 1)
assert not jobs3
async def test_job_release_success(self, datastore: AsyncDataStore) -> None:
async with datastore:
- await datastore.add_task(Task(id='task1', func=asynccontextmanager))
- job = Job(task_id='task1')
+ await datastore.add_task(Task(id="task1", func=asynccontextmanager))
+ job = Job(task_id="task1")
await datastore.add_job(job)
- acquired = await datastore.acquire_jobs('worker_id', 2)
+ acquired = await datastore.acquire_jobs("worker_id", 2)
assert len(acquired) == 1
assert acquired[0].id == job.id
await datastore.release_job(
- 'worker_id', acquired[0].task_id,
- JobResult(job_id=acquired[0].id, outcome=JobOutcome.success, return_value='foo'))
+ "worker_id",
+ acquired[0].task_id,
+ JobResult(
+ job_id=acquired[0].id,
+ outcome=JobOutcome.success,
+ return_value="foo",
+ ),
+ )
result = await datastore.get_job_result(acquired[0].id)
assert result.outcome is JobOutcome.success
assert result.exception is None
- assert result.return_value == 'foo'
+ assert result.return_value == "foo"
# Check that the job and its result are gone
assert not await datastore.get_jobs({acquired[0].id})
@@ -395,22 +450,27 @@ class TestAsyncStores:
async def test_job_release_failure(self, datastore: AsyncDataStore) -> None:
async with datastore:
- await datastore.add_task(Task(id='task1', func=asynccontextmanager))
- job = Job(task_id='task1')
+ await datastore.add_task(Task(id="task1", func=asynccontextmanager))
+ job = Job(task_id="task1")
await datastore.add_job(job)
- acquired = await datastore.acquire_jobs('worker_id', 2)
+ acquired = await datastore.acquire_jobs("worker_id", 2)
assert len(acquired) == 1
assert acquired[0].id == job.id
await datastore.release_job(
- 'worker_id', acquired[0].task_id,
- JobResult(job_id=acquired[0].id, outcome=JobOutcome.error,
- exception=ValueError('foo')))
+ "worker_id",
+ acquired[0].task_id,
+ JobResult(
+ job_id=acquired[0].id,
+ outcome=JobOutcome.error,
+ exception=ValueError("foo"),
+ ),
+ )
result = await datastore.get_job_result(acquired[0].id)
assert result.outcome is JobOutcome.error
assert isinstance(result.exception, ValueError)
- assert result.exception.args == ('foo',)
+ assert result.exception.args == ("foo",)
assert result.return_value is None
# Check that the job and its result are gone
@@ -419,17 +479,21 @@ class TestAsyncStores:
async def test_job_release_missed_deadline(self, datastore: AsyncDataStore):
async with datastore:
- await datastore.add_task(Task(id='task1', func=asynccontextmanager))
- job = Job(task_id='task1')
+ await datastore.add_task(Task(id="task1", func=asynccontextmanager))
+ job = Job(task_id="task1")
await datastore.add_job(job)
- acquired = await datastore.acquire_jobs('worker_id', 2)
+ acquired = await datastore.acquire_jobs("worker_id", 2)
assert len(acquired) == 1
assert acquired[0].id == job.id
await datastore.release_job(
- 'worker_id', acquired[0].task_id,
- JobResult(job_id=acquired[0].id, outcome=JobOutcome.missed_start_deadline))
+ "worker_id",
+ acquired[0].task_id,
+ JobResult(
+ job_id=acquired[0].id, outcome=JobOutcome.missed_start_deadline
+ ),
+ )
result = await datastore.get_job_result(acquired[0].id)
assert result.outcome is JobOutcome.missed_start_deadline
assert result.exception is None
@@ -441,17 +505,19 @@ class TestAsyncStores:
async def test_job_release_cancelled(self, datastore: AsyncDataStore) -> None:
async with datastore:
- await datastore.add_task(Task(id='task1', func=asynccontextmanager))
- job = Job(task_id='task1')
+ await datastore.add_task(Task(id="task1", func=asynccontextmanager))
+ job = Job(task_id="task1")
await datastore.add_job(job)
- acquired = await datastore.acquire_jobs('worker1', 2)
+ acquired = await datastore.acquire_jobs("worker1", 2)
assert len(acquired) == 1
assert acquired[0].id == job.id
await datastore.release_job(
- 'worker1', acquired[0].task_id,
- JobResult(job_id=acquired[0].id, outcome=JobOutcome.cancelled))
+ "worker1",
+ acquired[0].task_id,
+ JobResult(job_id=acquired[0].id, outcome=JobOutcome.cancelled),
+ )
result = await datastore.get_job_result(acquired[0].id)
assert result.outcome is JobOutcome.cancelled
assert result.exception is None
@@ -461,50 +527,59 @@ class TestAsyncStores:
assert not await datastore.get_jobs({acquired[0].id})
assert not await datastore.get_job_result(acquired[0].id)
- async def test_acquire_jobs_lock_timeout(self, datastore: AsyncDataStore,
- freezer: FrozenDateTimeFactory) -> None:
+ async def test_acquire_jobs_lock_timeout(
+ self, datastore: AsyncDataStore, freezer: FrozenDateTimeFactory
+ ) -> None:
"""
Test that a worker can acquire jobs that were acquired by another scheduler but not
released within the lock timeout period.
"""
async with datastore:
- await datastore.add_task(Task(id='task1', func=asynccontextmanager))
- job = Job(task_id='task1')
+ await datastore.add_task(Task(id="task1", func=asynccontextmanager))
+ job = Job(task_id="task1")
await datastore.add_job(job)
# First, one worker acquires the first available job
- acquired = await datastore.acquire_jobs('worker1', 1)
+ acquired = await datastore.acquire_jobs("worker1", 1)
assert len(acquired) == 1
assert acquired[0].id == job.id
# Try to acquire the job just at the threshold (now == acquired_until).
# This should not yield any jobs.
freezer.tick(30)
- assert not await datastore.acquire_jobs('worker2', 1)
+ assert not await datastore.acquire_jobs("worker2", 1)
# Right after that, the job should be available
freezer.tick(1)
- acquired = await datastore.acquire_jobs('worker2', 1)
+ acquired = await datastore.acquire_jobs("worker2", 1)
assert len(acquired) == 1
assert acquired[0].id == job.id
- async def test_acquire_jobs_max_number_exceeded(self, datastore: AsyncDataStore) -> None:
+ async def test_acquire_jobs_max_number_exceeded(
+ self, datastore: AsyncDataStore
+ ) -> None:
async with datastore:
await datastore.add_task(
- Task(id='task1', func=asynccontextmanager, max_running_jobs=2))
- jobs = [Job(task_id='task1'), Job(task_id='task1'), Job(task_id='task1')]
+ Task(id="task1", func=asynccontextmanager, max_running_jobs=2)
+ )
+ jobs = [Job(task_id="task1"), Job(task_id="task1"), Job(task_id="task1")]
for job in jobs:
await datastore.add_job(job)
# Check that only 2 jobs are returned from acquire_jobs() even though the limit wqas 3
- acquired_jobs = await datastore.acquire_jobs('worker1', 3)
+ acquired_jobs = await datastore.acquire_jobs("worker1", 3)
assert [job.id for job in acquired_jobs] == [job.id for job in jobs[:2]]
# Release one job, and the worker should be able to acquire the third job
await datastore.release_job(
- 'worker1', acquired_jobs[0].task_id,
- JobResult(job_id=acquired_jobs[0].id, outcome=JobOutcome.success,
- return_value=None))
- acquired_jobs = await datastore.acquire_jobs('worker1', 3)
+ "worker1",
+ acquired_jobs[0].task_id,
+ JobResult(
+ job_id=acquired_jobs[0].id,
+ outcome=JobOutcome.success,
+ return_value=None,
+ ),
+ )
+ acquired_jobs = await datastore.acquire_jobs("worker1", 3)
assert [job.id for job in acquired_jobs] == [jobs[2].id]
diff --git a/tests/test_eventbrokers.py b/tests/test_eventbrokers.py
index 1d29ede..7ec90ab 100644
--- a/tests/test_eventbrokers.py
+++ b/tests/test_eventbrokers.py
@@ -33,7 +33,7 @@ def local_async_broker() -> AsyncEventBroker:
def redis_broker(serializer: Serializer) -> EventBroker:
from apscheduler.eventbrokers.redis import RedisEventBroker
- broker = RedisEventBroker.from_url('redis://localhost:6379')
+ broker = RedisEventBroker.from_url("redis://localhost:6379")
broker.serializer = serializer
return broker
@@ -53,29 +53,40 @@ async def asyncpg_broker(serializer: Serializer) -> AsyncEventBroker:
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
- pool = await create_pool('postgres://postgres:secret@localhost:5432/testdb')
+ pool = await create_pool("postgres://postgres:secret@localhost:5432/testdb")
broker = AsyncpgEventBroker.from_asyncpg_pool(pool)
broker.serializer = serializer
yield broker
await pool.close()
-@pytest.fixture(params=[
- pytest.param(lazy_fixture('local_broker'), id='local'),
- pytest.param(lazy_fixture('redis_broker'), id='redis',
- marks=[pytest.mark.external_service]),
- pytest.param(lazy_fixture('mqtt_broker'), id='mqtt',
- marks=[pytest.mark.external_service])
-])
+@pytest.fixture(
+ params=[
+ pytest.param(lazy_fixture("local_broker"), id="local"),
+ pytest.param(
+ lazy_fixture("redis_broker"),
+ id="redis",
+ marks=[pytest.mark.external_service],
+ ),
+ pytest.param(
+ lazy_fixture("mqtt_broker"), id="mqtt", marks=[pytest.mark.external_service]
+ ),
+ ]
+)
def broker(request: SubRequest) -> Callable[[], EventBroker]:
return request.param
-@pytest.fixture(params=[
- pytest.param(lazy_fixture('local_async_broker'), id='local'),
- pytest.param(lazy_fixture('asyncpg_broker'), id='asyncpg',
- marks=[pytest.mark.external_service])
-])
+@pytest.fixture(
+ params=[
+ pytest.param(lazy_fixture("local_async_broker"), id="local"),
+ pytest.param(
+ lazy_fixture("asyncpg_broker"),
+ id="asyncpg",
+ marks=[pytest.mark.external_service],
+ ),
+ ]
+)
def async_broker(request: SubRequest) -> Callable[[], AsyncEventBroker]:
return request.param
@@ -87,8 +98,9 @@ class TestEventBroker:
broker.subscribe(queue.put_nowait)
broker.subscribe(queue.put_nowait)
event = ScheduleAdded(
- schedule_id='schedule1',
- next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc))
+ schedule_id="schedule1",
+ next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc),
+ )
broker.publish(event)
event1 = queue.get(timeout=3)
event2 = queue.get(timeout=1)
@@ -96,27 +108,31 @@ class TestEventBroker:
assert event1 == event2
assert isinstance(event1, ScheduleAdded)
assert isinstance(event1.timestamp, datetime)
- assert event1.schedule_id == 'schedule1'
- assert event1.next_fire_time == datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc)
+ assert event1.schedule_id == "schedule1"
+ assert event1.next_fire_time == datetime(
+ 2021, 9, 11, 12, 31, 56, 254867, timezone.utc
+ )
def test_subscribe_one_shot(self, broker: EventBroker) -> None:
queue: Queue[Event] = Queue()
with broker:
broker.subscribe(queue.put_nowait, one_shot=True)
event = ScheduleAdded(
- schedule_id='schedule1',
- next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc))
+ schedule_id="schedule1",
+ next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc),
+ )
broker.publish(event)
event = ScheduleAdded(
- schedule_id='schedule2',
- next_fire_time=datetime(2021, 9, 12, 8, 42, 11, 968481, timezone.utc))
+ schedule_id="schedule2",
+ next_fire_time=datetime(2021, 9, 12, 8, 42, 11, 968481, timezone.utc),
+ )
broker.publish(event)
received_event = queue.get(timeout=3)
with pytest.raises(Empty):
queue.get(timeout=0.1)
assert isinstance(received_event, ScheduleAdded)
- assert received_event.schedule_id == 'schedule1'
+ assert received_event.schedule_id == "schedule1"
def test_unsubscribe(self, broker: EventBroker, caplog) -> None:
queue: Queue[Event] = Queue()
@@ -130,15 +146,19 @@ class TestEventBroker:
with pytest.raises(Empty):
queue.get(timeout=0.1)
- def test_publish_no_subscribers(self, broker: EventBroker, caplog: LogCaptureFixture) -> None:
+ def test_publish_no_subscribers(
+ self, broker: EventBroker, caplog: LogCaptureFixture
+ ) -> None:
with broker:
broker.publish(Event())
assert not caplog.text
- def test_publish_exception(self, broker: EventBroker, caplog: LogCaptureFixture) -> None:
+ def test_publish_exception(
+ self, broker: EventBroker, caplog: LogCaptureFixture
+ ) -> None:
def bad_subscriber(event: Event) -> None:
- raise Exception('foo')
+ raise Exception("foo")
timestamp = datetime.now(timezone.utc)
event_future: Future[Event] = Future()
@@ -150,7 +170,7 @@ class TestEventBroker:
event = event_future.result(3)
assert isinstance(event, Event)
assert event.timestamp == timestamp
- assert 'Error delivering Event' in caplog.text
+ assert "Error delivering Event" in caplog.text
@pytest.mark.anyio
@@ -161,8 +181,9 @@ class TestAsyncEventBroker:
async_broker.subscribe(send.send)
async_broker.subscribe(send.send_nowait)
event = ScheduleAdded(
- schedule_id='schedule1',
- next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc))
+ schedule_id="schedule1",
+ next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc),
+ )
await async_broker.publish(event)
with fail_after(3):
@@ -172,20 +193,24 @@ class TestAsyncEventBroker:
assert event1 == event2
assert isinstance(event1, ScheduleAdded)
assert isinstance(event1.timestamp, datetime)
- assert event1.schedule_id == 'schedule1'
- assert event1.next_fire_time == datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc)
+ assert event1.schedule_id == "schedule1"
+ assert event1.next_fire_time == datetime(
+ 2021, 9, 11, 12, 31, 56, 254867, timezone.utc
+ )
async def test_subscribe_one_shot(self, async_broker: AsyncEventBroker) -> None:
send, receive = create_memory_object_stream(2)
async with async_broker:
async_broker.subscribe(send.send, one_shot=True)
event = ScheduleAdded(
- schedule_id='schedule1',
- next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc))
+ schedule_id="schedule1",
+ next_fire_time=datetime(2021, 9, 11, 12, 31, 56, 254867, timezone.utc),
+ )
await async_broker.publish(event)
event = ScheduleAdded(
- schedule_id='schedule2',
- next_fire_time=datetime(2021, 9, 12, 8, 42, 11, 968481, timezone.utc))
+ schedule_id="schedule2",
+ next_fire_time=datetime(2021, 9, 12, 8, 42, 11, 968481, timezone.utc),
+ )
await async_broker.publish(event)
with fail_after(3):
@@ -195,7 +220,7 @@ class TestAsyncEventBroker:
await receive.receive()
assert isinstance(received_event, ScheduleAdded)
- assert received_event.schedule_id == 'schedule1'
+ assert received_event.schedule_id == "schedule1"
async def test_unsubscribe(self, async_broker: AsyncEventBroker) -> None:
send, receive = create_memory_object_stream()
@@ -210,17 +235,19 @@ class TestAsyncEventBroker:
with pytest.raises(TimeoutError), fail_after(0.1):
await receive.receive()
- async def test_publish_no_subscribers(self, async_broker: AsyncEventBroker,
- caplog: LogCaptureFixture) -> None:
+ async def test_publish_no_subscribers(
+ self, async_broker: AsyncEventBroker, caplog: LogCaptureFixture
+ ) -> None:
async with async_broker:
await async_broker.publish(Event())
assert not caplog.text
- async def test_publish_exception(self, async_broker: AsyncEventBroker,
- caplog: LogCaptureFixture) -> None:
+ async def test_publish_exception(
+ self, async_broker: AsyncEventBroker, caplog: LogCaptureFixture
+ ) -> None:
def bad_subscriber(event: Event) -> None:
- raise Exception('foo')
+ raise Exception("foo")
timestamp = datetime.now(timezone.utc)
send, receive = create_memory_object_stream()
@@ -231,4 +258,4 @@ class TestAsyncEventBroker:
received_event = await receive.receive()
assert received_event.timestamp == timestamp
- assert 'Error delivering Event' in caplog.text
+ assert "Error delivering Event" in caplog.text
diff --git a/tests/test_marshalling.py b/tests/test_marshalling.py
index c209388..36c8aca 100644
--- a/tests/test_marshalling.py
+++ b/tests/test_marshalling.py
@@ -39,10 +39,14 @@ class InheritedDummyClass(DummyClass):
class TestCallableToRef:
- @pytest.mark.parametrize('obj, error', [
- (partial(DummyClass.meth), 'Cannot create a reference to a partial()'),
- (lambda: None, 'Cannot create a reference to a lambda')
- ], ids=['partial', 'lambda'])
+ @pytest.mark.parametrize(
+ "obj, error",
+ [
+ (partial(DummyClass.meth), "Cannot create a reference to a partial()"),
+ (lambda: None, "Cannot create a reference to a lambda"),
+ ],
+ ids=["partial", "lambda"],
+ )
def test_errors(self, obj, error):
exc = pytest.raises(SerializationError, callable_to_ref, obj)
assert str(exc.value) == error
@@ -52,18 +56,33 @@ class TestCallableToRef:
pass
exc = pytest.raises(SerializationError, callable_to_ref, nested)
- assert str(exc.value) == 'Cannot create a reference to a nested function'
-
- @pytest.mark.parametrize('input,expected', [
- (DummyClass.meth, 'test_marshalling:DummyClass.meth'),
- (DummyClass.classmeth, 'test_marshalling:DummyClass.classmeth'),
- (DummyClass.InnerDummyClass.innerclassmeth,
- 'test_marshalling:DummyClass.InnerDummyClass.innerclassmeth'),
- (DummyClass.staticmeth, 'test_marshalling:DummyClass.staticmeth'),
- (InheritedDummyClass.classmeth, 'test_marshalling:InheritedDummyClass.classmeth'),
- (timedelta, 'datetime:timedelta'),
- ], ids=['unbound method', 'class method', 'inner class method', 'static method',
- 'inherited class method', 'timedelta'])
+ assert str(exc.value) == "Cannot create a reference to a nested function"
+
+ @pytest.mark.parametrize(
+ "input,expected",
+ [
+ (DummyClass.meth, "test_marshalling:DummyClass.meth"),
+ (DummyClass.classmeth, "test_marshalling:DummyClass.classmeth"),
+ (
+ DummyClass.InnerDummyClass.innerclassmeth,
+ "test_marshalling:DummyClass.InnerDummyClass.innerclassmeth",
+ ),
+ (DummyClass.staticmeth, "test_marshalling:DummyClass.staticmeth"),
+ (
+ InheritedDummyClass.classmeth,
+ "test_marshalling:InheritedDummyClass.classmeth",
+ ),
+ (timedelta, "datetime:timedelta"),
+ ],
+ ids=[
+ "unbound method",
+ "class method",
+ "inner class method",
+ "static method",
+ "inherited class method",
+ "timedelta",
+ ],
+ )
def test_valid_refs(self, input, expected):
assert callable_to_ref(input) == expected
@@ -71,21 +90,25 @@ class TestCallableToRef:
class TestCallableFromRef:
def test_valid_ref(self):
from logging.handlers import RotatingFileHandler
- assert callable_from_ref('logging.handlers:RotatingFileHandler') is RotatingFileHandler
+
+ assert (
+ callable_from_ref("logging.handlers:RotatingFileHandler")
+ is RotatingFileHandler
+ )
def test_complex_path(self):
- pkg1 = ModuleType('pkg1')
- pkg1.pkg2 = 'blah'
- pkg2 = ModuleType('pkg1.pkg2')
+ pkg1 = ModuleType("pkg1")
+ pkg1.pkg2 = "blah"
+ pkg2 = ModuleType("pkg1.pkg2")
pkg2.varname = lambda: None
- sys.modules['pkg1'] = pkg1
- sys.modules['pkg1.pkg2'] = pkg2
- assert callable_from_ref('pkg1.pkg2:varname') == pkg2.varname
-
- @pytest.mark.parametrize('input,error', [
- (object(), TypeError),
- ('module', ValueError),
- ('module:blah', LookupError)
- ], ids=['raw object', 'module', 'module attribute'])
+ sys.modules["pkg1"] = pkg1
+ sys.modules["pkg1.pkg2"] = pkg2
+ assert callable_from_ref("pkg1.pkg2:varname") == pkg2.varname
+
+ @pytest.mark.parametrize(
+ "input,error",
+ [(object(), TypeError), ("module", ValueError), ("module:blah", LookupError)],
+ ids=["raw object", "module", "module attribute"],
+ )
def test_lookup_error(self, input, error):
pytest.raises(error, callable_from_ref, input)
diff --git a/tests/test_schedulers.py b/tests/test_schedulers.py
index 4a1cc4e..b8c64a2 100644
--- a/tests/test_schedulers.py
+++ b/tests/test_schedulers.py
@@ -14,7 +14,14 @@ from pytest_mock import MockerFixture
from apscheduler.context import current_scheduler, current_worker, job_info
from apscheduler.enums import JobOutcome
from apscheduler.events import (
- Event, JobAdded, ScheduleAdded, ScheduleRemoved, SchedulerStarted, SchedulerStopped, TaskAdded)
+ Event,
+ JobAdded,
+ ScheduleAdded,
+ ScheduleRemoved,
+ SchedulerStarted,
+ SchedulerStopped,
+ TaskAdded,
+)
from apscheduler.exceptions import JobLookupError
from apscheduler.schedulers.async_ import AsyncScheduler
from apscheduler.schedulers.sync import Scheduler
@@ -33,17 +40,17 @@ pytestmark = pytest.mark.anyio
async def dummy_async_job(delay: float = 0, fail: bool = False) -> str:
await anyio.sleep(delay)
if fail:
- raise RuntimeError('failing as requested')
+ raise RuntimeError("failing as requested")
else:
- return 'returnvalue'
+ return "returnvalue"
def dummy_sync_job(delay: float = 0, fail: bool = False) -> str:
time.sleep(delay)
if fail:
- raise RuntimeError('failing as requested')
+ raise RuntimeError("failing as requested")
else:
- return 'returnvalue'
+ return "returnvalue"
class TestAsyncScheduler:
@@ -59,7 +66,7 @@ class TestAsyncScheduler:
scheduler.events.subscribe(listener)
trigger = DateTrigger(datetime.now(timezone.utc))
async with scheduler:
- await scheduler.add_schedule(dummy_async_job, trigger, id='foo')
+ await scheduler.add_schedule(dummy_async_job, trigger, id="foo")
with fail_after(3):
await event.wait()
@@ -70,24 +77,24 @@ class TestAsyncScheduler:
# Then the task was added
received_event = received_events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'test_schedulers:dummy_async_job'
+ assert received_event.task_id == "test_schedulers:dummy_async_job"
# Then a schedule was added
received_event = received_events.pop(0)
assert isinstance(received_event, ScheduleAdded)
- assert received_event.schedule_id == 'foo'
+ assert received_event.schedule_id == "foo"
# assert received_event.task_id == 'task_id'
# Then that schedule was processed and a job was added for it
received_event = received_events.pop(0)
assert isinstance(received_event, JobAdded)
- assert received_event.schedule_id == 'foo'
- assert received_event.task_id == 'test_schedulers:dummy_async_job'
+ assert received_event.schedule_id == "foo"
+ assert received_event.task_id == "test_schedulers:dummy_async_job"
# Then the schedule was removed since the trigger had been exhausted
received_event = received_events.pop(0)
assert isinstance(received_event, ScheduleRemoved)
- assert received_event.schedule_id == 'foo'
+ assert received_event.schedule_id == "foo"
# Finally, the scheduler was stopped
received_event = received_events.pop(0)
@@ -96,12 +103,17 @@ class TestAsyncScheduler:
# There should be no more events on the list
assert not received_events
- @pytest.mark.parametrize('max_jitter, expected_upper_bound', [
- pytest.param(2, 2, id='within'),
- pytest.param(4, 2.999999, id='exceed')
- ])
- async def test_jitter(self, mocker: MockerFixture, timezone: ZoneInfo, max_jitter: float,
- expected_upper_bound: float) -> None:
+ @pytest.mark.parametrize(
+ "max_jitter, expected_upper_bound",
+ [pytest.param(2, 2, id="within"), pytest.param(4, 2.999999, id="exceed")],
+ )
+ async def test_jitter(
+ self,
+ mocker: MockerFixture,
+ timezone: ZoneInfo,
+ max_jitter: float,
+ expected_upper_bound: float,
+ ) -> None:
job_id: UUID | None = None
def job_added_listener(event: Event) -> None:
@@ -112,14 +124,15 @@ class TestAsyncScheduler:
jitter = 1.569374
orig_start_time = datetime.now(timezone) - timedelta(seconds=1)
- fake_uniform = mocker.patch('random.uniform')
+ fake_uniform = mocker.patch("random.uniform")
fake_uniform.configure_mock(side_effect=lambda a, b: jitter)
async with AsyncScheduler(start_worker=False) as scheduler:
trigger = IntervalTrigger(seconds=3, start_time=orig_start_time)
job_added_event = anyio.Event()
scheduler.events.subscribe(job_added_listener, {JobAdded})
- schedule_id = await scheduler.add_schedule(dummy_async_job, trigger,
- max_jitter=max_jitter)
+ schedule_id = await scheduler.add_schedule(
+ dummy_async_job, trigger, max_jitter=max_jitter
+ )
schedule = await scheduler.get_schedule(schedule_id)
assert schedule.max_jitter == timedelta(seconds=max_jitter)
@@ -132,61 +145,72 @@ class TestAsyncScheduler:
# Check that the job was created with the proper amount of jitter in its scheduled time
jobs = await scheduler.data_store.get_jobs({job_id})
assert jobs[0].jitter == timedelta(seconds=jitter)
- assert jobs[0].scheduled_fire_time == orig_start_time + timedelta(seconds=jitter)
+ assert jobs[0].scheduled_fire_time == orig_start_time + timedelta(
+ seconds=jitter
+ )
assert jobs[0].original_scheduled_time == orig_start_time
async def test_get_job_result_success(self) -> None:
async with AsyncScheduler() as scheduler:
- job_id = await scheduler.add_job(dummy_async_job, kwargs={'delay': 0.2})
+ job_id = await scheduler.add_job(dummy_async_job, kwargs={"delay": 0.2})
result = await scheduler.get_job_result(job_id)
assert result.job_id == job_id
assert result.outcome is JobOutcome.success
- assert result.return_value == 'returnvalue'
+ assert result.return_value == "returnvalue"
async def test_get_job_result_error(self) -> None:
async with AsyncScheduler() as scheduler:
- job_id = await scheduler.add_job(dummy_async_job, kwargs={'delay': 0.2, 'fail': True})
+ job_id = await scheduler.add_job(
+ dummy_async_job, kwargs={"delay": 0.2, "fail": True}
+ )
result = await scheduler.get_job_result(job_id)
assert result.job_id == job_id
assert result.outcome is JobOutcome.error
assert isinstance(result.exception, RuntimeError)
- assert str(result.exception) == 'failing as requested'
+ assert str(result.exception) == "failing as requested"
async def test_get_job_result_nowait_not_yet_ready(self) -> None:
async with AsyncScheduler() as scheduler:
- job_id = await scheduler.add_job(dummy_async_job, kwargs={'delay': 0.2})
+ job_id = await scheduler.add_job(dummy_async_job, kwargs={"delay": 0.2})
with pytest.raises(JobLookupError):
await scheduler.get_job_result(job_id, wait=False)
async def test_run_job_success(self) -> None:
async with AsyncScheduler() as scheduler:
return_value = await scheduler.run_job(dummy_async_job)
- assert return_value == 'returnvalue'
+ assert return_value == "returnvalue"
async def test_run_job_failure(self) -> None:
async with AsyncScheduler() as scheduler:
- with pytest.raises(RuntimeError, match='failing as requested'):
- await scheduler.run_job(dummy_async_job, kwargs={'fail': True})
+ with pytest.raises(RuntimeError, match="failing as requested"):
+ await scheduler.run_job(dummy_async_job, kwargs={"fail": True})
async def test_contextvars(self) -> None:
def check_contextvars() -> None:
assert current_scheduler.get() is scheduler
assert current_worker.get() is scheduler.worker
info = job_info.get()
- assert info.task_id == 'task_id'
- assert info.schedule_id == 'foo'
+ assert info.task_id == "task_id"
+ assert info.schedule_id == "foo"
assert info.scheduled_fire_time == scheduled_fire_time
assert info.jitter == timedelta(seconds=2.16)
assert info.start_deadline == start_deadline
- assert info.tags == {'foo', 'bar'}
+ assert info.tags == {"foo", "bar"}
scheduled_fire_time = datetime.now(timezone.utc)
start_deadline = datetime.now(timezone.utc) + timedelta(seconds=10)
async with AsyncScheduler() as scheduler:
- await scheduler.data_store.add_task(Task(id='task_id', func=check_contextvars))
- job = Job(task_id='task_id', schedule_id='foo',
- scheduled_fire_time=scheduled_fire_time, jitter=timedelta(seconds=2.16),
- start_deadline=start_deadline, tags={'foo', 'bar'})
+ await scheduler.data_store.add_task(
+ Task(id="task_id", func=check_contextvars)
+ )
+ job = Job(
+ task_id="task_id",
+ schedule_id="foo",
+ scheduled_fire_time=scheduled_fire_time,
+ jitter=timedelta(seconds=2.16),
+ start_deadline=start_deadline,
+ tags={"foo", "bar"},
+ )
await scheduler.data_store.add_job(job)
result = await scheduler.get_job_result(job.id)
if result.outcome is JobOutcome.error:
@@ -208,7 +232,7 @@ class TestSyncScheduler:
scheduler.events.subscribe(listener)
trigger = DateTrigger(datetime.now(timezone.utc))
with scheduler:
- scheduler.add_schedule(dummy_sync_job, trigger, id='foo')
+ scheduler.add_schedule(dummy_sync_job, trigger, id="foo")
event.wait(3)
# The scheduler was first started
@@ -218,23 +242,23 @@ class TestSyncScheduler:
# Then the task was added
received_event = received_events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'test_schedulers:dummy_sync_job'
+ assert received_event.task_id == "test_schedulers:dummy_sync_job"
# Then a schedule was added
received_event = received_events.pop(0)
assert isinstance(received_event, ScheduleAdded)
- assert received_event.schedule_id == 'foo'
+ assert received_event.schedule_id == "foo"
# Then that schedule was processed and a job was added for it
received_event = received_events.pop(0)
assert isinstance(received_event, JobAdded)
- assert received_event.schedule_id == 'foo'
- assert received_event.task_id == 'test_schedulers:dummy_sync_job'
+ assert received_event.schedule_id == "foo"
+ assert received_event.task_id == "test_schedulers:dummy_sync_job"
# Then the schedule was removed since the trigger had been exhausted
received_event = received_events.pop(0)
assert isinstance(received_event, ScheduleRemoved)
- assert received_event.schedule_id == 'foo'
+ assert received_event.schedule_id == "foo"
# Finally, the scheduler was stopped
received_event = received_events.pop(0)
@@ -243,12 +267,17 @@ class TestSyncScheduler:
# There should be no more events on the list
assert not received_events
- @pytest.mark.parametrize('max_jitter, expected_upper_bound', [
- pytest.param(2, 2, id='within'),
- pytest.param(4, 2.999999, id='exceed')
- ])
- def test_jitter(self, mocker: MockerFixture, timezone: ZoneInfo, max_jitter: float,
- expected_upper_bound: float) -> None:
+ @pytest.mark.parametrize(
+ "max_jitter, expected_upper_bound",
+ [pytest.param(2, 2, id="within"), pytest.param(4, 2.999999, id="exceed")],
+ )
+ def test_jitter(
+ self,
+ mocker: MockerFixture,
+ timezone: ZoneInfo,
+ max_jitter: float,
+ expected_upper_bound: float,
+ ) -> None:
job_id: UUID | None = None
def job_added_listener(event: Event) -> None:
@@ -259,13 +288,15 @@ class TestSyncScheduler:
jitter = 1.569374
orig_start_time = datetime.now(timezone) - timedelta(seconds=1)
- fake_uniform = mocker.patch('random.uniform')
+ fake_uniform = mocker.patch("random.uniform")
fake_uniform.configure_mock(side_effect=lambda a, b: jitter)
with Scheduler(start_worker=False) as scheduler:
trigger = IntervalTrigger(seconds=3, start_time=orig_start_time)
job_added_event = threading.Event()
scheduler.events.subscribe(job_added_listener, {JobAdded})
- schedule_id = scheduler.add_schedule(dummy_async_job, trigger, max_jitter=max_jitter)
+ schedule_id = scheduler.add_schedule(
+ dummy_async_job, trigger, max_jitter=max_jitter
+ )
schedule = scheduler.get_schedule(schedule_id)
assert schedule.max_jitter == timedelta(seconds=max_jitter)
@@ -277,7 +308,9 @@ class TestSyncScheduler:
# Check that the job was created with the proper amount of jitter in its scheduled time
jobs = scheduler.data_store.get_jobs({job_id})
assert jobs[0].jitter == timedelta(seconds=jitter)
- assert jobs[0].scheduled_fire_time == orig_start_time + timedelta(seconds=jitter)
+ assert jobs[0].scheduled_fire_time == orig_start_time + timedelta(
+ seconds=jitter
+ )
assert jobs[0].original_scheduled_time == orig_start_time
def test_get_job_result(self) -> None:
@@ -285,52 +318,59 @@ class TestSyncScheduler:
job_id = scheduler.add_job(dummy_sync_job)
result = scheduler.get_job_result(job_id)
assert result.outcome is JobOutcome.success
- assert result.return_value == 'returnvalue'
+ assert result.return_value == "returnvalue"
def test_get_job_result_error(self) -> None:
with Scheduler() as scheduler:
- job_id = scheduler.add_job(dummy_sync_job, kwargs={'delay': 0.2, 'fail': True})
+ job_id = scheduler.add_job(
+ dummy_sync_job, kwargs={"delay": 0.2, "fail": True}
+ )
result = scheduler.get_job_result(job_id)
assert result.job_id == job_id
assert result.outcome is JobOutcome.error
assert isinstance(result.exception, RuntimeError)
- assert str(result.exception) == 'failing as requested'
+ assert str(result.exception) == "failing as requested"
def test_get_job_result_nowait_not_yet_ready(self) -> None:
with Scheduler() as scheduler:
- job_id = scheduler.add_job(dummy_sync_job, kwargs={'delay': 0.2})
+ job_id = scheduler.add_job(dummy_sync_job, kwargs={"delay": 0.2})
with pytest.raises(JobLookupError):
scheduler.get_job_result(job_id, wait=False)
def test_run_job_success(self) -> None:
with Scheduler() as scheduler:
return_value = scheduler.run_job(dummy_sync_job)
- assert return_value == 'returnvalue'
+ assert return_value == "returnvalue"
def test_run_job_failure(self) -> None:
with Scheduler() as scheduler:
- with pytest.raises(RuntimeError, match='failing as requested'):
- scheduler.run_job(dummy_sync_job, kwargs={'fail': True})
+ with pytest.raises(RuntimeError, match="failing as requested"):
+ scheduler.run_job(dummy_sync_job, kwargs={"fail": True})
def test_contextvars(self) -> None:
def check_contextvars() -> None:
assert current_scheduler.get() is scheduler
assert current_worker.get() is scheduler.worker
info = job_info.get()
- assert info.task_id == 'task_id'
- assert info.schedule_id == 'foo'
+ assert info.task_id == "task_id"
+ assert info.schedule_id == "foo"
assert info.scheduled_fire_time == scheduled_fire_time
assert info.jitter == timedelta(seconds=2.16)
assert info.start_deadline == start_deadline
- assert info.tags == {'foo', 'bar'}
+ assert info.tags == {"foo", "bar"}
scheduled_fire_time = datetime.now(timezone.utc)
start_deadline = datetime.now(timezone.utc) + timedelta(seconds=10)
with Scheduler() as scheduler:
- scheduler.data_store.add_task(Task(id='task_id', func=check_contextvars))
- job = Job(task_id='task_id', schedule_id='foo',
- scheduled_fire_time=scheduled_fire_time, jitter=timedelta(seconds=2.16),
- start_deadline=start_deadline, tags={'foo', 'bar'})
+ scheduler.data_store.add_task(Task(id="task_id", func=check_contextvars))
+ job = Job(
+ task_id="task_id",
+ schedule_id="foo",
+ scheduled_fire_time=scheduled_fire_time,
+ jitter=timedelta(seconds=2.16),
+ start_deadline=start_deadline,
+ tags={"foo", "bar"},
+ )
scheduler.data_store.add_job(job)
result = scheduler.get_job_result(job.id)
if result.outcome is JobOutcome.error:
diff --git a/tests/test_workers.py b/tests/test_workers.py
index dcfafb1..6e5568f 100644
--- a/tests/test_workers.py
+++ b/tests/test_workers.py
@@ -12,7 +12,14 @@ from apscheduler.abc import Job
from apscheduler.datastores.memory import MemoryDataStore
from apscheduler.enums import JobOutcome
from apscheduler.events import (
- Event, JobAcquired, JobAdded, JobReleased, TaskAdded, WorkerStarted, WorkerStopped)
+ Event,
+ JobAcquired,
+ JobAdded,
+ JobReleased,
+ TaskAdded,
+ WorkerStarted,
+ WorkerStopped,
+)
from apscheduler.structures import Task
from apscheduler.workers.async_ import AsyncWorker
from apscheduler.workers.sync import Worker
@@ -22,26 +29,30 @@ pytestmark = pytest.mark.anyio
def sync_func(*args, fail: bool, **kwargs):
if fail:
- raise Exception('failing as requested')
+ raise Exception("failing as requested")
else:
return args, kwargs
async def async_func(*args, fail: bool, **kwargs):
if fail:
- raise Exception('failing as requested')
+ raise Exception("failing as requested")
else:
return args, kwargs
def fail_func():
- pytest.fail('This function should never be run')
+ pytest.fail("This function should never be run")
class TestAsyncWorker:
- @pytest.mark.parametrize('target_func', [sync_func, async_func], ids=['sync', 'async'])
- @pytest.mark.parametrize('fail', [False, True], ids=['success', 'fail'])
- async def test_run_job_nonscheduled_success(self, target_func: Callable, fail: bool) -> None:
+ @pytest.mark.parametrize(
+ "target_func", [sync_func, async_func], ids=["sync", "async"]
+ )
+ @pytest.mark.parametrize("fail", [False, True], ids=["success", "fail"])
+ async def test_run_job_nonscheduled_success(
+ self, target_func: Callable, fail: bool
+ ) -> None:
def listener(received_event: Event):
received_events.append(received_event)
if len(received_events) == 5:
@@ -53,8 +64,8 @@ class TestAsyncWorker:
worker = AsyncWorker(data_store)
worker.events.subscribe(listener)
async with worker:
- await worker.data_store.add_task(Task(id='task_id', func=target_func))
- job = Job(task_id='task_id', args=(1, 2), kwargs={'x': 'foo', 'fail': fail})
+ await worker.data_store.add_task(Task(id="task_id", func=target_func))
+ job = Job(task_id="task_id", args=(1, 2), kwargs={"x": "foo", "fail": fail})
await worker.data_store.add_job(job)
with fail_after(3):
await event.wait()
@@ -66,13 +77,13 @@ class TestAsyncWorker:
# Then the task was added
received_event = received_events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'task_id'
+ assert received_event.task_id == "task_id"
# Then a job was added
received_event = received_events.pop(0)
assert isinstance(received_event, JobAdded)
assert received_event.job_id == job.id
- assert received_event.task_id == 'task_id'
+ assert received_event.task_id == "task_id"
assert received_event.schedule_id is None
# Then the job was started
@@ -111,10 +122,13 @@ class TestAsyncWorker:
worker = AsyncWorker(data_store)
worker.events.subscribe(listener)
async with worker:
- await worker.data_store.add_task(Task(id='task_id', func=fail_func))
- job = Job(task_id='task_id', schedule_id='foo',
- scheduled_fire_time=scheduled_start_time,
- start_deadline=datetime(2020, 9, 14, 1, tzinfo=timezone.utc))
+ await worker.data_store.add_task(Task(id="task_id", func=fail_func))
+ job = Job(
+ task_id="task_id",
+ schedule_id="foo",
+ scheduled_fire_time=scheduled_start_time,
+ start_deadline=datetime(2020, 9, 14, 1, tzinfo=timezone.utc),
+ )
await worker.data_store.add_job(job)
with fail_after(3):
await event.wait()
@@ -126,14 +140,14 @@ class TestAsyncWorker:
# Then the task was added
received_event = received_events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'task_id'
+ assert received_event.task_id == "task_id"
# Then a job was added
received_event = received_events.pop(0)
assert isinstance(received_event, JobAdded)
assert received_event.job_id == job.id
- assert received_event.task_id == 'task_id'
- assert received_event.schedule_id == 'foo'
+ assert received_event.task_id == "task_id"
+ assert received_event.schedule_id == "foo"
# The worker acquired the job
received_event = received_events.pop(0)
@@ -157,7 +171,7 @@ class TestAsyncWorker:
class TestSyncWorker:
- @pytest.mark.parametrize('fail', [False, True], ids=['success', 'fail'])
+ @pytest.mark.parametrize("fail", [False, True], ids=["success", "fail"])
def test_run_job_nonscheduled(self, fail: bool) -> None:
def listener(received_event: Event):
received_events.append(received_event)
@@ -170,8 +184,8 @@ class TestSyncWorker:
worker = Worker(data_store)
worker.events.subscribe(listener)
with worker:
- worker.data_store.add_task(Task(id='task_id', func=sync_func))
- job = Job(task_id='task_id', args=(1, 2), kwargs={'x': 'foo', 'fail': fail})
+ worker.data_store.add_task(Task(id="task_id", func=sync_func))
+ job = Job(task_id="task_id", args=(1, 2), kwargs={"x": "foo", "fail": fail})
worker.data_store.add_job(job)
event.wait(5)
@@ -182,13 +196,13 @@ class TestSyncWorker:
# Then the task was added
received_event = received_events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'task_id'
+ assert received_event.task_id == "task_id"
# Then a job was added
received_event = received_events.pop(0)
assert isinstance(received_event, JobAdded)
assert received_event.job_id == job.id
- assert received_event.task_id == 'task_id'
+ assert received_event.task_id == "task_id"
assert received_event.schedule_id is None
# Then the job was started
@@ -227,10 +241,13 @@ class TestSyncWorker:
worker = Worker(data_store)
worker.events.subscribe(listener)
with worker:
- worker.data_store.add_task(Task(id='task_id', func=fail_func))
- job = Job(task_id='task_id', schedule_id='foo',
- scheduled_fire_time=scheduled_start_time,
- start_deadline=datetime(2020, 9, 14, 1, tzinfo=timezone.utc))
+ worker.data_store.add_task(Task(id="task_id", func=fail_func))
+ job = Job(
+ task_id="task_id",
+ schedule_id="foo",
+ scheduled_fire_time=scheduled_start_time,
+ start_deadline=datetime(2020, 9, 14, 1, tzinfo=timezone.utc),
+ )
worker.data_store.add_job(job)
event.wait(3)
@@ -241,14 +258,14 @@ class TestSyncWorker:
# Then the task was added
received_event = received_events.pop(0)
assert isinstance(received_event, TaskAdded)
- assert received_event.task_id == 'task_id'
+ assert received_event.task_id == "task_id"
# Then a job was added
received_event = received_events.pop(0)
assert isinstance(received_event, JobAdded)
assert received_event.job_id == job.id
- assert received_event.task_id == 'task_id'
- assert received_event.schedule_id == 'foo'
+ assert received_event.task_id == "task_id"
+ assert received_event.schedule_id == "foo"
# The worker acquired the job
received_event = received_events.pop(0)
diff --git a/tests/triggers/test_calendarinterval.py b/tests/triggers/test_calendarinterval.py
index dd45212..4466c34 100644
--- a/tests/triggers/test_calendarinterval.py
+++ b/tests/triggers/test_calendarinterval.py
@@ -9,20 +9,27 @@ from apscheduler.triggers.calendarinterval import CalendarIntervalTrigger
def test_bad_interval(timezone):
exc = pytest.raises(ValueError, CalendarIntervalTrigger, timezone=timezone)
- exc.match('interval must be at least 1 day long')
+ exc.match("interval must be at least 1 day long")
def test_bad_start_end_dates(timezone):
- exc = pytest.raises(ValueError, CalendarIntervalTrigger, days=1,
- start_date=date(2016, 3, 4), end_date=date(2016, 3, 3), timezone=timezone)
- exc.match('end_date cannot be earlier than start_date')
+ exc = pytest.raises(
+ ValueError,
+ CalendarIntervalTrigger,
+ days=1,
+ start_date=date(2016, 3, 4),
+ end_date=date(2016, 3, 3),
+ timezone=timezone,
+ )
+ exc.match("end_date cannot be earlier than start_date")
def test_end_date(timezone, serializer):
"""Test that end_date is respected."""
start_end_date = date(2020, 12, 31)
- trigger = CalendarIntervalTrigger(days=1, start_date=start_end_date, end_date=start_end_date,
- timezone=timezone)
+ trigger = CalendarIntervalTrigger(
+ days=1, start_date=start_end_date, end_date=start_end_date, timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -36,8 +43,9 @@ def test_missing_time(timezone, serializer):
skipped entirely.
"""
- trigger = CalendarIntervalTrigger(days=1, hour=2, minute=30, start_date=date(2016, 3, 27),
- timezone=timezone)
+ trigger = CalendarIntervalTrigger(
+ days=1, hour=2, minute=30, start_date=date(2016, 3, 27), timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -50,8 +58,9 @@ def test_repeated_time(timezone, serializer):
is executed on the earlier occurrence of that time.
"""
- trigger = CalendarIntervalTrigger(days=2, hour=2, minute=30, start_date=date(2016, 10, 30),
- timezone=timezone)
+ trigger = CalendarIntervalTrigger(
+ days=2, hour=2, minute=30, start_date=date(2016, 10, 30), timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -60,7 +69,9 @@ def test_repeated_time(timezone, serializer):
def test_nonexistent_days(timezone, serializer):
"""Test that invalid dates are skipped."""
- trigger = CalendarIntervalTrigger(months=1, start_date=date(2016, 3, 31), timezone=timezone)
+ trigger = CalendarIntervalTrigger(
+ months=1, start_date=date(2016, 3, 31), timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -70,11 +81,21 @@ def test_nonexistent_days(timezone, serializer):
def test_repr(timezone, serializer):
trigger = CalendarIntervalTrigger(
- years=1, months=5, weeks=6, days=8, hour=3, second=8, start_date=date(2016, 3, 5),
- end_date=date(2020, 12, 25), timezone=timezone)
+ years=1,
+ months=5,
+ weeks=6,
+ days=8,
+ hour=3,
+ second=8,
+ start_date=date(2016, 3, 5),
+ end_date=date(2020, 12, 25),
+ timezone=timezone,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
- assert repr(trigger) == ("CalendarIntervalTrigger(years=1, months=5, weeks=6, days=8, "
- "time='03:00:08', start_date='2016-03-05', end_date='2020-12-25', "
- "timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CalendarIntervalTrigger(years=1, months=5, weeks=6, days=8, "
+ "time='03:00:08', start_date='2016-03-05', end_date='2020-12-25', "
+ "timezone='Europe/Berlin')"
+ )
diff --git a/tests/triggers/test_combining.py b/tests/triggers/test_combining.py
index fbdbfd0..9a59ecb 100644
--- a/tests/triggers/test_combining.py
+++ b/tests/triggers/test_combining.py
@@ -11,11 +11,13 @@ from apscheduler.triggers.interval import IntervalTrigger
class TestAndTrigger:
- @pytest.mark.parametrize('threshold', [1, 0])
+ @pytest.mark.parametrize("threshold", [1, 0])
def test_two_datetriggers(self, timezone, serializer, threshold):
date1 = datetime(2020, 5, 16, 14, 17, 30, 254212, tzinfo=timezone)
date2 = datetime(2020, 5, 16, 14, 17, 31, 254212, tzinfo=timezone)
- trigger = AndTrigger([DateTrigger(date1), DateTrigger(date2)], threshold=threshold)
+ trigger = AndTrigger(
+ [DateTrigger(date1), DateTrigger(date2)], threshold=threshold
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -27,10 +29,14 @@ class TestAndTrigger:
def test_max_iterations(self, timezone, serializer):
start_time = datetime(2020, 5, 16, 14, 17, 30, 254212, tzinfo=timezone)
- trigger = AndTrigger([
- IntervalTrigger(seconds=4, start_time=start_time),
- IntervalTrigger(seconds=4, start_time=start_time + timedelta(seconds=2))
- ])
+ trigger = AndTrigger(
+ [
+ IntervalTrigger(seconds=4, start_time=start_time),
+ IntervalTrigger(
+ seconds=4, start_time=start_time + timedelta(seconds=2)
+ ),
+ ]
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -38,10 +44,14 @@ class TestAndTrigger:
def test_repr(self, timezone, serializer):
start_time = datetime(2020, 5, 16, 14, 17, 30, 254212, tzinfo=timezone)
- trigger = AndTrigger([
- IntervalTrigger(seconds=4, start_time=start_time),
- IntervalTrigger(seconds=4, start_time=start_time + timedelta(seconds=2))
- ])
+ trigger = AndTrigger(
+ [
+ IntervalTrigger(seconds=4, start_time=start_time),
+ IntervalTrigger(
+ seconds=4, start_time=start_time + timedelta(seconds=2)
+ ),
+ ]
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -68,10 +78,12 @@ class TestOrTrigger:
start_time = datetime(2020, 5, 16, 14, 17, 30, 254212, tzinfo=timezone)
end_time1 = start_time + timedelta(seconds=16)
end_time2 = start_time + timedelta(seconds=18)
- trigger = OrTrigger([
- IntervalTrigger(seconds=4, start_time=start_time, end_time=end_time1),
- IntervalTrigger(seconds=6, start_time=start_time, end_time=end_time2)
- ])
+ trigger = OrTrigger(
+ [
+ IntervalTrigger(seconds=4, start_time=start_time, end_time=end_time1),
+ IntervalTrigger(seconds=6, start_time=start_time, end_time=end_time2),
+ ]
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -90,5 +102,7 @@ class TestOrTrigger:
date1 = datetime(2020, 5, 16, 14, 17, 30, 254212, tzinfo=timezone)
date2 = datetime(2020, 5, 18, 15, 1, 53, 940564, tzinfo=timezone)
trigger = OrTrigger([DateTrigger(date1), DateTrigger(date2)])
- assert repr(trigger) == ("OrTrigger([DateTrigger('2020-05-16 14:17:30.254212+02:00'), "
- "DateTrigger('2020-05-18 15:01:53.940564+02:00')])")
+ assert repr(trigger) == (
+ "OrTrigger([DateTrigger('2020-05-16 14:17:30.254212+02:00'), "
+ "DateTrigger('2020-05-18 15:01:53.940564+02:00')])"
+ )
diff --git a/tests/triggers/test_cron.py b/tests/triggers/test_cron.py
index 578dd7d..924475a 100644
--- a/tests/triggers/test_cron.py
+++ b/tests/triggers/test_cron.py
@@ -14,55 +14,81 @@ else:
def test_invalid_expression():
- exc = pytest.raises(ValueError, CronTrigger, year='2009-fault')
+ exc = pytest.raises(ValueError, CronTrigger, year="2009-fault")
exc.match("Unrecognized expression '2009-fault' for field 'year'")
def test_invalid_step():
- exc = pytest.raises(ValueError, CronTrigger, year='2009/0')
+ exc = pytest.raises(ValueError, CronTrigger, year="2009/0")
exc.match("Step must be higher than 0")
def test_invalid_range():
- exc = pytest.raises(ValueError, CronTrigger, year='2009-2008')
+ exc = pytest.raises(ValueError, CronTrigger, year="2009-2008")
exc.match("The minimum value in a range must not be higher than the maximum")
-@pytest.mark.parametrize('expr', ['fab', 'jan-fab'], ids=['start', 'end'])
+@pytest.mark.parametrize("expr", ["fab", "jan-fab"], ids=["start", "end"])
def test_invalid_month_name(expr):
exc = pytest.raises(ValueError, CronTrigger, month=expr)
exc.match("Invalid month name 'fab'")
-@pytest.mark.parametrize('expr', ['web', 'mon-web'], ids=['start', 'end'])
+@pytest.mark.parametrize("expr", ["web", "mon-web"], ids=["start", "end"])
def test_invalid_weekday_name(expr):
exc = pytest.raises(ValueError, CronTrigger, day_of_week=expr)
exc.match("Invalid weekday name 'web'")
def test_invalid_weekday_position_name():
- exc = pytest.raises(ValueError, CronTrigger, day='1st web')
+ exc = pytest.raises(ValueError, CronTrigger, day="1st web")
exc.match("Invalid weekday name 'web'")
-@pytest.mark.parametrize('values, expected', [
- (dict(day='*/31'), r"Error validating expression '\*/31': the step value \(31\) is higher "
- r"than the total range of the expression \(30\)"),
- (dict(day='4-6/3'), r"Error validating expression '4-6/3': the step value \(3\) is higher "
- r"than the total range of the expression \(2\)"),
- (dict(hour='0-24'), r"Error validating expression '0-24': the last value \(24\) is higher "
- r"than the maximum value \(23\)"),
- (dict(day='0-3'), r"Error validating expression '0-3': the first value \(0\) is lower "
- r"than the minimum value \(1\)")
-], ids=['too_large_step_all', 'too_large_step_range', 'too_high_last', 'too_low_first'])
+@pytest.mark.parametrize(
+ "values, expected",
+ [
+ (
+ dict(day="*/31"),
+ r"Error validating expression '\*/31': the step value \(31\) is higher "
+ r"than the total range of the expression \(30\)",
+ ),
+ (
+ dict(day="4-6/3"),
+ r"Error validating expression '4-6/3': the step value \(3\) is higher "
+ r"than the total range of the expression \(2\)",
+ ),
+ (
+ dict(hour="0-24"),
+ r"Error validating expression '0-24': the last value \(24\) is higher "
+ r"than the maximum value \(23\)",
+ ),
+ (
+ dict(day="0-3"),
+ r"Error validating expression '0-3': the first value \(0\) is lower "
+ r"than the minimum value \(1\)",
+ ),
+ ],
+ ids=[
+ "too_large_step_all",
+ "too_large_step_range",
+ "too_high_last",
+ "too_low_first",
+ ],
+)
def test_invalid_ranges(values, expected):
pytest.raises(ValueError, CronTrigger, **values).match(expected)
def test_cron_trigger_1(timezone, serializer):
start_time = datetime(2008, 12, 1, tzinfo=timezone)
- trigger = CronTrigger(year='2009/2', month='1-4/3', day='5-6', start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year="2009/2",
+ month="1-4/3",
+ day="5-6",
+ start_time=start_time,
+ timezone=timezone,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -71,15 +97,18 @@ def test_cron_trigger_1(timezone, serializer):
assert trigger.next() == datetime(2009, 4, 5, tzinfo=timezone)
assert trigger.next() == datetime(2009, 4, 6, tzinfo=timezone)
assert trigger.next() == datetime(2011, 1, 5, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='2009/2', month='1-4/3', day='5-6', week='*', "
- "day_of_week='*', hour='0', minute='0', second='0', "
- "start_time='2008-12-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009/2', month='1-4/3', day='5-6', week='*', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2008-12-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_cron_trigger_2(timezone, serializer):
start_time = datetime(2009, 10, 14, tzinfo=timezone)
- trigger = CronTrigger(year='2009/2', month='1-3', day='5', start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year="2009/2", month="1-3", day="5", start_time=start_time, timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -87,57 +116,83 @@ def test_cron_trigger_2(timezone, serializer):
assert trigger.next() == datetime(2011, 2, 5, tzinfo=timezone)
assert trigger.next() == datetime(2011, 3, 5, tzinfo=timezone)
assert trigger.next() == datetime(2013, 1, 5, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='2009/2', month='1-3', day='5', week='*', "
- "day_of_week='*', hour='0', minute='0', second='0', "
- "start_time='2009-10-14T00:00:00+02:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009/2', month='1-3', day='5', week='*', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2009-10-14T00:00:00+02:00', timezone='Europe/Berlin')"
+ )
def test_cron_trigger_3(timezone, serializer):
start_time = datetime(2009, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year='2009', month='feb-dec', hour='8-9', start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year="2009",
+ month="feb-dec",
+ hour="8-9",
+ start_time=start_time,
+ timezone=timezone,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2009, 2, 1, 8, tzinfo=timezone)
assert trigger.next() == datetime(2009, 2, 1, 9, tzinfo=timezone)
assert trigger.next() == datetime(2009, 2, 2, 8, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='2009', month='feb-dec', day='*', week='*', "
- "day_of_week='*', hour='8-9', minute='0', second='0', "
- "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009', month='feb-dec', day='*', week='*', "
+ "day_of_week='*', hour='8-9', minute='0', second='0', "
+ "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_cron_trigger_4(timezone, serializer):
start_time = datetime(2012, 2, 1, tzinfo=timezone)
- trigger = CronTrigger(year='2012', month='2', day='last', start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year="2012", month="2", day="last", start_time=start_time, timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2012, 2, 29, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='2012', month='2', day='last', week='*', "
- "day_of_week='*', hour='0', minute='0', second='0', "
- "start_time='2012-02-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2012', month='2', day='last', week='*', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2012-02-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
-@pytest.mark.parametrize('expr', ['3-5', 'wed-fri'], ids=['numeric', 'text'])
+@pytest.mark.parametrize("expr", ["3-5", "wed-fri"], ids=["numeric", "text"])
def test_weekday_overlap(timezone, serializer, expr):
start_time = datetime(2009, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2009, month=1, day='6-10', day_of_week=expr, start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year=2009,
+ month=1,
+ day="6-10",
+ day_of_week=expr,
+ start_time=start_time,
+ timezone=timezone,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2009, 1, 7, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='2009', month='1', day='6-10', week='*', "
- "day_of_week='wed-fri', hour='0', minute='0', second='0', "
- "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009', month='1', day='6-10', week='*', "
+ "day_of_week='wed-fri', hour='0', minute='0', second='0', "
+ "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_weekday_range(timezone, serializer):
start_time = datetime(2020, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2020, month=1, week=1, day_of_week='fri-sun', start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year=2020,
+ month=1,
+ week=1,
+ day_of_week="fri-sun",
+ start_time=start_time,
+ timezone=timezone,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -145,23 +200,29 @@ def test_weekday_range(timezone, serializer):
assert trigger.next() == datetime(2020, 1, 4, tzinfo=timezone)
assert trigger.next() == datetime(2020, 1, 5, tzinfo=timezone)
assert trigger.next() is None
- assert repr(trigger) == ("CronTrigger(year='2020', month='1', day='*', week='1', "
- "day_of_week='fri-sun', hour='0', minute='0', second='0', "
- "start_time='2020-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2020', month='1', day='*', week='1', "
+ "day_of_week='fri-sun', hour='0', minute='0', second='0', "
+ "start_time='2020-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_last_weekday(timezone, serializer):
start_time = datetime(2020, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2020, day='last sun', start_time=start_time, timezone=timezone)
+ trigger = CronTrigger(
+ year=2020, day="last sun", start_time=start_time, timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2020, 1, 26, tzinfo=timezone)
assert trigger.next() == datetime(2020, 2, 23, tzinfo=timezone)
assert trigger.next() == datetime(2020, 3, 29, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='2020', month='*', day='last sun', week='*', "
- "day_of_week='*', hour='0', minute='0', second='0', "
- "start_time='2020-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2020', month='*', day='last sun', week='*', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2020-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_increment_weekday(timezone, serializer):
@@ -171,14 +232,16 @@ def test_increment_weekday(timezone, serializer):
"""
start_time = datetime(2009, 9, 25, 7, tzinfo=timezone)
- trigger = CronTrigger(hour='5-6', start_time=start_time, timezone=timezone)
+ trigger = CronTrigger(hour="5-6", start_time=start_time, timezone=timezone)
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2009, 9, 26, 5, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='*', month='*', day='*', week='*', "
- "day_of_week='*', hour='5-6', minute='0', second='0', "
- "start_time='2009-09-25T07:00:00+02:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='*', month='*', day='*', week='*', "
+ "day_of_week='*', hour='5-6', minute='0', second='0', "
+ "start_time='2009-09-25T07:00:00+02:00', timezone='Europe/Berlin')"
+ )
def test_month_rollover(timezone, serializer):
@@ -191,52 +254,69 @@ def test_month_rollover(timezone, serializer):
assert trigger.next() == datetime(2016, 4, 30, tzinfo=timezone)
-@pytest.mark.parametrize('weekday', ['1,0', 'mon,sun'], ids=['numeric', 'text'])
+@pytest.mark.parametrize("weekday", ["1,0", "mon,sun"], ids=["numeric", "text"])
def test_weekday_nomatch(timezone, serializer, weekday):
start_time = datetime(2009, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2009, month=1, day='6-10', day_of_week=weekday,
- start_time=start_time, timezone=timezone)
+ trigger = CronTrigger(
+ year=2009,
+ month=1,
+ day="6-10",
+ day_of_week=weekday,
+ start_time=start_time,
+ timezone=timezone,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() is None
- assert repr(trigger) == ("CronTrigger(year='2009', month='1', day='6-10', week='*', "
- "day_of_week='mon,sun', hour='0', minute='0', second='0', "
- "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009', month='1', day='6-10', week='*', "
+ "day_of_week='mon,sun', hour='0', minute='0', second='0', "
+ "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_weekday_positional(timezone, serializer):
start_time = datetime(2009, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2009, month=1, day='4th wed', start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year=2009, month=1, day="4th wed", start_time=start_time, timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2009, 1, 28, tzinfo=timezone)
- assert repr(trigger) == ("CronTrigger(year='2009', month='1', day='4th wed', week='*', "
- "day_of_week='*', hour='0', minute='0', second='0', "
- "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009', month='1', day='4th wed', week='*', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_end_time(timezone, serializer):
"""Test that next() won't produce"""
start_time = datetime(2014, 4, 13, 2, tzinfo=timezone)
end_time = datetime(2014, 4, 13, 4, tzinfo=timezone)
- trigger = CronTrigger(hour=4, start_time=start_time, end_time=end_time, timezone=timezone)
+ trigger = CronTrigger(
+ hour=4, start_time=start_time, end_time=end_time, timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2014, 4, 13, 4, tzinfo=timezone)
assert trigger.next() is None
- assert repr(trigger) == ("CronTrigger(year='*', month='*', day='*', week='*', "
- "day_of_week='*', hour='4', minute='0', second='0', "
- "start_time='2014-04-13T02:00:00+02:00', "
- "end_time='2014-04-13T04:00:00+02:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='*', month='*', day='*', week='*', "
+ "day_of_week='*', hour='4', minute='0', second='0', "
+ "start_time='2014-04-13T02:00:00+02:00', "
+ "end_time='2014-04-13T04:00:00+02:00', timezone='Europe/Berlin')"
+ )
def test_week_1(timezone, serializer):
start_time = datetime(2009, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2009, month=2, week=8, start_time=start_time, timezone=timezone)
+ trigger = CronTrigger(
+ year=2009, month=2, week=8, start_time=start_time, timezone=timezone
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -244,100 +324,163 @@ def test_week_1(timezone, serializer):
assert trigger.next() == datetime(2009, 2, day, tzinfo=timezone)
assert trigger.next() is None
- assert repr(trigger) == ("CronTrigger(year='2009', month='2', day='*', week='8', "
- "day_of_week='*', hour='0', minute='0', second='0', "
- "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009', month='2', day='*', week='8', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
-@pytest.mark.parametrize('weekday', [3, 'wed'], ids=['numeric', 'text'])
+@pytest.mark.parametrize("weekday", [3, "wed"], ids=["numeric", "text"])
def test_week_2(timezone, serializer, weekday):
start_time = datetime(2009, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2009, week=15, day_of_week=weekday, start_time=start_time,
- timezone=timezone)
+ trigger = CronTrigger(
+ year=2009,
+ week=15,
+ day_of_week=weekday,
+ start_time=start_time,
+ timezone=timezone,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
assert trigger.next() == datetime(2009, 4, 8, tzinfo=timezone)
assert trigger.next() is None
- assert repr(trigger) == ("CronTrigger(year='2009', month='*', day='*', week='15', "
- "day_of_week='wed', hour='0', minute='0', second='0', "
- "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ assert repr(trigger) == (
+ "CronTrigger(year='2009', month='*', day='*', week='15', "
+ "day_of_week='wed', hour='0', minute='0', second='0', "
+ "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
@pytest.mark.parametrize(
- 'trigger_args, start_time, start_time_fold, correct_next_date,' 'correct_next_date_fold',
+ "trigger_args, start_time, start_time_fold, correct_next_date,"
+ "correct_next_date_fold",
[
- ({'hour': 8}, datetime(2013, 3, 9, 12), 0, datetime(2013, 3, 10, 8), 0),
- ({'hour': 8}, datetime(2013, 11, 2, 12), 0, datetime(2013, 11, 3, 8), 0),
- ({'minute': '*/30'}, datetime(2013, 3, 10, 1, 35), 0, datetime(2013, 3, 10, 3), 0),
- ({'minute': '*/30'}, datetime(2013, 11, 3, 1, 35), 0, datetime(2013, 11, 3, 1), 1)
- ], ids=['absolute_spring', 'absolute_autumn', 'interval_spring', 'interval_autumn'])
-def test_dst_change(trigger_args, start_time, start_time_fold, correct_next_date,
- correct_next_date_fold, serializer):
+ ({"hour": 8}, datetime(2013, 3, 9, 12), 0, datetime(2013, 3, 10, 8), 0),
+ ({"hour": 8}, datetime(2013, 11, 2, 12), 0, datetime(2013, 11, 3, 8), 0),
+ (
+ {"minute": "*/30"},
+ datetime(2013, 3, 10, 1, 35),
+ 0,
+ datetime(2013, 3, 10, 3),
+ 0,
+ ),
+ (
+ {"minute": "*/30"},
+ datetime(2013, 11, 3, 1, 35),
+ 0,
+ datetime(2013, 11, 3, 1),
+ 1,
+ ),
+ ],
+ ids=["absolute_spring", "absolute_autumn", "interval_spring", "interval_autumn"],
+)
+def test_dst_change(
+ trigger_args,
+ start_time,
+ start_time_fold,
+ correct_next_date,
+ correct_next_date_fold,
+ serializer,
+):
"""
Making sure that CronTrigger works correctly when crossing the DST switch threshold.
Note that you should explicitly compare datetimes as strings to avoid the internal datetime
comparison which would test for equality in the UTC timezone.
"""
- timezone = ZoneInfo('US/Eastern')
+ timezone = ZoneInfo("US/Eastern")
start_time = start_time.replace(tzinfo=timezone, fold=start_time_fold)
trigger = CronTrigger(timezone=timezone, start_time=start_time, **trigger_args)
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
- assert trigger.next() == correct_next_date.replace(tzinfo=timezone,
- fold=correct_next_date_fold)
+ assert trigger.next() == correct_next_date.replace(
+ tzinfo=timezone, fold=correct_next_date_fold
+ )
def test_zero_value(timezone):
start_time = datetime(2020, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year=2009, month=2, hour=0, start_time=start_time, timezone=timezone)
- assert repr(trigger) == ("CronTrigger(year='2009', month='2', day='*', week='*', "
- "day_of_week='*', hour='0', minute='0', second='0', "
- "start_time='2020-01-01T00:00:00+01:00', timezone='Europe/Berlin')")
+ trigger = CronTrigger(
+ year=2009, month=2, hour=0, start_time=start_time, timezone=timezone
+ )
+ assert repr(trigger) == (
+ "CronTrigger(year='2009', month='2', day='*', week='*', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2020-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
def test_year_list(timezone, serializer):
start_time = datetime(2009, 1, 1, tzinfo=timezone)
- trigger = CronTrigger(year='2009,2008', start_time=start_time, timezone=timezone)
- assert repr(trigger) == "CronTrigger(year='2009,2008', month='1', day='1', week='*', " \
- "day_of_week='*', hour='0', minute='0', second='0', " \
- "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ trigger = CronTrigger(year="2009,2008", start_time=start_time, timezone=timezone)
+ assert (
+ repr(trigger) == "CronTrigger(year='2009,2008', month='1', day='1', week='*', "
+ "day_of_week='*', hour='0', minute='0', second='0', "
+ "start_time='2009-01-01T00:00:00+01:00', timezone='Europe/Berlin')"
+ )
assert trigger.next() == datetime(2009, 1, 1, tzinfo=timezone)
assert trigger.next() is None
-@pytest.mark.parametrize('expr, expected_repr', [
- ('* * * * *',
- "CronTrigger(year='*', month='*', day='*', week='*', day_of_week='*', hour='*', minute='*', "
- "second='0', start_time='2020-05-19T19:53:22+02:00', timezone='Europe/Berlin')"),
- ('0-14 * 14-28 jul fri',
- "CronTrigger(year='*', month='jul', day='14-28', week='*', day_of_week='fri', hour='*', "
- "minute='0-14', second='0', start_time='2020-05-19T19:53:22+02:00', "
- "timezone='Europe/Berlin')"),
- (' 0-14 * 14-28 jul fri',
- "CronTrigger(year='*', month='jul', day='14-28', week='*', day_of_week='fri', hour='*', "
- "minute='0-14', second='0', start_time='2020-05-19T19:53:22+02:00', "
- "timezone='Europe/Berlin')"),
- ('* * * * 1-5',
- "CronTrigger(year='*', month='*', day='*', week='*', day_of_week='mon-fri', hour='*', "
- "minute='*', second='0', start_time='2020-05-19T19:53:22+02:00', "
- "timezone='Europe/Berlin')"),
- ('* * * * 0-3',
- "CronTrigger(year='*', month='*', day='*', week='*', day_of_week='mon-wed,sun', hour='*', "
- "minute='*', second='0', start_time='2020-05-19T19:53:22+02:00', "
- "timezone='Europe/Berlin')"),
- ('* * * * 6-1',
- "CronTrigger(year='*', month='*', day='*', week='*', day_of_week='mon,sat-sun', hour='*', "
- "minute='*', second='0', start_time='2020-05-19T19:53:22+02:00', "
- "timezone='Europe/Berlin')"),
- ('* * * * 6-7',
- "CronTrigger(year='*', month='*', day='*', week='*', day_of_week='sat-sun', hour='*', "
- "minute='*', second='0', start_time='2020-05-19T19:53:22+02:00', "
- "timezone='Europe/Berlin')"),
-], ids=['always', 'assorted', 'multiple_spaces_in_format', 'working_week', 'sunday_first',
- 'saturday_first', 'weekend'])
+@pytest.mark.parametrize(
+ "expr, expected_repr",
+ [
+ (
+ "* * * * *",
+            "CronTrigger(year='*', month='*', day='*', week='*', day_of_week='*', "
+ "hour='*', minute='*', second='0', start_time='2020-05-19T19:53:22+02:00', "
+ "timezone='Europe/Berlin')",
+ ),
+ (
+ "0-14 * 14-28 jul fri",
+ "CronTrigger(year='*', month='jul', day='14-28', week='*', "
+ "day_of_week='fri', hour='*', minute='0-14', second='0', "
+ "start_time='2020-05-19T19:53:22+02:00', timezone='Europe/Berlin')",
+ ),
+ (
+ " 0-14 * 14-28 jul fri",
+ "CronTrigger(year='*', month='jul', day='14-28', week='*', "
+ "day_of_week='fri', hour='*', minute='0-14', second='0', "
+ "start_time='2020-05-19T19:53:22+02:00', timezone='Europe/Berlin')",
+ ),
+ (
+ "* * * * 1-5",
+ "CronTrigger(year='*', month='*', day='*', week='*', "
+ "day_of_week='mon-fri', hour='*', minute='*', second='0', "
+ "start_time='2020-05-19T19:53:22+02:00', timezone='Europe/Berlin')",
+ ),
+ (
+ "* * * * 0-3",
+ "CronTrigger(year='*', month='*', day='*', week='*', "
+ "day_of_week='mon-wed,sun', hour='*', minute='*', second='0', "
+ "start_time='2020-05-19T19:53:22+02:00', timezone='Europe/Berlin')",
+ ),
+ (
+ "* * * * 6-1",
+ "CronTrigger(year='*', month='*', day='*', week='*', "
+ "day_of_week='mon,sat-sun', hour='*', minute='*', second='0', "
+ "start_time='2020-05-19T19:53:22+02:00', timezone='Europe/Berlin')",
+ ),
+ (
+ "* * * * 6-7",
+ "CronTrigger(year='*', month='*', day='*', week='*', "
+ "day_of_week='sat-sun', hour='*', minute='*', second='0', "
+ "start_time='2020-05-19T19:53:22+02:00', timezone='Europe/Berlin')",
+ ),
+ ],
+ ids=[
+ "always",
+ "assorted",
+ "multiple_spaces_in_format",
+ "working_week",
+ "sunday_first",
+ "saturday_first",
+ "weekend",
+ ],
+)
def test_from_crontab(expr, expected_repr, timezone, serializer):
trigger = CronTrigger.from_crontab(expr, timezone)
trigger.start_time = datetime(2020, 5, 19, 19, 53, 22, tzinfo=timezone)
@@ -348,5 +491,5 @@ def test_from_crontab(expr, expected_repr, timezone, serializer):
def test_from_crontab_wrong_number_of_fields():
- exc = pytest.raises(ValueError, CronTrigger.from_crontab, '*')
- exc.match('Wrong number of fields; got 1, expected 5')
+ exc = pytest.raises(ValueError, CronTrigger.from_crontab, "*")
+ exc.match("Wrong number of fields; got 1, expected 5")
diff --git a/tests/triggers/test_interval.py b/tests/triggers/test_interval.py
index 1778f61..04056fd 100644
--- a/tests/triggers/test_interval.py
+++ b/tests/triggers/test_interval.py
@@ -9,22 +9,25 @@ from apscheduler.triggers.interval import IntervalTrigger
def test_bad_interval():
exc = pytest.raises(ValueError, IntervalTrigger)
- exc.match('The time interval must be positive')
+ exc.match("The time interval must be positive")
def test_bad_end_time(timezone):
start_time = datetime(2020, 5, 16, tzinfo=timezone)
end_time = datetime(2020, 5, 15, tzinfo=timezone)
- exc = pytest.raises(ValueError, IntervalTrigger, seconds=1, start_time=start_time,
- end_time=end_time)
- exc.match('end_time cannot be earlier than start_time')
+ exc = pytest.raises(
+ ValueError, IntervalTrigger, seconds=1, start_time=start_time, end_time=end_time
+ )
+ exc.match("end_time cannot be earlier than start_time")
def test_end_time(timezone, serializer):
start_time = datetime(2020, 5, 16, 19, 32, 44, 649521, tzinfo=timezone)
end_time = datetime(2020, 5, 16, 22, 33, 1, tzinfo=timezone)
interval = timedelta(hours=1, seconds=6)
- trigger = IntervalTrigger(start_time=start_time, end_time=end_time, hours=1, seconds=6)
+ trigger = IntervalTrigger(
+ start_time=start_time, end_time=end_time, hours=1, seconds=6
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
@@ -37,11 +40,21 @@ def test_end_time(timezone, serializer):
def test_repr(timezone, serializer):
start_time = datetime(2020, 5, 15, 12, 55, 32, 954032, tzinfo=timezone)
end_time = datetime(2020, 6, 4, 16, 18, 49, 306942, tzinfo=timezone)
- trigger = IntervalTrigger(weeks=1, days=2, hours=3, minutes=4, seconds=5, microseconds=123525,
- start_time=start_time, end_time=end_time)
+ trigger = IntervalTrigger(
+ weeks=1,
+ days=2,
+ hours=3,
+ minutes=4,
+ seconds=5,
+ microseconds=123525,
+ start_time=start_time,
+ end_time=end_time,
+ )
if serializer:
trigger = serializer.deserialize(serializer.serialize(trigger))
- assert repr(trigger) == ("IntervalTrigger(weeks=1, days=2, hours=3, minutes=4, seconds=5, "
- "microseconds=123525, start_time='2020-05-15 12:55:32.954032+02:00', "
- "end_time='2020-06-04 16:18:49.306942+02:00')")
+ assert repr(trigger) == (
+ "IntervalTrigger(weeks=1, days=2, hours=3, minutes=4, seconds=5, "
+ "microseconds=123525, start_time='2020-05-15 12:55:32.954032+02:00', "
+ "end_time='2020-06-04 16:18:49.306942+02:00')"
+ )