author     Alex Grönholm <alex.gronholm@nextday.fi>    2022-09-12 22:09:05 +0300
committer  Alex Grönholm <alex.gronholm@nextday.fi>    2022-09-21 02:40:02 +0300
commit     c5727432736b55b7d76753307f14efdb962c2edf (patch)
tree       005bd129694b56bd601d65c4cdf43828cfcd4381 /examples
parent     26c4db062145fcb4f623ecfda96c42ce2414e8e1 (diff)
download   apscheduler-c5727432736b55b7d76753307f14efdb962c2edf.tar.gz
Major refactoring
- Made SyncScheduler a synchronous wrapper for AsyncScheduler
- Removed workers as a user interface
- Removed synchronous interfaces for data stores and event brokers and refactored
  existing implementations to use the async interface
- Added the current_async_scheduler contextvar
- Added job executors
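In practical terms, the dedicated worker classes go away and one scheduler class covers both roles: a process that should only manage schedules is started with process_jobs=False, while a process that should only execute jobs is started with process_schedules=False; the two coordinate through the shared data store and event broker. The following is a minimal sketch of that pattern, not part of the commit: the class names, keyword arguments and PostgreSQL/asyncpg setup are taken from the examples below, while the make_components helper, the function names and the connection URL placeholder are only illustrative.

import asyncio

from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler


def make_components():
    # Shared persistence and messaging; every process must point at the same ones.
    engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
    return SQLAlchemyDataStore(engine), AsyncpgEventBroker.from_async_sqla_engine(engine)


async def schedules_only():
    # Scheduler-only process: computes fire times, leaves job execution to others.
    data_store, event_broker = make_components()
    async with AsyncScheduler(data_store, event_broker, process_jobs=False) as scheduler:
        await scheduler.run_until_stopped()


async def jobs_only():
    # Job-runner process: takes over the role of the removed AsyncWorker.
    data_store, event_broker = make_components()
    async with AsyncScheduler(data_store, event_broker, process_schedules=False) as scheduler:
        await scheduler.run_until_stopped()


if __name__ == "__main__":
    # Run one role per process, e.g. jobs_only() here and schedules_only() elsewhere.
    asyncio.run(jobs_only())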
Diffstat (limited to 'examples')
-rw-r--r--  examples/separate_worker/async_scheduler.py | 10
-rw-r--r--  examples/separate_worker/async_worker.py    | 14
-rw-r--r--  examples/separate_worker/sync_worker.py     | 12
3 files changed, 18 insertions(+), 18 deletions(-)
diff --git a/examples/separate_worker/async_scheduler.py b/examples/separate_worker/async_scheduler.py
index 6ffdbcd..2ac53c5 100644
--- a/examples/separate_worker/async_scheduler.py
+++ b/examples/separate_worker/async_scheduler.py
@@ -19,7 +19,7 @@ import logging
from example_tasks import tick
from sqlalchemy.ext.asyncio import create_async_engine
-from apscheduler.datastores.async_sqlalchemy import AsyncSQLAlchemyDataStore
+from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler
from apscheduler.triggers.interval import IntervalTrigger
@@ -29,15 +29,15 @@ async def main():
engine = create_async_engine(
"postgresql+asyncpg://postgres:secret@localhost/testdb"
)
- data_store = AsyncSQLAlchemyDataStore(engine)
+ data_store = SQLAlchemyDataStore(engine)
event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
# Uncomment the next two lines to use the Redis event broker instead
- # from apscheduler.eventbrokers.async_redis import AsyncRedisEventBroker
- # event_broker = AsyncRedisEventBroker.from_url("redis://localhost")
+ # from apscheduler.eventbrokers.redis import RedisEventBroker
+ # event_broker = RedisEventBroker.from_url("redis://localhost")
async with AsyncScheduler(
- data_store, event_broker, start_worker=False
+ data_store, event_broker, process_jobs=False
) as scheduler:
await scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")
await scheduler.run_until_stopped()
diff --git a/examples/separate_worker/async_worker.py b/examples/separate_worker/async_worker.py
index 700720e..51c51e9 100644
--- a/examples/separate_worker/async_worker.py
+++ b/examples/separate_worker/async_worker.py
@@ -18,24 +18,24 @@ import logging
from sqlalchemy.ext.asyncio import create_async_engine
-from apscheduler.datastores.async_sqlalchemy import AsyncSQLAlchemyDataStore
+from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
-from apscheduler.workers.async_ import AsyncWorker
+from apscheduler.schedulers.async_ import AsyncScheduler
async def main():
engine = create_async_engine(
"postgresql+asyncpg://postgres:secret@localhost/testdb"
)
- data_store = AsyncSQLAlchemyDataStore(engine)
+ data_store = SQLAlchemyDataStore(engine)
event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
# Uncomment the next two lines to use the Redis event broker instead
- # from apscheduler.eventbrokers.async_redis import AsyncRedisEventBroker
- # event_broker = AsyncRedisEventBroker.from_url("redis://localhost")
+ # from apscheduler.eventbrokers.redis import RedisEventBroker
+ # event_broker = RedisEventBroker.from_url("redis://localhost")
- worker = AsyncWorker(data_store, event_broker)
- await worker.run_until_stopped()
+ scheduler = AsyncScheduler(data_store, event_broker, process_schedules=False)
+ await scheduler.run_until_stopped()
logging.basicConfig(level=logging.INFO)
diff --git a/examples/separate_worker/sync_worker.py b/examples/separate_worker/sync_worker.py
index e57be64..4329d02 100644
--- a/examples/separate_worker/sync_worker.py
+++ b/examples/separate_worker/sync_worker.py
@@ -1,7 +1,7 @@
"""
-Example demonstrating the separation of scheduler and worker.
-This script runs the worker part. You need to be running both this and the scheduler
-script simultaneously in order for the scheduled task to be run.
+Example demonstrating a scheduler that only runs jobs but does not process schedules.
+You need to be running both this and the scheduler script simultaneously in order for
+the scheduled task to be run.
Requires the "postgresql" and "redis" services to be running.
To install prerequisites: pip install sqlalchemy psycopg2 redis
@@ -19,7 +19,7 @@ from sqlalchemy.future import create_engine
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.redis import RedisEventBroker
-from apscheduler.workers.sync import Worker
+from apscheduler.schedulers.sync import Scheduler
logging.basicConfig(level=logging.INFO)
engine = create_engine("postgresql+psycopg2://postgres:secret@localhost/testdb")
@@ -30,5 +30,5 @@ event_broker = RedisEventBroker.from_url("redis://localhost")
# from apscheduler.eventbrokers.mqtt import MQTTEventBroker
# event_broker = MQTTEventBroker()
-worker = Worker(data_store, event_broker)
-worker.run_until_stopped()
+with Scheduler(data_store, event_broker, process_schedules=False) as scheduler:
+ scheduler.run_until_stopped()
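The diff only touches the job-running side of the synchronous example. For orientation, a schedule-only counterpart might look roughly like the sketch below; it is not part of this commit. It assumes the synchronous Scheduler mirrors the AsyncScheduler keyword arguments from this commit (process_jobs=False) and that the tick task lives in example_tasks as in the async examples, neither of which is shown in this diff.

import logging

from example_tasks import tick  # task module used by the examples; body not shown in this diff
from sqlalchemy.future import create_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.redis import RedisEventBroker
from apscheduler.schedulers.sync import Scheduler
from apscheduler.triggers.interval import IntervalTrigger

logging.basicConfig(level=logging.INFO)
engine = create_engine("postgresql+psycopg2://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
event_broker = RedisEventBroker.from_url("redis://localhost")

# process_jobs=False is an assumption: the sync wrapper presumably mirrors the
# keyword seen in examples/separate_worker/async_scheduler.py above.
with Scheduler(data_store, event_broker, process_jobs=False) as scheduler:
    scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")
    scheduler.run_until_stopped()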