author    Alex Grönholm <alex.gronholm@nextday.fi>    2022-09-12 22:09:05 +0300
committer Alex Grönholm <alex.gronholm@nextday.fi>    2022-09-21 02:40:02 +0300
commit    c5727432736b55b7d76753307f14efdb962c2edf (patch)
tree      005bd129694b56bd601d65c4cdf43828cfcd4381 /examples/separate_worker/async_worker.py
parent    26c4db062145fcb4f623ecfda96c42ce2414e8e1 (diff)
download  apscheduler-c5727432736b55b7d76753307f14efdb962c2edf.tar.gz
Major refactoring
- Made SyncScheduler a synchronous wrapper for AsyncScheduler
- Removed workers as a user interface
- Removed synchronous interfaces for data stores and event brokers and refactored existing implementations to use the async interface
- Added the current_async_scheduler contextvar
- Added job executors
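Editor's note: the diff below covers only the worker-side example. For orientation, here is a minimal sketch of what the companion scheduler-side process might look like under the refactored API. Only AsyncScheduler, SQLAlchemyDataStore and AsyncpgEventBroker are confirmed by this commit; the async context manager usage, the add_schedule() call, the IntervalTrigger import path and the tick() task are assumptions for illustration.

# Hypothetical companion scheduler process under the refactored API (a sketch,
# not part of this commit): it registers the schedule and processes schedules
# itself, while the worker-side example below runs with process_schedules=False.
from __future__ import annotations

import asyncio
from datetime import datetime

from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler
from apscheduler.triggers.interval import IntervalTrigger  # assumed import path


def tick() -> None:
    # Hypothetical task callable; in practice it has to live in a module that
    # the worker process can import as well.
    print("Hello, the time is", datetime.now())


async def main() -> None:
    engine = create_async_engine(
        "postgresql+asyncpg://postgres:secret@localhost/testdb"
    )
    data_store = SQLAlchemyDataStore(engine)
    event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)

    # Assumed usage: the scheduler as an async context manager, left at its
    # defaults so that it processes schedules into jobs itself.
    async with AsyncScheduler(data_store, event_broker) as scheduler:
        await scheduler.add_schedule(tick, IntervalTrigger(seconds=1))
        await scheduler.run_until_stopped()


asyncio.run(main())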
Diffstat (limited to 'examples/separate_worker/async_worker.py')
-rw-r--r--  examples/separate_worker/async_worker.py  14
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/examples/separate_worker/async_worker.py b/examples/separate_worker/async_worker.py
index 700720e..51c51e9 100644
--- a/examples/separate_worker/async_worker.py
+++ b/examples/separate_worker/async_worker.py
@@ -18,24 +18,24 @@ import logging
 
 from sqlalchemy.ext.asyncio import create_async_engine
 
-from apscheduler.datastores.async_sqlalchemy import AsyncSQLAlchemyDataStore
+from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
 from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
-from apscheduler.workers.async_ import AsyncWorker
+from apscheduler.schedulers.async_ import AsyncScheduler
 
 
 async def main():
     engine = create_async_engine(
         "postgresql+asyncpg://postgres:secret@localhost/testdb"
     )
-    data_store = AsyncSQLAlchemyDataStore(engine)
+    data_store = SQLAlchemyDataStore(engine)
     event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
 
     # Uncomment the next two lines to use the Redis event broker instead
-    # from apscheduler.eventbrokers.async_redis import AsyncRedisEventBroker
-    # event_broker = AsyncRedisEventBroker.from_url("redis://localhost")
+    # from apscheduler.eventbrokers.redis import RedisEventBroker
+    # event_broker = RedisEventBroker.from_url("redis://localhost")
 
-    worker = AsyncWorker(data_store, event_broker)
-    await worker.run_until_stopped()
+    scheduler = AsyncScheduler(data_store, event_broker, process_schedules=False)
+    await scheduler.run_until_stopped()
 
 
 logging.basicConfig(level=logging.INFO)
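
Editor's note: for readability, this is how the worker example reads after the patch, reassembled from the new side of the hunk above. The module docstring is omitted, and the lines marked as outside the hunk (the __future__ and asyncio imports and the final asyncio.run call) are assumptions based on the surrounding example, not shown in this diff.

# examples/separate_worker/async_worker.py after this patch (reassembled from
# the hunk above; lines marked "outside the hunk" are assumed, not confirmed).
from __future__ import annotations  # outside the hunk (assumed)

import asyncio  # outside the hunk (assumed)
import logging  # named in the hunk heading

from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler


async def main():
    engine = create_async_engine(
        "postgresql+asyncpg://postgres:secret@localhost/testdb"
    )
    data_store = SQLAlchemyDataStore(engine)
    event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)

    # Uncomment the next two lines to use the Redis event broker instead
    # from apscheduler.eventbrokers.redis import RedisEventBroker
    # event_broker = RedisEventBroker.from_url("redis://localhost")

    # process_schedules=False keeps this scheduler from turning schedules into
    # jobs; it only runs jobs, taking over the role of the removed AsyncWorker.
    scheduler = AsyncScheduler(data_store, event_broker, process_schedules=False)
    await scheduler.run_until_stopped()


logging.basicConfig(level=logging.INFO)
asyncio.run(main())  # outside the hunk (assumed)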