Diffstat (limited to 'examples/separate_worker/sync_worker.py')
-rw-r--r--  examples/separate_worker/sync_worker.py  28
1 file changed, 13 insertions, 15 deletions
diff --git a/examples/separate_worker/sync_worker.py b/examples/separate_worker/sync_worker.py
index 24018ba..27d66b0 100644
--- a/examples/separate_worker/sync_worker.py
+++ b/examples/separate_worker/sync_worker.py
@@ -1,35 +1,33 @@
"""
-Example demonstrating a scheduler that only runs jobs but does not process schedules.
-You need to be running both this and the scheduler script simultaneously in order for
-the scheduled task to be run.
-
-Requires the "postgresql" and "redis" services to be running.
-To install prerequisites: pip install sqlalchemy psycopg2 redis
+This is an example demonstrating how to run a scheduler to process schedules added by
+another scheduler elsewhere. Prior to starting this script, you need to run the script
+(either async_scheduler.py or sync_scheduler.py) that adds or updates a schedule to the
+data store. This script will then pick up that schedule and start spawning jobs that
+will print a line on the console at one-second intervals.
+
+This script requires the "postgresql" service to be running.
+To install prerequisites: pip install sqlalchemy asyncpg
To run: python sync_worker.py
-
-When run together with sync_scheduler.py, it should print a line on the
-console on a one-second interval.
"""
from __future__ import annotations
import logging
-from sqlalchemy.future import create_engine
+from sqlalchemy.ext.asyncio import create_async_engine
-from apscheduler import SchedulerRole
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
-from apscheduler.eventbrokers.redis import RedisEventBroker
+from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.sync import Scheduler
logging.basicConfig(level=logging.INFO)
-engine = create_engine("postgresql+psycopg2://postgres:secret@localhost/testdb")
+engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
-event_broker = RedisEventBroker.from_url("redis://localhost")
+event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
# Uncomment the next two lines to use the MQTT event broker instead
# from apscheduler.eventbrokers.mqtt import MQTTEventBroker
# event_broker = MQTTEventBroker()
-with Scheduler(data_store, event_broker, role=SchedulerRole.worker) as scheduler:
+with Scheduler(data_store, event_broker) as scheduler:
scheduler.run_until_stopped()
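
For context, the new docstring above refers to a companion script (async_scheduler.py or sync_scheduler.py) that adds or updates the schedule this worker executes. The snippet below is a minimal sketch of what such a companion scheduler script could look like against the same data store and event broker; it is not the repository's actual sync_scheduler.py, and the tick() function, the "tick" schedule id, and the one-second interval are illustrative choices mirroring the docstring. Depending on the APScheduler 4.0 pre-release in use, the companion script may also execute jobs itself unless configured otherwise (for example via the role argument this diff removes).

from datetime import datetime

from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.sync import Scheduler
from apscheduler.triggers.interval import IntervalTrigger


def tick():
    # Executed by the worker process started from sync_worker.py
    print("Hello, the time is", datetime.now())


# Same PostgreSQL database as sync_worker.py above
engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)

with Scheduler(data_store, event_broker) as scheduler:
    # Add (or update) the persistent schedule that the worker will pick up and run
    scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")

Because the schedule is persisted in the shared PostgreSQL data store, this sketch can simply exit after adding it; the worker started by sync_worker.py then spawns the jobs.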