author     Alex Grönholm <alex.gronholm@nextday.fi>   2022-09-22 00:19:37 +0300
committer  Alex Grönholm <alex.gronholm@nextday.fi>   2022-09-22 00:19:37 +0300
commit     40cf1621c6574cdbff9aaffa7b6ae83e1bf349d9 (patch)
tree       cd779fa0e27393fce724af241deb904fe8cdf1ed /examples
parent     3734b110e33cac99074c1fde2b240395d0739cf2 (diff)
download   apscheduler-40cf1621c6574cdbff9aaffa7b6ae83e1bf349d9.tar.gz
Replaced the separate scheduler/worker example with a more practical alternative
Diffstat (limited to 'examples')
-rw-r--r--  examples/separate_worker/async_scheduler.py   19
-rw-r--r--  examples/separate_worker/async_worker.py      34
-rw-r--r--  examples/separate_worker/sync_scheduler.py    27
-rw-r--r--  examples/separate_worker/sync_worker.py       28
4 files changed, 47 insertions(+), 61 deletions(-)
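All four example scripts import tick from example_tasks, a helper module that is not part of this diff. A minimal sketch of what it might contain, assuming the task simply prints a timestamped line as the docstrings below describe (the function body here is an assumption, only the module and function names come from the patch):

# example_tasks.py -- hypothetical sketch; the real module is not shown in this diff
from datetime import datetime

def tick():
    # A trivial task: print one line per invocation so the worker's activity is visible.
    print("Hello, the time is", datetime.now())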
diff --git a/examples/separate_worker/async_scheduler.py b/examples/separate_worker/async_scheduler.py
index 59c294c..570ff28 100644
--- a/examples/separate_worker/async_scheduler.py
+++ b/examples/separate_worker/async_scheduler.py
@@ -1,14 +1,12 @@
"""
-Example demonstrating the separation of scheduler and worker.
-This script runs the scheduler part. You need to be running both this and the worker
-script simultaneously in order for the scheduled task to be run.
+This is an example demonstrating the use of the scheduler as only an interface to the
+scheduling system. This script adds or updates a single schedule and then exits. To see
+the schedule acted on, you need to run the corresponding worker script (either
+async_worker.py or sync_worker.py).
-Requires the "postgresql" service to be running.
+This script requires the "postgresql" service to be running.
To install prerequisites: pip install sqlalchemy asyncpg
To run: python async_scheduler.py
-
-When run together with async_worker.py, it should print a line on the console
-on a one-second interval.
"""
from __future__ import annotations
@@ -19,7 +17,6 @@ import logging
from example_tasks import tick
from sqlalchemy.ext.asyncio import create_async_engine
-from apscheduler import SchedulerRole
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler
@@ -37,11 +34,9 @@ async def main():
# from apscheduler.eventbrokers.redis import RedisEventBroker
# event_broker = RedisEventBroker.from_url("redis://localhost")
- async with AsyncScheduler(
- data_store, event_broker, role=SchedulerRole.scheduler
- ) as scheduler:
+ async with AsyncScheduler(data_store, event_broker) as scheduler:
await scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")
- await scheduler.run_until_stopped()
+ # Note: we don't actually start the scheduler here!
logging.basicConfig(level=logging.INFO)
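Condensed, the pattern this hunk introduces for async_scheduler.py is to open the scheduler purely as a client of the shared data store: add or update the schedule inside the context manager and exit without ever calling run_until_stopped(). A sketch of how the whole script reads after the patch, reconstructed from the visible hunks (unchanged lines not shown in the diff, such as the final asyncio.run() call, are assumed):

import asyncio
import logging

from example_tasks import tick
from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler
from apscheduler.triggers.interval import IntervalTrigger

async def main():
    engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
    data_store = SQLAlchemyDataStore(engine)
    event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
    async with AsyncScheduler(data_store, event_broker) as scheduler:
        # Persist (or update) the schedule and exit; a worker elsewhere will run it.
        await scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")

logging.basicConfig(level=logging.INFO)
asyncio.run(main())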
diff --git a/examples/separate_worker/async_worker.py b/examples/separate_worker/async_worker.py
index a4e0ef9..0fa1579 100644
--- a/examples/separate_worker/async_worker.py
+++ b/examples/separate_worker/async_worker.py
@@ -1,14 +1,13 @@
"""
-Example demonstrating the separation of scheduler and worker.
-This script runs the scheduler part. You need to be running both this and the worker
-script simultaneously in order for the scheduled task to be run.
+This is an example demonstrating how to run a scheduler to process schedules added by
+another scheduler elsewhere. Prior to starting this script, you need to run the script
+(either async_scheduler.py or sync_scheduler.py) that adds or updates a schedule to the
+data store. This script will then pick up that schedule and start spawning jobs that
+will print a line on the console on one-second intervals.
-Requires the "postgresql" service to be running.
+This script requires the "postgresql" service to be running.
To install prerequisites: pip install sqlalchemy asyncpg
To run: python async_worker.py
-
-When run together with async_scheduler.py, it should print a line on the console
-on a one-second interval.
"""
from __future__ import annotations
@@ -18,26 +17,23 @@ import logging
from sqlalchemy.ext.asyncio import create_async_engine
-from apscheduler import SchedulerRole
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler
async def main():
- engine = create_async_engine(
- "postgresql+asyncpg://postgres:secret@localhost/testdb"
- )
- data_store = SQLAlchemyDataStore(engine)
- event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
+ async with AsyncScheduler(data_store, event_broker) as scheduler:
+ await scheduler.run_until_stopped()
- # Uncomment the next two lines to use the Redis event broker instead
- # from apscheduler.eventbrokers.redis import RedisEventBroker
- # event_broker = RedisEventBroker.from_url("redis://localhost")
- scheduler = AsyncScheduler(data_store, event_broker, role=SchedulerRole.worker)
- await scheduler.run_until_stopped()
+logging.basicConfig(level=logging.INFO)
+engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
+data_store = SQLAlchemyDataStore(engine)
+event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
+# Uncomment the next two lines to use the Redis event broker instead
+# from apscheduler.eventbrokers.redis import RedisEventBroker
+# event_broker = RedisEventBroker.from_url("redis://localhost")
-logging.basicConfig(level=logging.INFO)
asyncio.run(main())
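Put together, the new async_worker.py boils down to building the same data store and event broker at module level and then running the scheduler until stopped, so it picks up and executes whatever schedules the other script has stored. Roughly, assuming the reconstruction from the hunks above is faithful:

import asyncio
import logging

from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.async_ import AsyncScheduler

async def main():
    # Blocks here, processing schedules from the shared data store and spawning jobs.
    async with AsyncScheduler(data_store, event_broker) as scheduler:
        await scheduler.run_until_stopped()

logging.basicConfig(level=logging.INFO)
engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
asyncio.run(main())

In practice: run async_scheduler.py once to store the "tick" schedule, then leave async_worker.py running; it should print one line per second via tick.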
diff --git a/examples/separate_worker/sync_scheduler.py b/examples/separate_worker/sync_scheduler.py
index 45d337d..4699018 100644
--- a/examples/separate_worker/sync_scheduler.py
+++ b/examples/separate_worker/sync_scheduler.py
@@ -1,14 +1,12 @@
"""
-Example demonstrating the separation of scheduler and worker.
-This script runs the scheduler part. You need to be running both this and the worker
-script simultaneously in order for the scheduled task to be run.
+This is an example demonstrating the use of the scheduler as only an interface to the
+scheduling system. This script adds or updates a single schedule and then exits. To see
+the schedule acted on, you need to run the corresponding worker script (either
+async_worker.py or sync_worker.py).
-Requires the "postgresql" and "redis" services to be running.
-To install prerequisites: pip install sqlalchemy psycopg2 redis
+This script requires the "postgresql" service to be running.
+To install prerequisites: pip install sqlalchemy asyncpg
To run: python sync_scheduler.py
-
-When run together with sync_worker.py, it should print a line on the console
-on a one-second interval.
"""
from __future__ import annotations
@@ -16,23 +14,22 @@ from __future__ import annotations
import logging
from example_tasks import tick
-from sqlalchemy.future import create_engine
+from sqlalchemy.ext.asyncio import create_async_engine
-from apscheduler import SchedulerRole
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
-from apscheduler.eventbrokers.redis import RedisEventBroker
+from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.sync import Scheduler
from apscheduler.triggers.interval import IntervalTrigger
logging.basicConfig(level=logging.INFO)
-engine = create_engine("postgresql+psycopg2://postgres:secret@localhost/testdb")
+engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
-event_broker = RedisEventBroker.from_url("redis://localhost")
+event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
# Uncomment the next two lines to use the MQTT event broker instead
# from apscheduler.eventbrokers.mqtt import MQTTEventBroker
# event_broker = MQTTEventBroker()
-with Scheduler(data_store, event_broker, role=SchedulerRole.scheduler) as scheduler:
+with Scheduler(data_store, event_broker) as scheduler:
scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")
- scheduler.run_until_stopped()
+ # Note: we don't actually start the scheduler here!
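The synchronous variant follows the same add-and-exit shape, just with the blocking Scheduler. A sketch of sync_scheduler.py after the patch, again reconstructed from the hunks, so any surrounding lines are an assumption:

import logging

from example_tasks import tick
from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.sync import Scheduler
from apscheduler.triggers.interval import IntervalTrigger

logging.basicConfig(level=logging.INFO)
engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
with Scheduler(data_store, event_broker) as scheduler:
    # Store the schedule and fall out of the block; no run_until_stopped() here.
    scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")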
diff --git a/examples/separate_worker/sync_worker.py b/examples/separate_worker/sync_worker.py
index 24018ba..27d66b0 100644
--- a/examples/separate_worker/sync_worker.py
+++ b/examples/separate_worker/sync_worker.py
@@ -1,35 +1,33 @@
"""
-Example demonstrating a scheduler that only runs jobs but does not process schedules.
-You need to be running both this and the scheduler script simultaneously in order for
-the scheduled task to be run.
-
-Requires the "postgresql" and "redis" services to be running.
-To install prerequisites: pip install sqlalchemy psycopg2 redis
+This is an example demonstrating how to run a scheduler to process schedules added by
+another scheduler elsewhere. Prior to starting this script, you need to run the script
+(either async_scheduler.py or sync_scheduler.py) that adds or updates a schedule to the
+data store. This script will then pick up that schedule and start spawning jobs that
+will print a line on the console on one-second intervals.
+
+This script requires the "postgresql" service to be running.
+To install prerequisites: pip install sqlalchemy asyncpg
To run: python sync_worker.py
-
-When run together with sync_scheduler.py, it should print a line on the
-console on a one-second interval.
"""
from __future__ import annotations
import logging
-from sqlalchemy.future import create_engine
+from sqlalchemy.ext.asyncio import create_async_engine
-from apscheduler import SchedulerRole
from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
-from apscheduler.eventbrokers.redis import RedisEventBroker
+from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.sync import Scheduler
logging.basicConfig(level=logging.INFO)
-engine = create_engine("postgresql+psycopg2://postgres:secret@localhost/testdb")
+engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
-event_broker = RedisEventBroker.from_url("redis://localhost")
+event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
# Uncomment the next two lines to use the MQTT event broker instead
# from apscheduler.eventbrokers.mqtt import MQTTEventBroker
# event_broker = MQTTEventBroker()
-with Scheduler(data_store, event_broker, role=SchedulerRole.worker) as scheduler:
+with Scheduler(data_store, event_broker) as scheduler:
scheduler.run_until_stopped()
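The matching sync_worker.py reduces to the same setup plus a blocking run_until_stopped() call. To try the pair, run sync_scheduler.py once (it exits right after storing the schedule) and leave sync_worker.py running; it should then print a line roughly once per second. A sketch, with the same reconstruction caveat as above:

import logging

from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.sync import Scheduler

logging.basicConfig(level=logging.INFO)
engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
data_store = SQLAlchemyDataStore(engine)
event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)
with Scheduler(data_store, event_broker) as scheduler:
    # Blocks here, executing the "tick" schedule stored by the scheduler script.
    scheduler.run_until_stopped()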