path: root/examples/separate_worker/sync_worker.py
"""
This is an example demonstrating how to run a scheduler to process schedules added by
another scheduler elsewhere. Prior to starting this script, you need to run the script
(either async_scheduler.py or sync_scheduler.py) that adds or updates a schedule to the
data store. This script will then pick up that schedule and start spawning jobs that
will print a line on the console on one-second intervals.

This script requires the "postgresql" service to be running.
To install prerequisites: pip install sqlalchemy asyncpg
To run: python sync_worker.py
"""

from __future__ import annotations

import logging

from sqlalchemy.ext.asyncio import create_async_engine

from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
from apscheduler.schedulers.sync import Scheduler

logging.basicConfig(level=logging.INFO)
# Create an async SQLAlchemy engine pointing at the shared PostgreSQL database
engine = create_async_engine("postgresql+asyncpg://postgres:secret@localhost/testdb")
# Store schedules and jobs in PostgreSQL so they are visible to the other scheduler
data_store = SQLAlchemyDataStore(engine)
# Relay scheduler events between processes via PostgreSQL NOTIFY/LISTEN (using asyncpg)
event_broker = AsyncpgEventBroker.from_async_sqla_engine(engine)

# Uncomment the next two lines to use the MQTT event broker instead
# from apscheduler.eventbrokers.mqtt import MQTTEventBroker
# event_broker = MQTTEventBroker()
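# (using the MQTT broker requires a reachable MQTT server, e.g. Mosquitto, and the
# paho-mqtt package)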

# Run the scheduler until it is stopped (e.g. with Ctrl+C), processing any due
# schedules and jobs found in the shared data store
with Scheduler(data_store, event_broker) as scheduler:
    scheduler.run_until_stopped()
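
# For reference, the companion scheduler script mentioned in the docstring only needs
# to add the schedule that this worker then executes. A minimal sketch, assuming a
# tick() task function that both processes can import (e.g. from a shared
# example_tasks module) and the same data store / event broker setup as above; the
# real sync_scheduler.py in this directory may differ in detail:
#
# from example_tasks import tick
# from apscheduler.triggers.interval import IntervalTrigger
#
# with Scheduler(data_store, event_broker) as scheduler:
#     scheduler.add_schedule(tick, IntervalTrigger(seconds=1), id="tick")
#
# Note that the task callable must be importable by both processes (not defined in
# the scheduler script's __main__), since the worker resolves it by reference when
# running the job.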