-rw-r--r--  .pep8                                      |   1
-rw-r--r--  apscheduler/events.py                      |  15
-rw-r--r--  apscheduler/job.py                         |  35
-rw-r--r--  apscheduler/jobstores/mongodb_store.py     |  17
-rw-r--r--  apscheduler/jobstores/redis_store.py       |  12
-rw-r--r--  apscheduler/jobstores/sqlalchemy_store.py  |  11
-rw-r--r--  apscheduler/scheduler.py                   | 141
-rw-r--r--  apscheduler/threadpool.py                  |  15
-rw-r--r--  apscheduler/triggers/cron/__init__.py      |  42
-rw-r--r--  apscheduler/triggers/cron/expressions.py   |  20
-rw-r--r--  apscheduler/triggers/cron/fields.py        |  23
-rw-r--r--  apscheduler/util.py                        |  37
-rw-r--r--  examples/interval.py                       |   3
-rw-r--r--  examples/persistent.py                     |   7
-rw-r--r--  examples/reference.py                      |   3
-rw-r--r--  examples/threaded.py                       |   3
-rw-r--r--  setup.py                                   |   7
-rw-r--r--  tests/testintegration.py                   |  11
-rw-r--r--  tests/testjob.py                           |  13
-rw-r--r--  tests/testjobstores.py                     |   3
-rw-r--r--  tests/testscheduler.py                     |  46
-rw-r--r--  tests/testtriggers.py                      |  33
-rw-r--r--  tests/testutil.py                          |   3
23 files changed, 196 insertions, 305 deletions
diff --git a/.pep8 b/.pep8
index f53502f..bfce227 100644
--- a/.pep8
+++ b/.pep8
@@ -1,2 +1,3 @@
[pep8]
+ignore=E501
exclude=.tox,docs
diff --git a/apscheduler/events.py b/apscheduler/events.py
index 80bde8e..2c262d4 100644
--- a/apscheduler/events.py
+++ b/apscheduler/events.py
@@ -1,8 +1,6 @@
-__all__ = ('EVENT_SCHEDULER_START', 'EVENT_SCHEDULER_SHUTDOWN',
- 'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED',
- 'EVENT_JOBSTORE_JOB_ADDED', 'EVENT_JOBSTORE_JOB_REMOVED',
- 'EVENT_JOB_EXECUTED', 'EVENT_JOB_ERROR', 'EVENT_JOB_MISSED',
- 'EVENT_ALL', 'SchedulerEvent', 'JobStoreEvent', 'JobEvent')
+__all__ = ('EVENT_SCHEDULER_START', 'EVENT_SCHEDULER_SHUTDOWN', 'EVENT_JOBSTORE_ADDED', 'EVENT_JOBSTORE_REMOVED',
+ 'EVENT_JOBSTORE_JOB_ADDED', 'EVENT_JOBSTORE_JOB_REMOVED', 'EVENT_JOB_EXECUTED', 'EVENT_JOB_ERROR',
+ 'EVENT_JOB_MISSED', 'EVENT_ALL', 'SchedulerEvent', 'JobStoreEvent', 'JobEvent')
EVENT_SCHEDULER_START = 1 # The scheduler was started
@@ -14,10 +12,9 @@ EVENT_JOBSTORE_JOB_REMOVED = 32 # A job was removed from a job store
EVENT_JOB_EXECUTED = 64 # A job was executed successfully
EVENT_JOB_ERROR = 128 # A job raised an exception during execution
EVENT_JOB_MISSED = 256 # A job's execution was missed
-EVENT_ALL = (EVENT_SCHEDULER_START | EVENT_SCHEDULER_SHUTDOWN |
- EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED |
- EVENT_JOBSTORE_JOB_ADDED | EVENT_JOBSTORE_JOB_REMOVED |
- EVENT_JOB_EXECUTED | EVENT_JOB_ERROR | EVENT_JOB_MISSED)
+EVENT_ALL = (EVENT_SCHEDULER_START | EVENT_SCHEDULER_SHUTDOWN | EVENT_JOBSTORE_ADDED | EVENT_JOBSTORE_REMOVED |
+ EVENT_JOBSTORE_JOB_ADDED | EVENT_JOBSTORE_JOB_REMOVED | EVENT_JOB_EXECUTED | EVENT_JOB_ERROR |
+ EVENT_JOB_MISSED)
class SchedulerEvent(object):
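
For context, a minimal sketch of how these event constants combine with the listener API touched later in this diff; the callback body and printed messages are illustrative, not part of this change:

    from apscheduler.scheduler import Scheduler
    from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR

    def on_job_event(event):
        # JobEvent carries the job, its scheduled run time and either a return
        # value (on success) or an exception/traceback (on failure).
        if event.exception:
            print('job "%s" raised %r' % (event.job.name, event.exception))
        else:
            print('job "%s" returned %r' % (event.job.name, event.retval))

    sched = Scheduler()
    sched.add_listener(on_job_event, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
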
diff --git a/apscheduler/job.py b/apscheduler/job.py
index 0a11b96..adc7586 100644
--- a/apscheduler/job.py
+++ b/apscheduler/job.py
@@ -15,32 +15,28 @@ class MaxInstancesReachedError(Exception):
class Job(object):
"""
- Encapsulates the actual Job along with its metadata. Job instances
- are created by the scheduler when adding jobs, and should not be
- directly instantiated. These options can be set when adding jobs
- to the scheduler (see :ref:`job_options`).
+ Encapsulates the actual Job along with its metadata. Job instances are created by the scheduler when adding jobs,
+ and should not be directly instantiated. These options can be set when adding jobs to the scheduler
+ (see :ref:`job_options`).
:var trigger: trigger that determines the execution times
:var func: callable to call when the trigger is triggered
:var args: list of positional arguments to call func with
:var kwargs: dict of keyword arguments to call func with
:var name: name of the job
- :var misfire_grace_time: seconds after the designated run time that
- the job is still allowed to be run
- :var coalesce: run once instead of many times if the scheduler determines
- that the job should be run more than once in succession
- :var max_runs: maximum number of times this job is allowed to be
- triggered
- :var max_instances: maximum number of concurrently running
- instances allowed for this job
+ :var misfire_grace_time: seconds after the designated run time that the job is still allowed to be run
+ :var coalesce: run once instead of many times if the scheduler determines that the job should be run more than once
+ in succession
+ :var max_runs: maximum number of times this job is allowed to be triggered
+ :var max_instances: maximum number of concurrently running instances allowed for this job
:var runs: number of times this job has been triggered
:var instances: number of concurrently running instances of this job
"""
id = None
next_run_time = None
- def __init__(self, trigger, func, args, kwargs, misfire_grace_time,
- coalesce, name=None, max_runs=None, max_instances=1):
+ def __init__(self, trigger, func, args, kwargs, misfire_grace_time, coalesce, name=None, max_runs=None,
+ max_instances=1):
if not trigger:
raise ValueError('The trigger must not be None')
if not hasattr(args, '__getitem__'):
@@ -65,8 +61,7 @@ class Job(object):
# If this happens, this Job won't be serializable
self.func_ref = None
else:
- raise TypeError('func must be a callable or a textual '
- 'reference to one')
+ raise TypeError('func must be a callable or a textual reference to one')
self._lock = Lock()
@@ -96,8 +91,7 @@ class Job(object):
run_times = []
run_time = self.next_run_time
increment = timedelta(microseconds=1)
- while ((not self.max_runs or self.runs < self.max_runs) and
- run_time and run_time <= now):
+ while (not self.max_runs or self.runs < self.max_runs) and run_time and run_time <= now:
run_times.append(run_time)
run_time = self.trigger.get_next_fire_time(run_time + increment)
@@ -134,8 +128,7 @@ class Job(object):
return NotImplemented
def __repr__(self):
- return '<Job (name=%s, trigger=%s)>' % (self.name, repr(self.trigger))
+ return '<Job (name=%s, trigger=%r)>' % (self.name, self.trigger)
def __str__(self):
- return '%s (trigger: %s, next run at: %s)' % (
- self.name, str(self.trigger), str(self.next_run_time))
+ return '%s (trigger: %s, next run at: %s)' % (self.name, self.trigger, self.next_run_time)
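
Since Job instances are created by the scheduler rather than instantiated directly, the options documented above are normally supplied through the add_*_job methods. A minimal sketch, with an illustrative job function and arbitrary option values:

    from apscheduler.scheduler import Scheduler

    sched = Scheduler()

    def send_report():
        print('report sent')

    # Extra keyword arguments are forwarded to the Job constructor.
    sched.add_interval_job(send_report, hours=1,
                           misfire_grace_time=300,  # still run if up to 5 minutes late
                           coalesce=True,           # collapse a missed backlog into one run
                           max_instances=1,         # never run concurrently with itself
                           max_runs=24)             # unschedule after 24 triggers
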
diff --git a/apscheduler/jobstores/mongodb_store.py b/apscheduler/jobstores/mongodb_store.py
index 3f522c2..258299b 100644
--- a/apscheduler/jobstores/mongodb_store.py
+++ b/apscheduler/jobstores/mongodb_store.py
@@ -21,9 +21,8 @@ logger = logging.getLogger(__name__)
class MongoDBJobStore(JobStore):
- def __init__(self, database='apscheduler', collection='jobs',
- connection=None, pickle_protocol=pickle.HIGHEST_PROTOCOL,
- **connect_args):
+ def __init__(self, database='apscheduler', collection='jobs', connection=None,
+ pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
self.jobs = []
self.pickle_protocol = pickle_protocol
@@ -41,12 +40,9 @@ class MongoDBJobStore(JobStore):
def add_job(self, job):
job_dict = job.__getstate__()
- job_dict['trigger'] = Binary(pickle.dumps(job.trigger,
- self.pickle_protocol))
- job_dict['args'] = Binary(pickle.dumps(job.args,
- self.pickle_protocol))
- job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs,
- self.pickle_protocol))
+ job_dict['trigger'] = Binary(pickle.dumps(job.trigger, self.pickle_protocol))
+ job_dict['args'] = Binary(pickle.dumps(job.args, self.pickle_protocol))
+ job_dict['kwargs'] = Binary(pickle.dumps(job.kwargs, self.pickle_protocol))
job.id = self.collection.insert(job_dict)
self.jobs.append(job)
@@ -72,8 +68,7 @@ class MongoDBJobStore(JobStore):
def update_job(self, job):
spec = {'_id': job.id}
- document = {'$set': {'next_run_time': job.next_run_time},
- '$inc': {'runs': 1}}
+ document = {'$set': {'next_run_time': job.next_run_time}, '$inc': {'runs': 1}}
self.collection.update(spec, document)
def close(self):
diff --git a/apscheduler/jobstores/redis_store.py b/apscheduler/jobstores/redis_store.py
index 814ce0d..59a3a3b 100644
--- a/apscheduler/jobstores/redis_store.py
+++ b/apscheduler/jobstores/redis_store.py
@@ -27,8 +27,7 @@ logger = logging.getLogger(__name__)
class RedisJobStore(JobStore):
- def __init__(self, db='apscheduler', key_prefix='jobs.',
- pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
+ def __init__(self, db='apscheduler', key_prefix='jobs.', pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
self.jobs = []
self.pickle_protocol = pickle_protocol
self.key_prefix = key_prefix
@@ -46,7 +45,8 @@ class RedisJobStore(JobStore):
job_dict = {
'job_state': pickle.dumps(job_state, self.pickle_protocol),
'runs': '0',
- 'next_run_time': job_state.pop('next_run_time').isoformat()}
+ 'next_run_time': job_state.pop('next_run_time').isoformat()
+ }
self.redis.hmset(self.key_prefix + job.id, job_dict)
self.jobs.append(job)
@@ -69,8 +69,7 @@ class RedisJobStore(JobStore):
job_state = pickle.loads(job_dict['job_state'.encode()])
job_state['runs'] = long(job_dict['runs'.encode()])
dateval = job_dict['next_run_time'.encode()].decode()
- job_state['next_run_time'] = datetime.strptime(
- dateval, '%Y-%m-%dT%H:%M:%S')
+ job_state['next_run_time'] = datetime.strptime(dateval, '%Y-%m-%dT%H:%M:%S')
job.__setstate__(job_state)
jobs.append(job)
except Exception:
@@ -81,7 +80,8 @@ class RedisJobStore(JobStore):
def update_job(self, job):
attrs = {
'next_run_time': job.next_run_time.isoformat(),
- 'runs': job.runs}
+ 'runs': job.runs
+ }
self.redis.hmset(self.key_prefix + job.id, attrs)
def close(self):
diff --git a/apscheduler/jobstores/sqlalchemy_store.py b/apscheduler/jobstores/sqlalchemy_store.py
index 5b64a35..b4161cf 100644
--- a/apscheduler/jobstores/sqlalchemy_store.py
+++ b/apscheduler/jobstores/sqlalchemy_store.py
@@ -18,8 +18,8 @@ logger = logging.getLogger(__name__)
class SQLAlchemyJobStore(JobStore):
- def __init__(self, url=None, engine=None, tablename='apscheduler_jobs',
- metadata=None, pickle_protocol=pickle.HIGHEST_PROTOCOL):
+ def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', metadata=None,
+ pickle_protocol=pickle.HIGHEST_PROTOCOL):
self.jobs = []
self.pickle_protocol = pickle_protocol
@@ -36,9 +36,7 @@ class SQLAlchemyJobStore(JobStore):
pickle_coltype = PickleType(pickle_protocol)
self.jobs_t = Table(
tablename, metadata or MetaData(),
- Column('id', Integer,
- Sequence(tablename + '_id_seq', optional=True),
- primary_key=True),
+ Column('id', Integer, Sequence(tablename + '_id_seq', optional=True), primary_key=True),
Column('trigger', pickle_coltype, nullable=False),
Column('func_ref', String(1024), nullable=False),
Column('args', pickle_coltype, nullable=False),
@@ -80,8 +78,7 @@ class SQLAlchemyJobStore(JobStore):
def update_job(self, job):
job_dict = job.__getstate__()
update = self.jobs_t.update().where(self.jobs_t.c.id == job.id).\
- values(next_run_time=job_dict['next_run_time'],
- runs=job_dict['runs'])
+ values(next_run_time=job_dict['next_run_time'], runs=job_dict['runs'])
self.engine.execute(update)
def close(self):
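
The job stores above all follow the same add/load/update/remove contract; a short sketch of plugging one into a scheduler, assuming an SQLite URL and the 'persistent' alias used by the integration tests further down:

    from datetime import datetime, timedelta

    from apscheduler.scheduler import Scheduler
    from apscheduler.jobstores.sqlalchemy_store import SQLAlchemyJobStore

    def alarm():
        print('alarm!')

    sched = Scheduler(standalone=True)
    # Jobs added to this store are pickled into the database and survive restarts.
    sched.add_jobstore(SQLAlchemyJobStore(url='sqlite:///jobs.sqlite'), 'persistent')
    sched.add_date_job(alarm, datetime.now() + timedelta(seconds=10), jobstore='persistent')
    sched.start()  # standalone mode: blocks until no jobs remain
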
diff --git a/apscheduler/scheduler.py b/apscheduler/scheduler.py
index 45df72c..599073a 100644
--- a/apscheduler/scheduler.py
+++ b/apscheduler/scheduler.py
@@ -1,6 +1,5 @@
"""
-This module is the main part of the library. It houses the Scheduler class
-and related exceptions.
+This module is the main part of the library. It houses the Scheduler class and related exceptions.
"""
from threading import Thread, Event, Lock
@@ -21,8 +20,7 @@ logger = getLogger(__name__)
class SchedulerAlreadyRunningError(Exception):
"""
- Raised when attempting to start or configure the scheduler when it's
- already running.
+ Raised when attempting to start or configure the scheduler when it's already running.
"""
def __str__(self):
@@ -31,8 +29,7 @@ class SchedulerAlreadyRunningError(Exception):
class Scheduler(object):
"""
- This class is responsible for scheduling jobs and triggering
- their execution.
+ This class is responsible for scheduling jobs and triggering their execution.
"""
_stopped = False
@@ -49,8 +46,7 @@ class Scheduler(object):
def configure(self, gconfig={}, **options):
"""
- Reconfigures the scheduler with the given options. Can only be done
- when the scheduler isn't running.
+ Reconfigures the scheduler with the given options. Can only be done when the scheduler isn't running.
"""
if self.running:
raise SchedulerAlreadyRunningError
@@ -87,11 +83,9 @@ class Scheduler(object):
"""
Starts the scheduler in a new thread.
- In threaded mode (the default), this method will return immediately
- after starting the scheduler thread.
+ In threaded mode (the default), this method will return immediately after starting the scheduler thread.
- In standalone mode, this method will block until there are no more
- scheduled jobs.
+ In standalone mode, this method will block until there are no more scheduled jobs.
"""
if self.running:
raise SchedulerAlreadyRunningError
@@ -116,11 +110,10 @@ class Scheduler(object):
def shutdown(self, wait=True, shutdown_threadpool=True,
close_jobstores=True):
"""
- Shuts down the scheduler and terminates the thread.
- Does not interrupt any currently running jobs.
+ Shuts down the scheduler and terminates the thread. Does not interrupt any currently running jobs.
- :param wait: ``True`` to wait until all currently executing jobs have
- finished (if ``shutdown_threadpool`` is also ``True``)
+ :param wait: ``True`` to wait until all currently executing jobs have finished (if ``shutdown_threadpool`` is
+ also ``True``)
:param shutdown_threadpool: ``True`` to shut down the thread pool
:param close_jobstores: ``True`` to close all job stores after shutdown
"""
@@ -154,8 +147,7 @@ class Scheduler(object):
:param jobstore: job store to be added
:param alias: alias for the job store
:param quiet: True to suppress scheduler thread wakeup
- :type jobstore: instance of
- :class:`~apscheduler.jobstores.base.JobStore`
+ :type jobstore: instance of :class:`~apscheduler.jobstores.base.JobStore`
:type alias: str
"""
with self._jobstores_lock:
@@ -192,10 +184,9 @@ class Scheduler(object):
def add_listener(self, callback, mask=EVENT_ALL):
"""
- Adds a listener for scheduler events. When a matching event occurs,
- ``callback`` is executed with the event object as its sole argument.
- If the ``mask`` parameter is not provided, the callback will receive
- events of all types.
+ Adds a listener for scheduler events. When a matching event occurs, ``callback`` is executed with the event
+ object as its sole argument. If the ``mask`` parameter is not provided, the callback will receive events of all
+ types.
:param callback: any callable that takes one argument
:param mask: bitmask that indicates which events should be listened to
@@ -249,19 +240,16 @@ class Scheduler(object):
**options):
"""
Adds the given job to the job list and notifies the scheduler thread.
-
- The ``func`` argument can be given either as a callable object or a
- textual reference in the ``package.module:some.object`` format, where
- the first half (separated by ``:``) is an importable module and the
- second half is a reference to the callable object, relative to the
- module.
-
- Any extra keyword arguments are passed along to the constructor of the
- :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+
+ The ``func`` argument can be given either as a callable object or a textual reference in the
+ ``package.module:some.object`` format, where the first half (separated by ``:``) is an importable module and the
+ second half is a reference to the callable object, relative to the module.
+
+ Any extra keyword arguments are passed along to the constructor of the :class:`~apscheduler.job.Job` class
+ (see :ref:`job_options`).
:param trigger: trigger that determines when ``func`` is called
- :param func: callable (or a textual reference to one) to run at the
- given time
+ :param func: callable (or a textual reference to one) to run at the given time
:param args: list of positional arguments to call func with
:param kwargs: dict of keyword arguments to call func with
:param jobstore: alias of the job store to store the job in
@@ -272,8 +260,7 @@ class Scheduler(object):
options.pop('coalesce', self.coalesce), **options)
if not self.running:
self._pending_jobs.append((job, jobstore))
- logger.info('Adding job tentatively -- it will be properly '
- 'scheduled when the scheduler starts')
+ logger.info('Adding job tentatively -- it will be properly scheduled when the scheduler starts')
else:
self._real_add_job(job, jobstore, True)
return job
@@ -290,15 +277,14 @@ class Scheduler(object):
def add_date_job(self, func, date, args=None, kwargs=None, **options):
"""
Schedules a job to be completed on a specific date and time.
- Any extra keyword arguments are passed along to the constructor of the
- :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+ Any extra keyword arguments are passed along to the constructor of the :class:`~apscheduler.job.Job` class
+ (see :ref:`job_options`).
:param func: callable to run at the given time
:param date: the date/time to run the job at
:param name: name of the job
:param jobstore: stored the job in the named (or given) job store
- :param misfire_grace_time: seconds after the designated run time that
- the job is still allowed to be run
+ :param misfire_grace_time: seconds after the designated run time that the job is still allowed to be run
:type date: :class:`datetime.date`
:rtype: :class:`~apscheduler.job.Job`
"""
@@ -310,8 +296,8 @@ class Scheduler(object):
**options):
"""
Schedules a job to be completed on specified intervals.
- Any extra keyword arguments are passed along to the constructor of the
- :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+ Any extra keyword arguments are passed along to the constructor of the :class:`~apscheduler.job.Job` class
+ (see :ref:`job_options`).
:param func: callable to run
:param weeks: number of weeks to wait
@@ -319,14 +305,12 @@ class Scheduler(object):
:param hours: number of hours to wait
:param minutes: number of minutes to wait
:param seconds: number of seconds to wait
- :param start_date: when to first execute the job and start the
- counter (default is after the given interval)
+ :param start_date: when to first execute the job and start the counter (default is after the given interval)
:param args: list of positional arguments to call func with
:param kwargs: dict of keyword arguments to call func with
:param name: name of the job
:param jobstore: alias of the job store to add the job to
- :param misfire_grace_time: seconds after the designated run time that
- the job is still allowed to be run
+ :param misfire_grace_time: seconds after the designated run time that the job is still allowed to be run
:rtype: :class:`~apscheduler.job.Job`
"""
interval = timedelta(weeks=weeks, days=days, hours=hours,
@@ -338,10 +322,9 @@ class Scheduler(object):
day_of_week=None, hour=None, minute=None, second=None,
start_date=None, args=None, kwargs=None, **options):
"""
- Schedules a job to be completed on times that match the given
- expressions.
- Any extra keyword arguments are passed along to the constructor of the
- :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+ Schedules a job to be completed on times that match the given expressions.
+ Any extra keyword arguments are passed along to the constructor of the :class:`~apscheduler.job.Job` class
+ (see :ref:`job_options`).
:param func: callable to run
:param year: year to run on
@@ -355,8 +338,7 @@ class Scheduler(object):
:param kwargs: dict of keyword arguments to call func with
:param name: name of the job
:param jobstore: alias of the job store to add the job to
- :param misfire_grace_time: seconds after the designated run time that
- the job is still allowed to be run
+ :param misfire_grace_time: seconds after the designated run time that the job is still allowed to be run
:return: the scheduled job
:rtype: :class:`~apscheduler.job.Job`
"""
@@ -369,11 +351,14 @@ class Scheduler(object):
def cron_schedule(self, **options):
"""
Decorator version of :meth:`add_cron_job`.
+
This decorator does not wrap its host function.
- Unscheduling decorated functions is possible by passing the ``job``
- attribute of the scheduled function to :meth:`unschedule_job`.
- Any extra keyword arguments are passed along to the constructor of the
- :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+
+ Unscheduling decorated functions is possible by passing the ``job`` attribute of the scheduled function to
+ :meth:`unschedule_job`.
+
+ Any extra keyword arguments are passed along to the constructor of the :class:`~apscheduler.job.Job` class
+ (see :ref:`job_options`).
"""
def inner(func):
func.job = self.add_cron_job(func, **options)
@@ -383,11 +368,14 @@ class Scheduler(object):
def interval_schedule(self, **options):
"""
Decorator version of :meth:`add_interval_job`.
+
This decorator does not wrap its host function.
- Unscheduling decorated functions is possible by passing the ``job``
- attribute of the scheduled function to :meth:`unschedule_job`.
- Any extra keyword arguments are passed along to the constructor of the
- :class:`~apscheduler.job.Job` class (see :ref:`job_options`).
+
+ Unscheduling decorated functions is possible by passing the ``job`` attribute of the scheduled function to
+ :meth:`unschedule_job`.
+
+ Any extra keyword arguments are passed along to the constructor of the :class:`~apscheduler.job.Job` class
+ (see :ref:`job_options`).
"""
def inner(func):
func.job = self.add_interval_job(func, **options)
@@ -431,16 +419,14 @@ class Scheduler(object):
found = True
if not found:
- raise KeyError('The given function is not scheduled in this '
- 'scheduler')
+ raise KeyError('The given function is not scheduled in this scheduler')
def print_jobs(self, out=None):
"""
Prints out a textual listing of all jobs currently scheduled on this
scheduler.
- :param out: a file-like object to print to (defaults to **sys.stdout**
- if nothing is given)
+ :param out: a file-like object to print to (defaults to **sys.stdout** if nothing is given)
"""
out = out or sys.stdout
job_strs = []
@@ -468,36 +454,31 @@ class Scheduler(object):
# Notify listeners about a missed run
event = JobEvent(EVENT_JOB_MISSED, job, run_time)
self._notify_listeners(event)
- logger.warning('Run time of job "%s" was missed by %s',
- job, difference)
+ logger.warning('Run time of job "%s" was missed by %s', job, difference)
else:
try:
job.add_instance()
except MaxInstancesReachedError:
event = JobEvent(EVENT_JOB_MISSED, job, run_time)
self._notify_listeners(event)
- logger.warning('Execution of job "%s" skipped: '
- 'maximum number of running instances '
- 'reached (%d)', job, job.max_instances)
+ logger.warning('Execution of job "%s" skipped: maximum number of running instances reached (%d)',
+ job, job.max_instances)
break
- logger.info('Running job "%s" (scheduled at %s)', job,
- run_time)
+ logger.info('Running job "%s" (scheduled at %s)', job, run_time)
try:
retval = job.func(*job.args, **job.kwargs)
except:
# Notify listeners about the exception
exc, tb = sys.exc_info()[1:]
- event = JobEvent(EVENT_JOB_ERROR, job, run_time,
- exception=exc, traceback=tb)
+ event = JobEvent(EVENT_JOB_ERROR, job, run_time, exception=exc, traceback=tb)
self._notify_listeners(event)
logger.exception('Job "%s" raised an exception', job)
else:
# Notify listeners about successful execution
- event = JobEvent(EVENT_JOB_EXECUTED, job, run_time,
- retval=retval)
+ event = JobEvent(EVENT_JOB_EXECUTED, job, run_time, retval=retval)
self._notify_listeners(event)
logger.info('Job "%s" executed successfully', job)
@@ -510,8 +491,7 @@ class Scheduler(object):
def _process_jobs(self, now):
"""
- Iterates through jobs in every jobstore, starts pending jobs
- and figures out the next wakeup time.
+ Iterates through jobs in every jobstore, starts pending jobs and figures out the next wakeup time.
"""
next_wakeup_time = None
with self._jobstores_lock:
@@ -528,8 +508,7 @@ class Scheduler(object):
job.runs += len(run_times)
# Update the job, but don't keep finished jobs around
- if job.compute_next_run_time(
- now + timedelta(microseconds=1)):
+ if job.compute_next_run_time(now + timedelta(microseconds=1)):
jobstore.update_job(job)
else:
self._remove_job(job, alias, jobstore)
@@ -537,8 +516,7 @@ class Scheduler(object):
if not next_wakeup_time:
next_wakeup_time = job.next_run_time
elif job.next_run_time:
- next_wakeup_time = min(next_wakeup_time,
- job.next_run_time)
+ next_wakeup_time = min(next_wakeup_time, job.next_run_time)
return next_wakeup_time
def _main_loop(self):
@@ -557,8 +535,7 @@ class Scheduler(object):
# a new job is added or the scheduler is stopped
if next_wakeup_time is not None:
wait_seconds = time_difference(next_wakeup_time, now)
- logger.debug('Next wakeup is due at %s (in %f seconds)',
- next_wakeup_time, wait_seconds)
+ logger.debug('Next wakeup is due at %s (in %f seconds)', next_wakeup_time, wait_seconds)
self._wakeup.wait(wait_seconds)
self._wakeup.clear()
elif self.standalone:
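
Taken together, the docstrings above describe the public scheduling surface; a hedged sketch of the decorator and standalone-mode usage, assuming APScheduler 2.x option names (the schedules themselves are arbitrary):

    from apscheduler.scheduler import Scheduler

    # standalone=True makes start() block until there are no more scheduled jobs.
    sched = Scheduler(standalone=True)

    @sched.interval_schedule(seconds=3)
    def tick():
        print('tick')

    @sched.cron_schedule(day_of_week='mon-fri', hour=17, minute=30)
    def end_of_day():
        print('wrapping up')

    # A decorated function can later be unscheduled via its 'job' attribute:
    #     sched.unschedule_job(tick.job)
    sched.start()
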
diff --git a/apscheduler/threadpool.py b/apscheduler/threadpool.py
index 7a6fe0b..6419941 100644
--- a/apscheduler/threadpool.py
+++ b/apscheduler/threadpool.py
@@ -1,7 +1,6 @@
"""
Generic thread pool class. Modeled after Java's ThreadPoolExecutor.
-Please note that this ThreadPool does *not* fully implement the PEP 3148
-ThreadPool!
+Please note that this ThreadPool does *not* fully implement the PEP 3148 ThreadPool!
"""
from threading import Thread, Lock, currentThread
@@ -18,9 +17,8 @@ logger = logging.getLogger(__name__)
_threadpools = set()
-# Worker threads are daemonic in order to let the interpreter exit without
-# an explicit shutdown of the thread pool. The following trick is necessary
-# to allow worker threads to finish cleanly.
+# Worker threads are daemonic in order to let the interpreter exit without an explicit shutdown of the thread pool.
+# The following trick is necessary to allow worker threads to finish cleanly.
def _shutdown_all():
for pool_ref in tuple(_threadpools):
pool = pool_ref()
@@ -36,8 +34,7 @@ class ThreadPool(object):
:param core_threads: maximum number of persistent threads in the pool
:param max_threads: maximum number of total threads in the pool
:param thread_class: callable that creates a Thread object
- :param keepalive: seconds to keep non-core worker threads waiting
- for new tasks
+ :param keepalive: seconds to keep non-core worker threads waiting for new tasks
"""
self.core_threads = core_threads
self.max_threads = max(max_threads, core_threads, 1)
@@ -48,8 +45,8 @@ class ThreadPool(object):
self._shutdown = False
_threadpools.add(ref(self))
- logger.info('Started thread pool with %d core threads and %s maximum '
- 'threads', core_threads, max_threads or 'unlimited')
+ logger.info('Started thread pool with %d core threads and %s maximum threads', core_threads,
+ max_threads or 'unlimited')
def _adjust_threadcount(self):
with self._threads_lock:
diff --git a/apscheduler/triggers/cron/__init__.py b/apscheduler/triggers/cron/__init__.py
index 9e69f72..f216fae 100644
--- a/apscheduler/triggers/cron/__init__.py
+++ b/apscheduler/triggers/cron/__init__.py
@@ -5,16 +5,17 @@ from apscheduler.util import datetime_ceil, convert_to_datetime, iteritems
class CronTrigger(object):
- FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour',
- 'minute', 'second')
- FIELDS_MAP = {'year': BaseField,
- 'month': BaseField,
- 'week': WeekField,
- 'day': DayOfMonthField,
- 'day_of_week': DayOfWeekField,
- 'hour': BaseField,
- 'minute': BaseField,
- 'second': BaseField}
+ FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second')
+ FIELDS_MAP = {
+ 'year': BaseField,
+ 'month': BaseField,
+ 'week': WeekField,
+ 'day': DayOfMonthField,
+ 'day_of_week': DayOfWeekField,
+ 'hour': BaseField,
+ 'minute': BaseField,
+ 'second': BaseField
+ }
def __init__(self, **values):
self.start_date = values.pop('start_date', None)
@@ -48,15 +49,13 @@ class CronTrigger(object):
def _increment_field_value(self, dateval, fieldnum):
"""
- Increments the designated field and resets all less significant fields
- to their minimum values.
+ Increments the designated field and resets all less significant fields to their minimum values.
:type dateval: datetime
:type fieldnum: int
:type amount: int
:rtype: tuple
- :return: a tuple containing the new date, and the number of the field
- that was actually incremented
+ :return: a tuple containing the new date, and the number of the field that was actually incremented
"""
i = 0
values = {}
@@ -113,17 +112,14 @@ class CronTrigger(object):
if next_value is None:
# No valid value was found
- next_date, fieldnum = self._increment_field_value(
- next_date, fieldnum - 1)
+ next_date, fieldnum = self._increment_field_value(next_date, fieldnum - 1)
elif next_value > curr_value:
# A valid, but higher than the starting value, was found
if field.REAL:
- next_date = self._set_field_value(
- next_date, fieldnum, next_value)
+ next_date = self._set_field_value(next_date, fieldnum, next_value)
fieldnum += 1
else:
- next_date, fieldnum = self._increment_field_value(
- next_date, fieldnum)
+ next_date, fieldnum = self._increment_field_value(next_date, fieldnum)
else:
# A valid value was found, no changes necessary
fieldnum += 1
@@ -132,13 +128,11 @@ class CronTrigger(object):
return next_date
def __str__(self):
- options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
- if not f.is_default]
+ options = ["%s='%s'" % (f.name, str(f)) for f in self.fields if not f.is_default]
return 'cron[%s]' % (', '.join(options))
def __repr__(self):
- options = ["%s='%s'" % (f.name, str(f)) for f in self.fields
- if not f.is_default]
+ options = ["%s='%s'" % (f.name, str(f)) for f in self.fields if not f.is_default]
if self.start_date:
options.append("start_date='%s'" % self.start_date.isoformat(' '))
return '<%s (%s)>' % (self.__class__.__name__, ', '.join(options))
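
For reference, a small sketch of driving CronTrigger directly; the field expressions mirror the ones exercised in the trigger tests further down:

    from datetime import datetime

    from apscheduler.triggers import CronTrigger

    trigger = CronTrigger(year='2009/2', month='1/3', day='5-13')
    # Starting from 2008-12-01, the first matching time is 2009-01-05 00:00:00.
    print(trigger.get_next_fire_time(datetime(2008, 12, 1)))
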
diff --git a/apscheduler/triggers/cron/expressions.py b/apscheduler/triggers/cron/expressions.py
index b5d2919..55272db 100644
--- a/apscheduler/triggers/cron/expressions.py
+++ b/apscheduler/triggers/cron/expressions.py
@@ -7,8 +7,8 @@ import re
from apscheduler.util import asint
-__all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression',
- 'WeekdayPositionExpression', 'LastDayOfMonthExpression')
+__all__ = ('AllExpression', 'RangeExpression', 'WeekdayRangeExpression', 'WeekdayPositionExpression',
+ 'LastDayOfMonthExpression')
WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
@@ -57,8 +57,7 @@ class RangeExpression(AllExpression):
if last is None and step is None:
last = first
if last is not None and first > last:
- raise ValueError('The minimum value in a range must not be '
- 'higher than the maximum')
+ raise ValueError('The minimum value in a range must not be higher than the maximum')
self.first = first
self.last = last
@@ -102,8 +101,7 @@ class RangeExpression(AllExpression):
class WeekdayRangeExpression(RangeExpression):
- value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?',
- re.IGNORECASE)
+ value_re = re.compile(r'(?P<first>[a-z]+)(?:-(?P<last>[a-z]+))?', re.IGNORECASE)
def __init__(self, first, last=None):
try:
@@ -135,8 +133,7 @@ class WeekdayRangeExpression(RangeExpression):
class WeekdayPositionExpression(AllExpression):
options = ['1st', '2nd', '3rd', '4th', '5th', 'last']
- value_re = re.compile(r'(?P<option_name>%s) +(?P<weekday_name>(?:\d+|\w+))'
- % '|'.join(options), re.IGNORECASE)
+ value_re = re.compile(r'(?P<option_name>%s) +(?P<weekday_name>(?:\d+|\w+))' % '|'.join(options), re.IGNORECASE)
def __init__(self, option_name, weekday_name):
try:
@@ -169,13 +166,10 @@ class WeekdayPositionExpression(AllExpression):
return target_day
def __str__(self):
- return '%s %s' % (self.options[self.option_num],
- WEEKDAYS[self.weekday])
+ return '%s %s' % (self.options[self.option_num], WEEKDAYS[self.weekday])
def __repr__(self):
- return "%s('%s', '%s')" % (self.__class__.__name__,
- self.options[self.option_num],
- WEEKDAYS[self.weekday])
+ return "%s('%s', '%s')" % (self.__class__.__name__, self.options[self.option_num], WEEKDAYS[self.weekday])
class LastDayOfMonthExpression(AllExpression):
diff --git a/apscheduler/triggers/cron/fields.py b/apscheduler/triggers/cron/fields.py
index be5e5e3..27ab1ec 100644
--- a/apscheduler/triggers/cron/fields.py
+++ b/apscheduler/triggers/cron/fields.py
@@ -7,16 +7,14 @@ from calendar import monthrange
from apscheduler.triggers.cron.expressions import *
-__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField',
- 'WeekField', 'DayOfMonthField', 'DayOfWeekField')
+__all__ = ('MIN_VALUES', 'MAX_VALUES', 'DEFAULT_VALUES', 'BaseField', 'WeekField', 'DayOfMonthField', 'DayOfWeekField')
-MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1,
- 'day_of_week': 0, 'hour': 0, 'minute': 0, 'second': 0}
-MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day:': 31, 'week': 53,
- 'day_of_week': 6, 'hour': 23, 'minute': 59, 'second': 59}
-DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*',
- 'day_of_week': '*', 'hour': 0, 'minute': 0, 'second': 0}
+MIN_VALUES = {'year': 1970, 'month': 1, 'day': 1, 'week': 1, 'day_of_week': 0, 'hour': 0, 'minute': 0, 'second': 0}
+MAX_VALUES = {'year': 2 ** 63, 'month': 12, 'day:': 31, 'week': 53, 'day_of_week': 6, 'hour': 23, 'minute': 59,
+ 'second': 59}
+DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', 'day_of_week': '*', 'hour': 0, 'minute': 0,
+ 'second': 0}
class BaseField(object):
@@ -65,16 +63,14 @@ class BaseField(object):
self.expressions.append(compiled_expr)
return
- raise ValueError('Unrecognized expression "%s" for field "%s"' %
- (expr, self.name))
+ raise ValueError('Unrecognized expression "%s" for field "%s"' % (expr, self.name))
def __str__(self):
expr_strings = (str(e) for e in self.expressions)
return ','.join(expr_strings)
def __repr__(self):
- return "%s('%s', '%s')" % (self.__class__.__name__, self.name,
- str(self))
+ return "%s('%s', '%s')" % (self.__class__.__name__, self.name, self)
class WeekField(BaseField):
@@ -85,8 +81,7 @@ class WeekField(BaseField):
class DayOfMonthField(BaseField):
- COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression,
- LastDayOfMonthExpression]
+ COMPILERS = BaseField.COMPILERS + [WeekdayPositionExpression, LastDayOfMonthExpression]
def get_max(self, dateval):
return monthrange(dateval.year, dateval.month)[1]
diff --git a/apscheduler/util.py b/apscheduler/util.py
index dcede4c..6e42ab8 100644
--- a/apscheduler/util.py
+++ b/apscheduler/util.py
@@ -7,16 +7,14 @@ from time import mktime
import re
import sys
-__all__ = ('asint', 'asbool', 'convert_to_datetime', 'timedelta_seconds',
- 'time_difference', 'datetime_ceil', 'combine_opts',
- 'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref',
- 'to_unicode', 'iteritems', 'itervalues', 'xrange')
+__all__ = ('asint', 'asbool', 'convert_to_datetime', 'timedelta_seconds', 'time_difference', 'datetime_ceil',
+ 'combine_opts', 'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'to_unicode', 'iteritems',
+ 'itervalues', 'xrange')
def asint(text):
"""
- Safely converts a string to an integer, returning None if the string
- is None.
+ Safely converts a string to an integer, returning None if the string is None.
:type text: str
:rtype: int
@@ -86,8 +84,8 @@ def timedelta_seconds(delta):
def time_difference(date1, date2):
"""
- Returns the time difference in seconds between the given two
- datetime objects. The difference is calculated as: date1 - date2.
+ Returns the time difference in seconds between the given two datetime objects.
+ The difference is calculated as: date1 - date2.
:param date1: the later datetime
:type date1: datetime
@@ -114,9 +112,8 @@ def datetime_ceil(dateval):
def combine_opts(global_config, prefix, local_config={}):
"""
- Returns a subdictionary from keys and values of ``global_config`` where
- the key starts with the given prefix, combined with options from
- local_config. The keys in the subdictionary have the prefix removed.
+ Returns a subdictionary from keys and values of ``global_config`` where the key starts with the given prefix,
+ combined with options from local_config. The keys in the subdictionary have the prefix removed.
:type global_config: dict
:type prefix: str
@@ -154,8 +151,7 @@ def get_callable_name(func):
# instance of a class with a __call__ method
return func.__class__.__name__
- raise TypeError('Unable to determine a name for %s -- '
- 'maybe it is not a callable?' % repr(func))
+ raise TypeError('Unable to determine a name for %r -- maybe it is not a callable?' % func)
def obj_to_ref(obj):
@@ -168,7 +164,7 @@ def obj_to_ref(obj):
if obj != obj2:
raise ValueError
except Exception:
- raise ValueError('Cannot determine the reference to %s' % repr(obj))
+ raise ValueError('Cannot determine the reference to %r' % obj)
return ref
@@ -186,22 +182,20 @@ def ref_to_obj(ref):
try:
obj = __import__(modulename)
except ImportError:
- raise LookupError('Error resolving reference %s: '
- 'could not import module' % ref)
+ raise LookupError('Error resolving reference %s: could not import module' % ref)
try:
for name in modulename.split('.')[1:] + rest.split('.'):
obj = getattr(obj, name)
return obj
except Exception:
- raise LookupError('Error resolving reference %s: '
- 'error looking up object' % ref)
+ raise LookupError('Error resolving reference %s: error looking up object' % ref)
def maybe_ref(ref):
"""
- Returns the object that the given reference points to, if it is indeed
- a reference. If it is not a reference, the object is returned as-is.
+ Returns the object that the given reference points to, if it is indeed a reference.
+ If it is not a reference, the object is returned as-is.
"""
if not isinstance(ref, str):
return ref
@@ -210,8 +204,7 @@ def maybe_ref(ref):
def to_unicode(string, encoding='ascii'):
"""
- Safely converts a string to a unicode representation on any
- Python version.
+ Safely converts a string to a unicode representation on any Python version.
"""
if hasattr(string, 'decode'):
return string.decode(encoding, 'ignore')
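
A quick sketch of the textual-reference helpers touched above; the 'copy:copy' reference is the same one the scheduler tests below expect:

    from copy import copy

    from apscheduler.util import obj_to_ref, ref_to_obj

    ref = obj_to_ref(copy)          # 'copy:copy' (module:qualified name)
    assert ref_to_obj(ref) is copy  # round-trips back to the original callable
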
diff --git a/examples/interval.py b/examples/interval.py
index e16596c..cb7c4f4 100644
--- a/examples/interval.py
+++ b/examples/interval.py
@@ -1,6 +1,5 @@
"""
-Basic example showing how to start the scheduler and schedule a job that
-executes on 3 second intervals.
+Basic example showing how to start the scheduler and schedule a job that executes on 3 second intervals.
"""
from datetime import datetime
diff --git a/examples/persistent.py b/examples/persistent.py
index 4233e3c..51c2571 100644
--- a/examples/persistent.py
+++ b/examples/persistent.py
@@ -1,8 +1,7 @@
"""
-This example demonstrates the use of persistent job stores. On each run, it
-adds a new alarm that fires after ten seconds. You can exit the program,
-restart it and observe that any previous alarms that have not fired yet are
-still active.
+This example demonstrates the use of persistent job stores.
+On each run, it adds a new alarm that fires after ten seconds.
+You can exit the program, restart it and observe that any previous alarms that have not fired yet are still active.
"""
from datetime import datetime, timedelta
diff --git a/examples/reference.py b/examples/reference.py
index a31c543..8be45e9 100644
--- a/examples/reference.py
+++ b/examples/reference.py
@@ -1,6 +1,5 @@
"""
-Basic example showing how to schedule a callable using a textual
-reference.
+Basic example showing how to schedule a callable using a textual reference.
"""
from apscheduler.scheduler import Scheduler
diff --git a/examples/threaded.py b/examples/threaded.py
index b80f36f..2c612cd 100644
--- a/examples/threaded.py
+++ b/examples/threaded.py
@@ -1,6 +1,5 @@
"""
-Basic example showing how the scheduler integrates with the application it's
-running alongside with.
+Basic example showing how the scheduler integrates with the application it's running alongside with.
"""
from datetime import datetime
diff --git a/setup.py b/setup.py
index 1cdc6e3..924dd73 100644
--- a/setup.py
+++ b/setup.py
@@ -4,9 +4,7 @@ import os.path
try:
from setuptools import setup
- extras = dict(zip_safe=False,
- test_suite='nose.collector',
- tests_require=['nose'])
+ extras = dict(zip_safe=False, test_suite='nose.collector', tests_require=['nose'])
except ImportError:
from distutils.core import setup
extras = {}
@@ -39,6 +37,5 @@ setup(
],
keywords='scheduling cron',
license='MIT',
- packages=('apscheduler', 'apscheduler.jobstores', 'apscheduler.triggers',
- 'apscheduler.triggers.cron'),
+ packages=('apscheduler', 'apscheduler.jobstores', 'apscheduler.triggers', 'apscheduler.triggers.cron'),
)
diff --git a/tests/testintegration.py b/tests/testintegration.py
index 2abff1a..4141d4e 100644
--- a/tests/testintegration.py
+++ b/tests/testintegration.py
@@ -47,19 +47,16 @@ class IntegrationTestBase(object):
# running when the next appointed time hits.
vals = [0]
- self.scheduler.add_interval_job(increment, jobstore='persistent',
- seconds=1, args=[vals, 2])
+ self.scheduler.add_interval_job(increment, jobstore='persistent', seconds=1, args=[vals, 2])
sleep(2.5)
eq_(vals, [1])
def test_max_instances(self):
vals = [0]
events = []
- self.scheduler.add_listener(events.append,
- EVENT_JOB_EXECUTED | EVENT_JOB_MISSED)
- self.scheduler.add_interval_job(
- increment, jobstore='persistent',
- seconds=0.3, max_instances=2, max_runs=4, args=[vals, 1])
+ self.scheduler.add_listener(events.append, EVENT_JOB_EXECUTED | EVENT_JOB_MISSED)
+ self.scheduler.add_interval_job(increment, jobstore='persistent', seconds=0.3, max_instances=2, max_runs=4,
+ args=[vals, 1])
sleep(2.4)
eq_(vals, [2])
eq_(len(events), 4)
diff --git a/tests/testjob.py b/tests/testjob.py
index fa42647..726ceea 100644
--- a/tests/testjob.py
+++ b/tests/testjob.py
@@ -119,12 +119,9 @@ class TestJob(object):
def test_repr(self):
self.job.compute_next_run_time(self.RUNTIME)
eq_(repr(self.job),
- "<Job (name=dummyfunc, "
- "trigger=<SimpleTrigger "
- "(run_date=datetime.datetime(2010, 12, 13, 0, 8))>)>")
+ "<Job (name=dummyfunc, trigger=<SimpleTrigger (run_date=datetime.datetime(2010, 12, 13, 0, 8))>)>")
eq_(str(self.job),
- "dummyfunc (trigger: date[2010-12-13 00:08:00], "
- "next run at: 2010-12-13 00:08:00)")
+ "dummyfunc (trigger: date[2010-12-13 00:08:00], next run at: 2010-12-13 00:08:00)")
@raises(ValueError)
@@ -149,11 +146,9 @@ def test_create_job_invalid_misfire():
@raises(ValueError)
def test_create_job_invalid_maxruns():
- Job(SimpleTrigger(datetime.now()), lambda: None, [], {}, 1, False,
- max_runs=0)
+ Job(SimpleTrigger(datetime.now()), lambda: None, [], {}, 1, False, max_runs=0)
@raises(ValueError)
def test_create_job_invalid_maxinstances():
- Job(SimpleTrigger(datetime.now()), lambda: None, [], {}, 1, False,
- max_instances=0)
+ Job(SimpleTrigger(datetime.now()), lambda: None, [], {}, 1, False, max_instances=0)
diff --git a/tests/testjobstores.py b/tests/testjobstores.py
index 2e39ec1..4263b23 100644
--- a/tests/testjobstores.py
+++ b/tests/testjobstores.py
@@ -175,8 +175,7 @@ class TestMongoDBJobStore(PersistentJobstoreTestBase):
cls.jobstore.close()
def test_repr(self):
- eq_(repr(self.jobstore),
- "<MongoDBJobStore (connection=Connection('localhost', 27017))>")
+ eq_(repr(self.jobstore), "<MongoDBJobStore (connection=Connection('localhost', 27017))>")
class TestRedisJobStore(PersistentJobstoreTestBase):
diff --git a/tests/testscheduler.py b/tests/testscheduler.py
index 3ae2d73..a23dafb 100644
--- a/tests/testscheduler.py
+++ b/tests/testscheduler.py
@@ -8,9 +8,8 @@ from nose.tools import eq_, raises
from apscheduler.jobstores.ram_store import RAMJobStore
from apscheduler.scheduler import Scheduler, SchedulerAlreadyRunningError
from apscheduler.job import Job
-from apscheduler.events import (EVENT_JOB_EXECUTED, SchedulerEvent,
- EVENT_SCHEDULER_START,
- EVENT_SCHEDULER_SHUTDOWN, EVENT_JOB_MISSED)
+from apscheduler.events import (EVENT_JOB_EXECUTED, SchedulerEvent, EVENT_SCHEDULER_START, EVENT_SCHEDULER_SHUTDOWN,
+ EVENT_JOB_MISSED)
from apscheduler import scheduler
try:
@@ -33,8 +32,7 @@ class TestOfflineScheduler(object):
self.scheduler.add_jobstore(RAMJobStore(), 'dummy')
def test_add_tentative_job(self):
- job = self.scheduler.add_date_job(lambda: None, datetime(2200, 7, 24),
- jobstore='dummy')
+ job = self.scheduler.add_date_job(lambda: None, datetime(2200, 7, 24), jobstore='dummy')
assert isinstance(job, Job)
eq_(self.scheduler.get_jobs(), [])
@@ -44,8 +42,7 @@ class TestOfflineScheduler(object):
eq_(job.func_ref, 'copy:copy')
def test_configure_jobstore(self):
- conf = {'apscheduler.jobstore.ramstore.class':
- 'apscheduler.jobstores.ram_store:RAMJobStore'}
+ conf = {'apscheduler.jobstore.ramstore.class': 'apscheduler.jobstores.ram_store:RAMJobStore'}
self.scheduler.configure(conf)
self.scheduler.remove_jobstore('ramstore')
@@ -53,15 +50,13 @@ class TestOfflineScheduler(object):
self.scheduler.shutdown()
def test_configure_no_prefix(self):
- global_options = {'misfire_grace_time': '2',
- 'daemonic': 'false'}
+ global_options = {'misfire_grace_time': '2', 'daemonic': 'false'}
self.scheduler.configure(global_options)
eq_(self.scheduler.misfire_grace_time, 1)
eq_(self.scheduler.daemonic, True)
def test_configure_prefix(self):
- global_options = {'apscheduler.misfire_grace_time': 2,
- 'apscheduler.daemonic': False}
+ global_options = {'apscheduler.misfire_grace_time': 2, 'apscheduler.daemonic': False}
self.scheduler.configure(global_options)
eq_(self.scheduler.misfire_grace_time, 2)
eq_(self.scheduler.daemonic, False)
@@ -143,10 +138,9 @@ class TestJobExecution(object):
def my_job():
pass
- job = self.scheduler.add_interval_job(my_job,
- start_date=datetime(2010, 5, 19))
- eq_(repr(job), '<Job (name=my_job, '
- 'trigger=<IntervalTrigger (interval=datetime.timedelta(0, 1), '
+ job = self.scheduler.add_interval_job(my_job, start_date=datetime(2010, 5, 19))
+ eq_(repr(job),
+ '<Job (name=my_job, trigger=<IntervalTrigger (interval=datetime.timedelta(0, 1), '
'start_date=datetime.datetime(2010, 5, 19, 0, 0))>)>')
def test_schedule_object(self):
@@ -234,8 +228,7 @@ class TestJobExecution(object):
job = self.scheduler.add_interval_job(lambda: None, seconds=1)
eq_(job.misfire_grace_time, 3)
- job = self.scheduler.add_interval_job(lambda: None, seconds=1,
- misfire_grace_time=2)
+ job = self.scheduler.add_interval_job(lambda: None, seconds=1, misfire_grace_time=2)
eq_(job.misfire_grace_time, 2)
def test_coalesce_on(self):
@@ -247,11 +240,9 @@ class TestJobExecution(object):
vals = [0]
events = []
scheduler.datetime = FakeDateTime
- self.scheduler.add_listener(events.append,
- EVENT_JOB_EXECUTED | EVENT_JOB_MISSED)
- job = self.scheduler.add_interval_job(
- increment, seconds=1, start_date=FakeDateTime.now(),
- coalesce=True, misfire_grace_time=2)
+ self.scheduler.add_listener(events.append, EVENT_JOB_EXECUTED | EVENT_JOB_MISSED)
+ job = self.scheduler.add_interval_job(increment, seconds=1, start_date=FakeDateTime.now(), coalesce=True,
+ misfire_grace_time=2)
# Turn the clock 14 seconds forward
FakeDateTime._now += timedelta(seconds=2)
@@ -271,11 +262,9 @@ class TestJobExecution(object):
vals = [0]
events = []
scheduler.datetime = FakeDateTime
- self.scheduler.add_listener(events.append,
- EVENT_JOB_EXECUTED | EVENT_JOB_MISSED)
- job = self.scheduler.add_interval_job(
- increment, seconds=1, start_date=FakeDateTime.now(),
- coalesce=False, misfire_grace_time=2)
+ self.scheduler.add_listener(events.append, EVENT_JOB_EXECUTED | EVENT_JOB_MISSED)
+ job = self.scheduler.add_interval_job(increment, seconds=1, start_date=FakeDateTime.now(), coalesce=False,
+ misfire_grace_time=2)
# Turn the clock 2 seconds forward
FakeDateTime._now += timedelta(seconds=2)
@@ -376,8 +365,7 @@ class TestJobExecution(object):
def test_jobstore(self):
self.scheduler.add_jobstore(RAMJobStore(), 'dummy')
- job = self.scheduler.add_date_job(lambda: None, datetime(2200, 7, 24),
- jobstore='dummy')
+ job = self.scheduler.add_date_job(lambda: None, datetime(2200, 7, 24), jobstore='dummy')
eq_(self.scheduler.get_jobs(), [job])
self.scheduler.remove_jobstore('dummy')
eq_(self.scheduler.get_jobs(), [])
diff --git a/tests/testtriggers.py b/tests/testtriggers.py
index 4787242..86cc64f 100644
--- a/tests/testtriggers.py
+++ b/tests/testtriggers.py
@@ -7,8 +7,7 @@ from apscheduler.triggers import CronTrigger, SimpleTrigger, IntervalTrigger
def test_cron_trigger_1():
trigger = CronTrigger(year='2009/2', month='1/3', day='5-13')
- eq_(repr(trigger),
- "<CronTrigger (year='2009/2', month='1/3', day='5-13')>")
+ eq_(repr(trigger), "<CronTrigger (year='2009/2', month='1/3', day='5-13')>")
eq_(str(trigger), "cron[year='2009/2', month='1/3', day='5-13']")
start_date = datetime(2008, 12, 1)
correct_next_date = datetime(2009, 1, 5)
@@ -55,9 +54,7 @@ def test_cron_year_list():
def test_cron_start_date():
trigger = CronTrigger(year='2009', month='2', hour='8-10',
start_date='2009-02-03 11:00:00')
- eq_(repr(trigger),
- "<CronTrigger (year='2009', month='2', hour='8-10', "
- "start_date='2009-02-03 11:00:00')>")
+ eq_(repr(trigger), "<CronTrigger (year='2009', month='2', hour='8-10', start_date='2009-02-03 11:00:00')>")
eq_(str(trigger), "cron[year='2009', month='2', hour='8-10']")
start_date = datetime(2009, 1, 1)
correct_next_date = datetime(2009, 2, 4, 8)
@@ -67,24 +64,17 @@ def test_cron_start_date():
def test_cron_weekday_overlap():
trigger = CronTrigger(year=2009, month=1, day='6-10',
day_of_week='2-4')
- eq_(repr(trigger),
- "<CronTrigger (year='2009', month='1', "
- "day='6-10', day_of_week='2-4')>")
- eq_(str(trigger),
- "cron[year='2009', month='1', day='6-10', day_of_week='2-4']")
+ eq_(repr(trigger), "<CronTrigger (year='2009', month='1', day='6-10', day_of_week='2-4')>")
+ eq_(str(trigger), "cron[year='2009', month='1', day='6-10', day_of_week='2-4']")
start_date = datetime(2009, 1, 1)
correct_next_date = datetime(2009, 1, 7)
eq_(trigger.get_next_fire_time(start_date), correct_next_date)
def test_cron_weekday_nomatch():
- trigger = CronTrigger(year=2009, month=1, day='6-10',
- day_of_week='0,6')
- eq_(repr(trigger),
- "<CronTrigger (year='2009', month='1', "
- "day='6-10', day_of_week='0,6')>")
- eq_(str(trigger),
- "cron[year='2009', month='1', day='6-10', day_of_week='0,6']")
+ trigger = CronTrigger(year=2009, month=1, day='6-10', day_of_week='0,6')
+ eq_(repr(trigger), "<CronTrigger (year='2009', month='1', day='6-10', day_of_week='0,6')>")
+ eq_(str(trigger), "cron[year='2009', month='1', day='6-10', day_of_week='0,6']")
start_date = datetime(2009, 1, 1)
correct_next_date = None
eq_(trigger.get_next_fire_time(start_date), correct_next_date)
@@ -110,8 +100,7 @@ def test_week_1():
def test_week_2():
trigger = CronTrigger(year=2009, week=15, day_of_week=2)
- eq_(repr(trigger),
- "<CronTrigger (year='2009', week='15', day_of_week='2')>")
+ eq_(repr(trigger), "<CronTrigger (year='2009', week='15', day_of_week='2')>")
eq_(str(trigger), "cron[year='2009', week='15', day_of_week='2']")
start_date = datetime(2009, 1, 1)
correct_next_date = datetime(2009, 4, 8)
@@ -152,8 +141,7 @@ def test_cron_bad_kwarg():
def test_date_trigger_earlier():
fire_date = datetime(2009, 7, 6)
trigger = SimpleTrigger(fire_date)
- eq_(repr(trigger),
- "<SimpleTrigger (run_date=datetime.datetime(2009, 7, 6, 0, 0))>")
+ eq_(repr(trigger), "<SimpleTrigger (run_date=datetime.datetime(2009, 7, 6, 0, 0))>")
eq_(str(trigger), "date[2009-07-06 00:00:00]")
start_date = datetime(2008, 12, 1)
eq_(trigger.get_next_fire_time(start_date), fire_date)
@@ -192,8 +180,7 @@ class TestInterval(object):
def test_interval_repr(self):
eq_(repr(self.trigger),
- "<IntervalTrigger (interval=datetime.timedelta(0, 1), "
- "start_date=datetime.datetime(2009, 8, 4, 0, 0, 2))>")
+ "<IntervalTrigger (interval=datetime.timedelta(0, 1), start_date=datetime.datetime(2009, 8, 4, 0, 0, 2))>")
eq_(str(self.trigger), "interval[0:00:01]")
def test_interval_before(self):
diff --git a/tests/testutil.py b/tests/testutil.py
index b9749fb..bc85adf 100644
--- a/tests/testutil.py
+++ b/tests/testutil.py
@@ -192,8 +192,7 @@ def test_obj_to_ref():
def test_inner_obj_to_ref():
if sys.version_info < (3, 3):
raise SkipTest
- eq_(obj_to_ref(DummyClass.InnerDummyClass.innerclassmeth),
- 'testutil:DummyClass.InnerDummyClass.innerclassmeth')
+ eq_(obj_to_ref(DummyClass.InnerDummyClass.innerclassmeth), 'testutil:DummyClass.InnerDummyClass.innerclassmeth')
def test_ref_to_obj():