summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.travis.yml17
-rw-r--r--apscheduler/events.py10
-rw-r--r--apscheduler/executors/asyncio.py21
-rw-r--r--apscheduler/executors/base.py53
-rw-r--r--apscheduler/executors/base_py3.py41
-rw-r--r--apscheduler/executors/gevent.py1
-rw-r--r--apscheduler/executors/pool.py6
-rw-r--r--apscheduler/executors/tornado.py16
-rw-r--r--apscheduler/executors/twisted.py4
-rw-r--r--apscheduler/job.py31
-rw-r--r--apscheduler/jobstores/base.py10
-rw-r--r--apscheduler/jobstores/memory.py4
-rw-r--r--apscheduler/jobstores/mongodb.py11
-rw-r--r--apscheduler/jobstores/redis.py14
-rw-r--r--apscheduler/jobstores/rethinkdb.py11
-rw-r--r--apscheduler/jobstores/sqlalchemy.py11
-rw-r--r--apscheduler/jobstores/zookeeper.py12
-rw-r--r--apscheduler/schedulers/asyncio.py5
-rw-r--r--apscheduler/schedulers/background.py5
-rw-r--r--apscheduler/schedulers/base.py55
-rw-r--r--apscheduler/schedulers/blocking.py9
-rw-r--r--apscheduler/schedulers/gevent.py3
-rw-r--r--apscheduler/schedulers/tornado.py5
-rw-r--r--apscheduler/schedulers/twisted.py5
-rw-r--r--apscheduler/triggers/base.py4
-rw-r--r--apscheduler/triggers/cron/__init__.py3
-rw-r--r--apscheduler/triggers/cron/expressions.py16
-rw-r--r--apscheduler/triggers/cron/fields.py6
-rw-r--r--apscheduler/util.py42
-rw-r--r--docs/versionhistory.rst2
-rw-r--r--examples/schedulers/asyncio_.py8
-rw-r--r--setup.cfg3
-rw-r--r--setup.py20
-rw-r--r--tests/__init__.py0
-rw-r--r--tests/conftest.py33
-rw-r--r--tests/test_executors.py109
-rw-r--r--tests/test_executors_py35.py102
-rw-r--r--tests/test_job.py37
-rw-r--r--tests/test_schedulers.py19
-rw-r--r--tests/test_triggers.py8
-rw-r--r--tests/test_util.py32
-rw-r--r--tox.ini3
42 files changed, 289 insertions, 518 deletions
diff --git a/.travis.yml b/.travis.yml
index 697588c..f7de5ef 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -16,26 +16,15 @@ jobs:
- &test
stage: test
- env: TOXENV=pypy
- python: pypy2.7-6.0
+ env: TOXENV=pypy3
+ python: pypy3
before_install: docker-compose up -d
after_success:
- pip install coveralls
- coveralls
- <<: *test
- env: TOXENV=pypy3
- python: pypy3.5-6.0
-
- - <<: *test
- env: TOXENV=py27
- python: "2.7"
-
- - <<: *test
- env: TOXENV=py34
- python: "3.4"
-
- - <<: *test
+ stage: test
env: TOXENV=py35
python: "3.5"
diff --git a/apscheduler/events.py b/apscheduler/events.py
index 016da03..3919674 100644
--- a/apscheduler/events.py
+++ b/apscheduler/events.py
@@ -30,7 +30,7 @@ EVENT_ALL = (EVENT_SCHEDULER_STARTED | EVENT_SCHEDULER_SHUTDOWN | EVENT_SCHEDULE
EVENT_JOB_ERROR | EVENT_JOB_MISSED | EVENT_JOB_SUBMITTED | EVENT_JOB_MAX_INSTANCES)
-class SchedulerEvent(object):
+class SchedulerEvent:
"""
An event that concerns the scheduler itself.
@@ -39,7 +39,7 @@ class SchedulerEvent(object):
"""
def __init__(self, code, alias=None):
- super(SchedulerEvent, self).__init__()
+ super().__init__()
self.code = code
self.alias = alias
@@ -57,7 +57,7 @@ class JobEvent(SchedulerEvent):
"""
def __init__(self, code, job_id, jobstore):
- super(JobEvent, self).__init__(code)
+ super().__init__(code)
self.code = code
self.job_id = job_id
self.jobstore = jobstore
@@ -71,7 +71,7 @@ class JobSubmissionEvent(JobEvent):
"""
def __init__(self, code, job_id, jobstore, scheduled_run_times):
- super(JobSubmissionEvent, self).__init__(code, job_id, jobstore)
+ super().__init__(code, job_id, jobstore)
self.scheduled_run_times = scheduled_run_times
@@ -87,7 +87,7 @@ class JobExecutionEvent(JobEvent):
def __init__(self, code, job_id, jobstore, scheduled_run_time, retval=None, exception=None,
traceback=None):
- super(JobExecutionEvent, self).__init__(code, job_id, jobstore)
+ super().__init__(code, job_id, jobstore)
self.scheduled_run_time = scheduled_run_time
self.retval = retval
self.exception = exception
diff --git a/apscheduler/executors/asyncio.py b/apscheduler/executors/asyncio.py
index 5139622..a62551d 100644
--- a/apscheduler/executors/asyncio.py
+++ b/apscheduler/executors/asyncio.py
@@ -1,15 +1,7 @@
-from __future__ import absolute_import
-
+from asyncio import iscoroutinefunction
import sys
-from apscheduler.executors.base import BaseExecutor, run_job
-
-try:
- from asyncio import iscoroutinefunction
- from apscheduler.executors.base_py3 import run_coroutine_job
-except ImportError:
- from trollius import iscoroutinefunction
- run_coroutine_job = None
+from apscheduler.executors.base import BaseExecutor, run_job, run_coroutine_job
class AsyncIOExecutor(BaseExecutor):
@@ -24,7 +16,7 @@ class AsyncIOExecutor(BaseExecutor):
"""
def start(self, scheduler, alias):
- super(AsyncIOExecutor, self).start(scheduler, alias)
+ super().start(scheduler, alias)
self._eventloop = scheduler._eventloop
self._pending_futures = set()
@@ -47,11 +39,8 @@ class AsyncIOExecutor(BaseExecutor):
self._run_job_success(job.id, events)
if iscoroutinefunction(job.func):
- if run_coroutine_job is not None:
- coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
- f = self._eventloop.create_task(coro)
- else:
- raise Exception('Executing coroutine based jobs is not supported with Trollius')
+ coro = run_coroutine_job(job, job._jobstore_alias, run_times, self._logger.name)
+ f = self._eventloop.create_task(coro)
else:
f = self._eventloop.run_in_executor(None, run_job, job, job._jobstore_alias, run_times,
self._logger.name)
diff --git a/apscheduler/executors/base.py b/apscheduler/executors/base.py
index 4c09fc1..032248f 100644
--- a/apscheduler/executors/base.py
+++ b/apscheduler/executors/base.py
@@ -1,12 +1,12 @@
+import logging
+import sys
+import traceback
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from datetime import datetime, timedelta
from traceback import format_tb
-import logging
-import sys
from pytz import utc
-import six
from apscheduler.events import (
JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)
@@ -14,12 +14,12 @@ from apscheduler.events import (
class MaxInstancesReachedError(Exception):
def __init__(self, job):
- super(MaxInstancesReachedError, self).__init__(
+ super().__init__(
'Job "%s" has already reached its maximum number of instances (%d)' %
(job.id, job.max_instances))
-class BaseExecutor(six.with_metaclass(ABCMeta, object)):
+class BaseExecutor(metaclass=ABCMeta):
"""Abstract base class that defines the interface that every executor must implement."""
_scheduler = None
@@ -27,7 +27,7 @@ class BaseExecutor(six.with_metaclass(ABCMeta, object)):
_logger = logging.getLogger('apscheduler.executors')
def __init__(self):
- super(BaseExecutor, self).__init__()
+ super().__init__()
self._instances = defaultdict(lambda: 0)
def start(self, scheduler, alias):
@@ -131,13 +131,40 @@ def run_job(job, jobstore_alias, run_times, logger_name):
logger.exception('Job "%s" raised an exception', job)
# This is to prevent cyclic references that would lead to memory leaks
- if six.PY2:
- sys.exc_clear()
- del tb
- else:
- import traceback
- traceback.clear_frames(tb)
- del tb
+ traceback.clear_frames(tb)
+ del tb
+ else:
+ events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
+ retval=retval))
+ logger.info('Job "%s" executed successfully', job)
+
+ return events
+
+
+async def run_coroutine_job(job, jobstore_alias, run_times, logger_name):
+ """Coroutine version of run_job()."""
+ events = []
+ logger = logging.getLogger(logger_name)
+ for run_time in run_times:
+ # See if the job missed its run time window, and handle possible misfires accordingly
+ if job.misfire_grace_time is not None:
+ difference = datetime.now(utc) - run_time
+ grace_time = timedelta(seconds=job.misfire_grace_time)
+ if difference > grace_time:
+ events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
+ run_time))
+ logger.warning('Run time of job "%s" was missed by %s', job, difference)
+ continue
+
+ logger.info('Running job "%s" (scheduled at %s)', job, run_time)
+ try:
+ retval = await job.func(*job.args, **job.kwargs)
+ except BaseException:
+ exc, tb = sys.exc_info()[1:]
+ formatted_tb = ''.join(format_tb(tb))
+ events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
+ exception=exc, traceback=formatted_tb))
+ logger.exception('Job "%s" raised an exception', job)
else:
events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
retval=retval))
diff --git a/apscheduler/executors/base_py3.py b/apscheduler/executors/base_py3.py
deleted file mode 100644
index 61abd84..0000000
--- a/apscheduler/executors/base_py3.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import logging
-import sys
-from datetime import datetime, timedelta
-from traceback import format_tb
-
-from pytz import utc
-
-from apscheduler.events import (
- JobExecutionEvent, EVENT_JOB_MISSED, EVENT_JOB_ERROR, EVENT_JOB_EXECUTED)
-
-
-async def run_coroutine_job(job, jobstore_alias, run_times, logger_name):
- """Coroutine version of run_job()."""
- events = []
- logger = logging.getLogger(logger_name)
- for run_time in run_times:
- # See if the job missed its run time window, and handle possible misfires accordingly
- if job.misfire_grace_time is not None:
- difference = datetime.now(utc) - run_time
- grace_time = timedelta(seconds=job.misfire_grace_time)
- if difference > grace_time:
- events.append(JobExecutionEvent(EVENT_JOB_MISSED, job.id, jobstore_alias,
- run_time))
- logger.warning('Run time of job "%s" was missed by %s', job, difference)
- continue
-
- logger.info('Running job "%s" (scheduled at %s)', job, run_time)
- try:
- retval = await job.func(*job.args, **job.kwargs)
- except BaseException:
- exc, tb = sys.exc_info()[1:]
- formatted_tb = ''.join(format_tb(tb))
- events.append(JobExecutionEvent(EVENT_JOB_ERROR, job.id, jobstore_alias, run_time,
- exception=exc, traceback=formatted_tb))
- logger.exception('Job "%s" raised an exception', job)
- else:
- events.append(JobExecutionEvent(EVENT_JOB_EXECUTED, job.id, jobstore_alias, run_time,
- retval=retval))
- logger.info('Job "%s" executed successfully', job)
-
- return events
diff --git a/apscheduler/executors/gevent.py b/apscheduler/executors/gevent.py
index 1235bb6..b8c1edf 100644
--- a/apscheduler/executors/gevent.py
+++ b/apscheduler/executors/gevent.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
import sys
from apscheduler.executors.base import BaseExecutor, run_job
diff --git a/apscheduler/executors/pool.py b/apscheduler/executors/pool.py
index 2f4ef45..6c0d30b 100644
--- a/apscheduler/executors/pool.py
+++ b/apscheduler/executors/pool.py
@@ -7,7 +7,7 @@ from apscheduler.executors.base import BaseExecutor, run_job
class BasePoolExecutor(BaseExecutor):
@abstractmethod
def __init__(self, pool):
- super(BasePoolExecutor, self).__init__()
+ super().__init__()
self._pool = pool
def _do_submit_job(self, job, run_times):
@@ -37,7 +37,7 @@ class ThreadPoolExecutor(BasePoolExecutor):
def __init__(self, max_workers=10):
pool = concurrent.futures.ThreadPoolExecutor(int(max_workers))
- super(ThreadPoolExecutor, self).__init__(pool)
+ super().__init__(pool)
class ProcessPoolExecutor(BasePoolExecutor):
@@ -51,4 +51,4 @@ class ProcessPoolExecutor(BasePoolExecutor):
def __init__(self, max_workers=10):
pool = concurrent.futures.ProcessPoolExecutor(int(max_workers))
- super(ProcessPoolExecutor, self).__init__(pool)
+ super().__init__(pool)
diff --git a/apscheduler/executors/tornado.py b/apscheduler/executors/tornado.py
index a4696ce..0421260 100644
--- a/apscheduler/executors/tornado.py
+++ b/apscheduler/executors/tornado.py
@@ -1,18 +1,10 @@
-from __future__ import absolute_import
-
import sys
from concurrent.futures import ThreadPoolExecutor
+from inspect import iscoroutinefunction
from tornado.gen import convert_yielded
-from apscheduler.executors.base import BaseExecutor, run_job
-
-try:
- from inspect import iscoroutinefunction
- from apscheduler.executors.base_py3 import run_coroutine_job
-except ImportError:
- def iscoroutinefunction(func):
- return False
+from apscheduler.executors.base import BaseExecutor, run_job, run_coroutine_job
class TornadoExecutor(BaseExecutor):
@@ -28,11 +20,11 @@ class TornadoExecutor(BaseExecutor):
"""
def __init__(self, max_workers=10):
- super(TornadoExecutor, self).__init__()
+ super().__init__()
self.executor = ThreadPoolExecutor(max_workers)
def start(self, scheduler, alias):
- super(TornadoExecutor, self).start(scheduler, alias)
+ super().start(scheduler, alias)
self._ioloop = scheduler._ioloop
def _do_submit_job(self, job, run_times):
diff --git a/apscheduler/executors/twisted.py b/apscheduler/executors/twisted.py
index c7bcf64..9b6e860 100644
--- a/apscheduler/executors/twisted.py
+++ b/apscheduler/executors/twisted.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from apscheduler.executors.base import BaseExecutor, run_job
@@ -11,7 +9,7 @@ class TwistedExecutor(BaseExecutor):
"""
def start(self, scheduler, alias):
- super(TwistedExecutor, self).start(scheduler, alias)
+ super().start(scheduler, alias)
self._reactor = scheduler._reactor
def _do_submit_job(self, job, run_times):
diff --git a/apscheduler/job.py b/apscheduler/job.py
index d676ca8..31a55f1 100644
--- a/apscheduler/job.py
+++ b/apscheduler/job.py
@@ -1,11 +1,9 @@
from inspect import ismethod, isclass
from uuid import uuid4
-import six
-
from apscheduler.triggers.base import BaseTrigger
from apscheduler.util import (
- ref_to_obj, obj_to_ref, datetime_repr, repr_escape, get_callable_name, check_callable_args,
+ ref_to_obj, obj_to_ref, datetime_repr, get_callable_name, check_callable_args,
convert_to_datetime)
try:
@@ -14,7 +12,7 @@ except ImportError:
from collections import Iterable, Mapping
-class Job(object):
+class Job:
"""
Contains the options given when scheduling callables and its current schedule and other state.
This class should never be instantiated by the user.
@@ -43,7 +41,7 @@ class Job(object):
'next_run_time')
def __init__(self, scheduler, id=None, **kwargs):
- super(Job, self).__init__()
+ super().__init__()
self._scheduler = scheduler
self._jobstore_alias = None
self._modify(id=id or uuid4().hex, **kwargs)
@@ -146,7 +144,7 @@ class Job(object):
if 'id' in changes:
value = changes.pop('id')
- if not isinstance(value, six.string_types):
+ if not isinstance(value, str):
raise TypeError("id must be a nonempty string")
if hasattr(self, 'id'):
raise ValueError('The job ID may not be changed')
@@ -157,7 +155,7 @@ class Job(object):
args = changes.pop('args') if 'args' in changes else self.args
kwargs = changes.pop('kwargs') if 'kwargs' in changes else self.kwargs
- if isinstance(func, six.string_types):
+ if isinstance(func, str):
func_ref = func
func = ref_to_obj(func)
elif callable(func):
@@ -172,9 +170,9 @@ class Job(object):
if not hasattr(self, 'name') and changes.get('name', None) is None:
changes['name'] = get_callable_name(func)
- if isinstance(args, six.string_types) or not isinstance(args, Iterable):
+ if isinstance(args, str) or not isinstance(args, Iterable):
raise TypeError('args must be a non-string iterable')
- if isinstance(kwargs, six.string_types) or not isinstance(kwargs, Mapping):
+ if isinstance(kwargs, str) or not isinstance(kwargs, Mapping):
raise TypeError('kwargs must be a dict-like object')
check_callable_args(func, args, kwargs)
@@ -186,13 +184,13 @@ class Job(object):
if 'name' in changes:
value = changes.pop('name')
- if not value or not isinstance(value, six.string_types):
+ if not value or not isinstance(value, str):
raise TypeError("name must be a nonempty string")
approved['name'] = value
if 'misfire_grace_time' in changes:
value = changes.pop('misfire_grace_time')
- if value is not None and (not isinstance(value, six.integer_types) or value <= 0):
+ if value is not None and (not isinstance(value, int) or value <= 0):
raise TypeError('misfire_grace_time must be either None or a positive integer')
approved['misfire_grace_time'] = value
@@ -202,7 +200,7 @@ class Job(object):
if 'max_instances' in changes:
value = changes.pop('max_instances')
- if not isinstance(value, six.integer_types) or value <= 0:
+ if not isinstance(value, int) or value <= 0:
raise TypeError('max_instances must be a positive integer')
approved['max_instances'] = value
@@ -216,7 +214,7 @@ class Job(object):
if 'executor' in changes:
value = changes.pop('executor')
- if not isinstance(value, six.string_types):
+ if not isinstance(value, str):
raise TypeError('executor must be a string')
approved['executor'] = value
@@ -229,7 +227,7 @@ class Job(object):
raise AttributeError('The following are not modifiable attributes of Job: %s' %
', '.join(changes))
- for key, value in six.iteritems(approved):
+ for key, value in approved.items():
setattr(self, key, value)
def __getstate__(self):
@@ -286,12 +284,9 @@ class Job(object):
return NotImplemented
def __repr__(self):
- return '<Job (id=%s name=%s)>' % (repr_escape(self.id), repr_escape(self.name))
+ return '<Job (id={self.id!r} name={self.name!r})>'.format(self=self)
def __str__(self):
- return repr_escape(self.__unicode__())
-
- def __unicode__(self):
if hasattr(self, 'next_run_time'):
status = ('next run at: ' + datetime_repr(self.next_run_time) if
self.next_run_time else 'paused')
diff --git a/apscheduler/jobstores/base.py b/apscheduler/jobstores/base.py
index 9cff66c..b0988e1 100644
--- a/apscheduler/jobstores/base.py
+++ b/apscheduler/jobstores/base.py
@@ -1,21 +1,19 @@
from abc import ABCMeta, abstractmethod
import logging
-import six
-
class JobLookupError(KeyError):
"""Raised when the job store cannot find a job for update or removal."""
def __init__(self, job_id):
- super(JobLookupError, self).__init__(u'No job by the id of %s was found' % job_id)
+ super().__init__(u'No job by the id of %s was found' % job_id)
class ConflictingIdError(KeyError):
"""Raised when the uniqueness of job IDs is being violated."""
def __init__(self, job_id):
- super(ConflictingIdError, self).__init__(
+ super().__init__(
u'Job identifier (%s) conflicts with an existing job' % job_id)
@@ -26,12 +24,12 @@ class TransientJobError(ValueError):
"""
def __init__(self, job_id):
- super(TransientJobError, self).__init__(
+ super().__init__(
u'Job (%s) cannot be added to this job store because a reference to the callable '
u'could not be determined.' % job_id)
-class BaseJobStore(six.with_metaclass(ABCMeta)):
+class BaseJobStore(metaclass=ABCMeta):
"""Abstract base class that defines the interface that every job store must implement."""
_scheduler = None
diff --git a/apscheduler/jobstores/memory.py b/apscheduler/jobstores/memory.py
index abfe7c6..2862d80 100644
--- a/apscheduler/jobstores/memory.py
+++ b/apscheduler/jobstores/memory.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import datetime_to_utc_timestamp
@@ -12,7 +10,7 @@ class MemoryJobStore(BaseJobStore):
"""
def __init__(self):
- super(MemoryJobStore, self).__init__()
+ super().__init__()
# list of (job, timestamp), sorted by next_run_time and job id (ascending)
self._jobs = []
self._jobs_index = {} # id -> (job, timestamp) lookup table
diff --git a/apscheduler/jobstores/mongodb.py b/apscheduler/jobstores/mongodb.py
index 7dbc3b1..bb06ade 100644
--- a/apscheduler/jobstores/mongodb.py
+++ b/apscheduler/jobstores/mongodb.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+import pickle
import warnings
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
@@ -6,11 +6,6 @@ from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp
from apscheduler.job import Job
try:
- import cPickle as pickle
-except ImportError: # pragma: nocover
- import pickle
-
-try:
from bson.binary import Binary
from pymongo.errors import DuplicateKeyError
from pymongo import MongoClient, ASCENDING
@@ -36,7 +31,7 @@ class MongoDBJobStore(BaseJobStore):
def __init__(self, database='apscheduler', collection='jobs', client=None,
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
- super(MongoDBJobStore, self).__init__()
+ super().__init__()
self.pickle_protocol = pickle_protocol
if not database:
@@ -53,7 +48,7 @@ class MongoDBJobStore(BaseJobStore):
self.collection = self.client[database][collection]
def start(self, scheduler, alias):
- super(MongoDBJobStore, self).start(scheduler, alias)
+ super().start(scheduler, alias)
self.collection.ensure_index('next_run_time', sparse=True)
@property
diff --git a/apscheduler/jobstores/redis.py b/apscheduler/jobstores/redis.py
index 5bb69d6..8becc6a 100644
--- a/apscheduler/jobstores/redis.py
+++ b/apscheduler/jobstores/redis.py
@@ -1,19 +1,13 @@
-from __future__ import absolute_import
+import pickle
from datetime import datetime
from pytz import utc
-import six
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
- import cPickle as pickle
-except ImportError: # pragma: nocover
- import pickle
-
-try:
from redis import Redis
except ImportError: # pragma: nocover
raise ImportError('RedisJobStore requires redis installed')
@@ -35,7 +29,7 @@ class RedisJobStore(BaseJobStore):
def __init__(self, db=0, jobs_key='apscheduler.jobs', run_times_key='apscheduler.run_times',
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
- super(RedisJobStore, self).__init__()
+ super().__init__()
if db is None:
raise ValueError('The "db" parameter must not be empty')
@@ -58,7 +52,7 @@ class RedisJobStore(BaseJobStore):
job_ids = self.redis.zrangebyscore(self.run_times_key, 0, timestamp)
if job_ids:
job_states = self.redis.hmget(self.jobs_key, *job_ids)
- return self._reconstitute_jobs(six.moves.zip(job_ids, job_states))
+ return self._reconstitute_jobs(zip(job_ids, job_states))
return []
def get_next_run_time(self):
@@ -68,7 +62,7 @@ class RedisJobStore(BaseJobStore):
def get_all_jobs(self):
job_states = self.redis.hgetall(self.jobs_key)
- jobs = self._reconstitute_jobs(six.iteritems(job_states))
+ jobs = self._reconstitute_jobs(job_states.items())
paused_sort_key = datetime(9999, 12, 31, tzinfo=utc)
return sorted(jobs, key=lambda job: job.next_run_time or paused_sort_key)
diff --git a/apscheduler/jobstores/rethinkdb.py b/apscheduler/jobstores/rethinkdb.py
index d8a78cd..e0eda1a 100644
--- a/apscheduler/jobstores/rethinkdb.py
+++ b/apscheduler/jobstores/rethinkdb.py
@@ -1,15 +1,10 @@
-from __future__ import absolute_import
+import pickle
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
- import cPickle as pickle
-except ImportError: # pragma: nocover
- import pickle
-
-try:
from rethinkdb import RethinkDB
except ImportError: # pragma: nocover
raise ImportError('RethinkDBJobStore requires rethinkdb installed')
@@ -32,7 +27,7 @@ class RethinkDBJobStore(BaseJobStore):
def __init__(self, database='apscheduler', table='jobs', client=None,
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
- super(RethinkDBJobStore, self).__init__()
+ super().__init__()
if not database:
raise ValueError('The "database" parameter must not be empty')
@@ -49,7 +44,7 @@ class RethinkDBJobStore(BaseJobStore):
self.conn = None
def start(self, scheduler, alias):
- super(RethinkDBJobStore, self).start(scheduler, alias)
+ super().start(scheduler, alias)
if self.client:
self.conn = maybe_ref(self.client)
diff --git a/apscheduler/jobstores/sqlalchemy.py b/apscheduler/jobstores/sqlalchemy.py
index fecbd83..3634f49 100644
--- a/apscheduler/jobstores/sqlalchemy.py
+++ b/apscheduler/jobstores/sqlalchemy.py
@@ -1,15 +1,10 @@
-from __future__ import absolute_import
+import pickle
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp_to_datetime
from apscheduler.job import Job
try:
- import cPickle as pickle
-except ImportError: # pragma: nocover
- import pickle
-
-try:
from sqlalchemy import (
create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select)
from sqlalchemy.exc import IntegrityError
@@ -42,7 +37,7 @@ class SQLAlchemyJobStore(BaseJobStore):
def __init__(self, url=None, engine=None, tablename='apscheduler_jobs', metadata=None,
pickle_protocol=pickle.HIGHEST_PROTOCOL, tableschema=None, engine_options=None):
- super(SQLAlchemyJobStore, self).__init__()
+ super().__init__()
self.pickle_protocol = pickle_protocol
metadata = maybe_ref(metadata) or MetaData()
@@ -64,7 +59,7 @@ class SQLAlchemyJobStore(BaseJobStore):
)
def start(self, scheduler, alias):
- super(SQLAlchemyJobStore, self).start(scheduler, alias)
+ super().start(scheduler, alias)
self.jobs_t.create(self.engine, True)
def lookup_job(self, job_id):
diff --git a/apscheduler/jobstores/zookeeper.py b/apscheduler/jobstores/zookeeper.py
index 2cca83e..b089bac 100644
--- a/apscheduler/jobstores/zookeeper.py
+++ b/apscheduler/jobstores/zookeeper.py
@@ -1,6 +1,5 @@
-from __future__ import absolute_import
-
import os
+import pickle
from datetime import datetime
from pytz import utc
@@ -11,11 +10,6 @@ from apscheduler.util import maybe_ref, datetime_to_utc_timestamp, utc_timestamp
from apscheduler.job import Job
try:
- import cPickle as pickle
-except ImportError: # pragma: nocover
- import pickle
-
-try:
from kazoo.client import KazooClient
except ImportError: # pragma: nocover
raise ImportError('ZooKeeperJobStore requires Kazoo installed')
@@ -38,7 +32,7 @@ class ZooKeeperJobStore(BaseJobStore):
def __init__(self, path='/apscheduler', client=None, close_connection_on_exit=False,
pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
- super(ZooKeeperJobStore, self).__init__()
+ super().__init__()
self.pickle_protocol = pickle_protocol
self.close_connection_on_exit = close_connection_on_exit
@@ -59,7 +53,7 @@ class ZooKeeperJobStore(BaseJobStore):
self._ensured_path = True
def start(self, scheduler, alias):
- super(ZooKeeperJobStore, self).start(scheduler, alias)
+ super().start(scheduler, alias)
if not self.client.connected:
self.client.start()
diff --git a/apscheduler/schedulers/asyncio.py b/apscheduler/schedulers/asyncio.py
index 289ef13..85df66b 100644
--- a/apscheduler/schedulers/asyncio.py
+++ b/apscheduler/schedulers/asyncio.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
from functools import wraps, partial
from apscheduler.schedulers.base import BaseScheduler
@@ -40,12 +39,12 @@ class AsyncIOScheduler(BaseScheduler):
@run_in_event_loop
def shutdown(self, wait=True):
- super(AsyncIOScheduler, self).shutdown(wait)
+ super().shutdown(wait)
self._stop_timer()
def _configure(self, config):
self._eventloop = maybe_ref(config.pop('event_loop', None)) or asyncio.get_event_loop()
- super(AsyncIOScheduler, self)._configure(config)
+ super()._configure(config)
def _start_timer(self, wait_seconds):
self._stop_timer()
diff --git a/apscheduler/schedulers/background.py b/apscheduler/schedulers/background.py
index 03f2982..9e350e2 100644
--- a/apscheduler/schedulers/background.py
+++ b/apscheduler/schedulers/background.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
from threading import Thread, Event
@@ -26,7 +25,7 @@ class BackgroundScheduler(BlockingScheduler):
def _configure(self, config):
self._daemon = asbool(config.pop('daemon', True))
- super(BackgroundScheduler, self)._configure(config)
+ super()._configure(config)
def start(self, *args, **kwargs):
self._event = Event()
@@ -36,6 +35,6 @@ class BackgroundScheduler(BlockingScheduler):
self._thread.start()
def shutdown(self, *args, **kwargs):
- super(BackgroundScheduler, self).shutdown(*args, **kwargs)
+ super().shutdown(*args, **kwargs)
self._thread.join()
del self._thread
diff --git a/apscheduler/schedulers/base.py b/apscheduler/schedulers/base.py
index 8e71154..e5dabbf 100644
--- a/apscheduler/schedulers/base.py
+++ b/apscheduler/schedulers/base.py
@@ -1,7 +1,6 @@
-from __future__ import print_function
-
from abc import ABCMeta, abstractmethod
-from threading import RLock
+from collections.abc import MutableMapping
+from threading import RLock, TIMEOUT_MAX
from datetime import datetime, timedelta
from logging import getLogger
import warnings
@@ -9,7 +8,6 @@ import sys
from pkg_resources import iter_entry_points
from tzlocal import get_localzone
-import six
from apscheduler.schedulers import SchedulerAlreadyRunningError, SchedulerNotRunningError
from apscheduler.executors.base import MaxInstancesReachedError, BaseExecutor
@@ -19,18 +17,13 @@ from apscheduler.jobstores.memory import MemoryJobStore
from apscheduler.job import Job
from apscheduler.triggers.base import BaseTrigger
from apscheduler.util import (
- asbool, asint, astimezone, maybe_ref, timedelta_seconds, undefined, TIMEOUT_MAX)
+ asbool, asint, astimezone, maybe_ref, timedelta_seconds, undefined)
from apscheduler.events import (
SchedulerEvent, JobEvent, JobSubmissionEvent, EVENT_SCHEDULER_START, EVENT_SCHEDULER_SHUTDOWN,
EVENT_JOBSTORE_ADDED, EVENT_JOBSTORE_REMOVED, EVENT_ALL, EVENT_JOB_MODIFIED, EVENT_JOB_REMOVED,
EVENT_JOB_ADDED, EVENT_EXECUTOR_ADDED, EVENT_EXECUTOR_REMOVED, EVENT_ALL_JOBS_REMOVED,
EVENT_JOB_SUBMITTED, EVENT_JOB_MAX_INSTANCES, EVENT_SCHEDULER_RESUMED, EVENT_SCHEDULER_PAUSED)
-try:
- from collections.abc import MutableMapping
-except ImportError:
- from collections import MutableMapping
-
#: constant indicating a scheduler's stopped state
STATE_STOPPED = 0
#: constant indicating a scheduler's running state (started and processing jobs)
@@ -39,7 +32,7 @@ STATE_RUNNING = 1
STATE_PAUSED = 2
-class BaseScheduler(six.with_metaclass(ABCMeta)):
+class BaseScheduler(metaclass=ABCMeta):
"""
Abstract base class for all schedulers.
@@ -75,7 +68,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
#
def __init__(self, gconfig={}, **options):
- super(BaseScheduler, self).__init__()
+ super().__init__()
self._executors = {}
self._executors_lock = self._create_lock()
self._jobstores = {}
@@ -106,13 +99,13 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
# global configuration dict
if prefix:
prefixlen = len(prefix)
- gconfig = dict((key[prefixlen:], value) for key, value in six.iteritems(gconfig)
+ gconfig = dict((key[prefixlen:], value) for key, value in gconfig.items()
if key.startswith(prefix))
# Create a structure from the dotted options
# (e.g. "a.b.c = d" -> {'a': {'b': {'c': 'd'}}})
config = {}
- for key, value in six.iteritems(gconfig):
+ for key, value in gconfig.items():
parts = key.split('.')
parent = config
key = parts.pop(0)
@@ -145,7 +138,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
self.add_executor(self._create_default_executor(), 'default')
# Start all the executors
- for alias, executor in six.iteritems(self._executors):
+ for alias, executor in self._executors.items():
executor.start(self, alias)
with self._jobstores_lock:
@@ -154,7 +147,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
self.add_jobstore(self._create_default_jobstore(), 'default')
# Start all the job stores
- for alias, store in six.iteritems(self._jobstores):
+ for alias, store in self._jobstores.items():
store.start(self, alias)
# Schedule all pending jobs
@@ -187,12 +180,12 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
# Shut down all executors
with self._executors_lock:
- for executor in six.itervalues(self._executors):
+ for executor in self._executors.values():
executor.shutdown(wait)
# Shut down all job stores
with self._jobstores_lock:
- for jobstore in six.itervalues(self._jobstores):
+ for jobstore in self._jobstores.values():
jobstore.shutdown()
self._logger.info('Scheduler has been shut down')
@@ -253,7 +246,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
if isinstance(executor, BaseExecutor):
self._executors[alias] = executor
- elif isinstance(executor, six.string_types):
+ elif isinstance(executor, str):
self._executors[alias] = executor = self._create_plugin_instance(
'executor', executor, executor_opts)
else:
@@ -303,7 +296,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
if isinstance(jobstore, BaseJobStore):
self._jobstores[alias] = jobstore
- elif isinstance(jobstore, six.string_types):
+ elif isinstance(jobstore, str):
self._jobstores[alias] = jobstore = self._create_plugin_instance(
'jobstore', jobstore, jobstore_opts)
else:
@@ -429,7 +422,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
'max_instances': max_instances,
'next_run_time': next_run_time
}
- job_kwargs = dict((key, value) for key, value in six.iteritems(job_kwargs) if
+ job_kwargs = dict((key, value) for key, value in job_kwargs.items() if
value is not undefined)
job = Job(self, **job_kwargs)
@@ -565,7 +558,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
if jobstore is None or alias == jobstore:
jobs.append(job)
else:
- for alias, store in six.iteritems(self._jobstores):
+ for alias, store in self._jobstores.items():
if jobstore is None or alias == jobstore:
jobs.extend(store.get_all_jobs())
@@ -608,7 +601,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
else:
# Otherwise, try to remove it from each store until it succeeds or we run out of
# stores to check
- for alias, store in six.iteritems(self._jobstores):
+ for alias, store in self._jobstores.items():
if jobstore in (None, alias):
try:
store.remove_job(job_id)
@@ -641,7 +634,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
else:
self._pending_jobs = []
else:
- for alias, store in six.iteritems(self._jobstores):
+ for alias, store in self._jobstores.items():
if jobstore in (None, alias):
store.remove_all_jobs()
@@ -670,7 +663,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
else:
print(u' No pending jobs', file=out)
else:
- for alias, store in sorted(six.iteritems(self._jobstores)):
+ for alias, store in sorted(self._jobstores.items()):
if jobstore in (None, alias):
print(u'Jobstore %s:' % alias, file=out)
jobs = store.get_all_jobs()
@@ -707,7 +700,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
# Configure executors
self._executors.clear()
- for alias, value in six.iteritems(config.get('executors', {})):
+ for alias, value in config.get('executors', {}).items():
if isinstance(value, BaseExecutor):
self.add_executor(value, alias)
elif isinstance(value, MutableMapping):
@@ -731,7 +724,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
# Configure job stores
self._jobstores.clear()
- for alias, value in six.iteritems(config.get('jobstores', {})):
+ for alias, value in config.get('jobstores', {}).items():
if isinstance(value, BaseJobStore):
self.add_jobstore(value, alias)
elif isinstance(value, MutableMapping):
@@ -807,7 +800,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
return job, None
else:
# Look in all job stores
- for alias, store in six.iteritems(self._jobstores):
+ for alias, store in self._jobstores.items():
if jobstore_alias in (None, alias):
job = store.lookup_job(job_id)
if job is not None:
@@ -849,7 +842,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
"""
# Fill in undefined values with defaults
replacements = {}
- for key, value in six.iteritems(self._job_defaults):
+ for key, value in self._job_defaults.items():
if not hasattr(job, key):
replacements[key] = value
@@ -910,7 +903,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
return trigger
elif trigger is None:
trigger = 'date'
- elif not isinstance(trigger, six.string_types):
+ elif not isinstance(trigger, str):
raise TypeError('Expected a trigger instance or string, got %s instead' %
trigger.__class__.__name__)
@@ -943,7 +936,7 @@ class BaseScheduler(six.with_metaclass(ABCMeta)):
events = []
with self._jobstores_lock:
- for jobstore_alias, jobstore in six.iteritems(self._jobstores):
+ for jobstore_alias, jobstore in self._jobstores.items():
try:
due_jobs = jobstore.get_due_jobs(now)
except Exception as e:
diff --git a/apscheduler/schedulers/blocking.py b/apscheduler/schedulers/blocking.py
index e617157..34d807d 100644
--- a/apscheduler/schedulers/blocking.py
+++ b/apscheduler/schedulers/blocking.py
@@ -1,9 +1,6 @@
-from __future__ import absolute_import
-
-from threading import Event
+from threading import Event, TIMEOUT_MAX
from apscheduler.schedulers.base import BaseScheduler, STATE_STOPPED
-from apscheduler.util import TIMEOUT_MAX
class BlockingScheduler(BaseScheduler):
@@ -15,11 +12,11 @@ class BlockingScheduler(BaseScheduler):
def start(self, *args, **kwargs):
self._event = Event()
- super(BlockingScheduler, self).start(*args, **kwargs)
+ super().start(*args, **kwargs)
self._main_loop()
def shutdown(self, wait=True):
- super(BlockingScheduler, self).shutdown(wait)
+ super().shutdown(wait)
self._event.set()
def _main_loop(self):
diff --git a/apscheduler/schedulers/gevent.py b/apscheduler/schedulers/gevent.py
index d48ed74..9ced968 100644
--- a/apscheduler/schedulers/gevent.py
+++ b/apscheduler/schedulers/gevent.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.base import BaseScheduler
@@ -23,7 +22,7 @@ class GeventScheduler(BlockingScheduler):
return self._greenlet
def shutdown(self, *args, **kwargs):
- super(GeventScheduler, self).shutdown(*args, **kwargs)
+ super().shutdown(*args, **kwargs)
self._greenlet.join()
del self._greenlet
diff --git a/apscheduler/schedulers/tornado.py b/apscheduler/schedulers/tornado.py
index 0a9171f..f5ff3b5 100644
--- a/apscheduler/schedulers/tornado.py
+++ b/apscheduler/schedulers/tornado.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
from datetime import timedelta
from functools import wraps
@@ -35,12 +34,12 @@ class TornadoScheduler(BaseScheduler):
@run_in_ioloop
def shutdown(self, wait=True):
- super(TornadoScheduler, self).shutdown(wait)
+ super().shutdown(wait)
self._stop_timer()
def _configure(self, config):
self._ioloop = maybe_ref(config.pop('io_loop', None)) or IOLoop.current()
- super(TornadoScheduler, self)._configure(config)
+ super()._configure(config)
def _start_timer(self, wait_seconds):
self._stop_timer()
diff --git a/apscheduler/schedulers/twisted.py b/apscheduler/schedulers/twisted.py
index 6b43a84..14550d6 100644
--- a/apscheduler/schedulers/twisted.py
+++ b/apscheduler/schedulers/twisted.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
from functools import wraps
@@ -34,11 +33,11 @@ class TwistedScheduler(BaseScheduler):
def _configure(self, config):
self._reactor = maybe_ref(config.pop('reactor', default_reactor))
- super(TwistedScheduler, self)._configure(config)
+ super()._configure(config)
@run_in_reactor
def shutdown(self, wait=True):
- super(TwistedScheduler, self).shutdown(wait)
+ super().shutdown(wait)
self._stop_timer()
def _start_timer(self, wait_seconds):
diff --git a/apscheduler/triggers/base.py b/apscheduler/triggers/base.py
index ce2526a..bbfabbd 100644
--- a/apscheduler/triggers/base.py
+++ b/apscheduler/triggers/base.py
@@ -2,10 +2,8 @@ from abc import ABCMeta, abstractmethod
from datetime import timedelta
import random
-import six
-
-class BaseTrigger(six.with_metaclass(ABCMeta)):
+class BaseTrigger(metaclass=ABCMeta):
"""Abstract base class that defines the interface that every trigger must implement."""
__slots__ = ()
diff --git a/apscheduler/triggers/cron/__init__.py b/apscheduler/triggers/cron/__init__.py
index ce675dd..b12edbd 100644
--- a/apscheduler/triggers/cron/__init__.py
+++ b/apscheduler/triggers/cron/__init__.py
@@ -1,7 +1,6 @@
from datetime import datetime, timedelta
from tzlocal import get_localzone
-import six
from apscheduler.triggers.base import BaseTrigger
from apscheduler.triggers.cron.fields import (
@@ -62,7 +61,7 @@ class CronTrigger(BaseTrigger):
self.jitter = jitter
- values = dict((key, value) for (key, value) in six.iteritems(locals())
+ values = dict((key, value) for (key, value) in locals().items()
if key in self.FIELD_NAMES and value is not None)
self.fields = []
assign_defaults = False
diff --git a/apscheduler/triggers/cron/expressions.py b/apscheduler/triggers/cron/expressions.py
index 55a3716..ebd565a 100644
--- a/apscheduler/triggers/cron/expressions.py
+++ b/apscheduler/triggers/cron/expressions.py
@@ -13,7 +13,7 @@ WEEKDAYS = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
MONTHS = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
-class AllExpression(object):
+class AllExpression:
value_re = re.compile(r'\*(?:/(?P<step>\d+))?$')
def __init__(self, step=None):
@@ -61,7 +61,7 @@ class RangeExpression(AllExpression):
r'(?P<first>\d+)(?:-(?P<last>\d+))?(?:/(?P<step>\d+))?$')
def __init__(self, first, last=None, step=None):
- super(RangeExpression, self).__init__(step)
+ super().__init__(step)
first = asint(first)
last = asint(last)
if last is None and step is None:
@@ -74,7 +74,7 @@ class RangeExpression(AllExpression):
def validate_range(self, field_name):
from apscheduler.triggers.cron.fields import MIN_VALUES, MAX_VALUES
- super(RangeExpression, self).validate_range(field_name)
+ super().validate_range(field_name)
if self.first < MIN_VALUES[field_name]:
raise ValueError('the first value ({}) is lower than the minimum value ({})'
.format(self.first, MIN_VALUES[field_name]))
@@ -143,7 +143,7 @@ class MonthRangeExpression(RangeExpression):
else:
last_num = None
- super(MonthRangeExpression, self).__init__(first_num, last_num)
+ super().__init__(first_num, last_num)
def __str__(self):
if self.last != self.first and self.last is not None:
@@ -174,7 +174,7 @@ class WeekdayRangeExpression(RangeExpression):
else:
last_num = None
- super(WeekdayRangeExpression, self).__init__(first_num, last_num)
+ super().__init__(first_num, last_num)
def __str__(self):
if self.last != self.first and self.last is not None:
@@ -194,7 +194,7 @@ class WeekdayPositionExpression(AllExpression):
'|'.join(options), re.IGNORECASE)
def __init__(self, option_name, weekday_name):
- super(WeekdayPositionExpression, self).__init__(None)
+ super().__init__(None)
try:
self.option_num = self.options.index(option_name.lower())
except ValueError:
@@ -224,7 +224,7 @@ class WeekdayPositionExpression(AllExpression):
return target_day
def __eq__(self, other):
- return (super(WeekdayPositionExpression, self).__eq__(other) and
+ return (super().__eq__(other) and
self.option_num == other.option_num and self.weekday == other.weekday)
def __str__(self):
@@ -239,7 +239,7 @@ class LastDayOfMonthExpression(AllExpression):
value_re = re.compile(r'last', re.IGNORECASE)
def __init__(self):
- super(LastDayOfMonthExpression, self).__init__(None)
+ super().__init__(None)
def get_next_value(self, date, field):
return monthrange(date.year, date.month)[1]
diff --git a/apscheduler/triggers/cron/fields.py b/apscheduler/triggers/cron/fields.py
index 86d620c..78f8bac 100644
--- a/apscheduler/triggers/cron/fields.py
+++ b/apscheduler/triggers/cron/fields.py
@@ -3,8 +3,6 @@
from calendar import monthrange
import re
-import six
-
from apscheduler.triggers.cron.expressions import (
AllExpression, RangeExpression, WeekdayPositionExpression, LastDayOfMonthExpression,
WeekdayRangeExpression, MonthRangeExpression)
@@ -23,7 +21,7 @@ DEFAULT_VALUES = {'year': '*', 'month': 1, 'day': 1, 'week': '*', 'day_of_week':
SEPARATOR = re.compile(' *, *')
-class BaseField(object):
+class BaseField:
REAL = True
COMPILERS = [AllExpression, RangeExpression]
@@ -67,7 +65,7 @@ class BaseField(object):
compiled_expr.validate_range(self.name)
except ValueError as e:
exc = ValueError('Error validating expression {!r}: {}'.format(expr, e))
- six.raise_from(exc, None)
+ raise exc from None
self.expressions.append(compiled_expr)
return
diff --git a/apscheduler/util.py b/apscheduler/util.py
index e7b4869..2df014b 100644
--- a/apscheduler/util.py
+++ b/apscheduler/util.py
@@ -1,36 +1,20 @@
"""This module contains several handy functions primarily meant for internal use."""
-from __future__ import division
-
from datetime import date, datetime, time, timedelta, tzinfo
from calendar import timegm
+from inspect import signature
from functools import partial
from inspect import isclass, ismethod
import re
from pytz import timezone, utc, FixedOffset
-import six
-
-try:
- from inspect import signature
-except ImportError: # pragma: nocover
- from funcsigs import signature
-
-try:
- from threading import TIMEOUT_MAX
-except ImportError:
- TIMEOUT_MAX = 4294967 # Maximum value accepted by Event.wait() on Windows
__all__ = ('asint', 'asbool', 'astimezone', 'convert_to_datetime', 'datetime_to_utc_timestamp',
'utc_timestamp_to_datetime', 'timedelta_seconds', 'datetime_ceil', 'get_callable_name',
- 'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'repr_escape', 'check_callable_args',
- 'TIMEOUT_MAX')
+ 'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'check_callable_args')
-class _Undefined(object):
- def __nonzero__(self):
- return False
-
+class _Undefined:
def __bool__(self):
return False
@@ -77,7 +61,7 @@ def astimezone(obj):
:rtype: tzinfo
"""
- if isinstance(obj, six.string_types):
+ if isinstance(obj, str):
return timezone(obj)
if isinstance(obj, tzinfo):
if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'):
@@ -125,7 +109,7 @@ def convert_to_datetime(input, tz, arg_name):
datetime_ = input
elif isinstance(input, date):
datetime_ = datetime.combine(input, time())
- elif isinstance(input, six.string_types):
+ elif isinstance(input, str):
m = _DATE_REGEX.match(input)
if not m:
raise ValueError('Invalid date string')
@@ -149,7 +133,7 @@ def convert_to_datetime(input, tz, arg_name):
if tz is None:
raise ValueError(
'The "tz" argument must be specified if %s has no timezone information' % arg_name)
- if isinstance(tz, six.string_types):
+ if isinstance(tz, str):
tz = timezone(tz)
try:
@@ -284,7 +268,7 @@ def ref_to_obj(ref):
:type ref: str
"""
- if not isinstance(ref, six.string_types):
+ if not isinstance(ref, str):
raise TypeError('References must be strings')
if ':' not in ref:
raise ValueError('Invalid reference')
@@ -314,16 +298,6 @@ def maybe_ref(ref):
return ref_to_obj(ref)
-if six.PY2:
- def repr_escape(string):
- if isinstance(string, six.text_type):
- return string.encode('ascii', 'backslashreplace')
- return string
-else:
- def repr_escape(string):
- return string
-
-
def check_callable_args(func, args, kwargs):
"""
Ensures that the given callable can be called with the given arguments.
@@ -348,7 +322,7 @@ def check_callable_args(func, args, kwargs):
# signature() doesn't work against every kind of callable
return
- for param in six.itervalues(sig.parameters):
+ for param in sig.parameters.values():
if param.kind == param.POSITIONAL_OR_KEYWORD:
if param.name in unmatched_kwargs and unmatched_args:
pos_kwargs_conflicts.append(param.name)
diff --git a/docs/versionhistory.rst b/docs/versionhistory.rst
index 5cfa7bc..eb09d1c 100644
--- a/docs/versionhistory.rst
+++ b/docs/versionhistory.rst
@@ -15,6 +15,8 @@ APScheduler, see the :doc:`migration section <migration>`.
3.6.0
-----
+* Dropped support for Python 2.x and 3.4
+
* Removed the Qt scheduler due to maintenance difficulties
* Adapted ``RedisJobStore`` to v3.0 of the ``redis`` library
* Adapted ``RethinkDBJobStore`` to v2.4 of the ``rethink`` library
diff --git a/examples/schedulers/asyncio_.py b/examples/schedulers/asyncio_.py
index 88d9a82..a438ae1 100644
--- a/examples/schedulers/asyncio_.py
+++ b/examples/schedulers/asyncio_.py
@@ -3,16 +3,12 @@ Demonstrates how to use the asyncio compatible scheduler to schedule a job that
second intervals.
"""
-from datetime import datetime
+import asyncio
import os
+from datetime import datetime
from apscheduler.schedulers.asyncio import AsyncIOScheduler
-try:
- import asyncio
-except ImportError:
- import trollius as asyncio
-
def tick():
print('Tick! The time is: %s' % datetime.now())
diff --git a/setup.cfg b/setup.cfg
index e91be43..c2d8766 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -18,6 +18,3 @@ show_missing = true
[flake8]
max-line-length = 99
exclude = .tox,build,docs
-
-[bdist_wheel]
-universal = 1
diff --git a/setup.py b/setup.py
index 6988f74..e8e0f2e 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import os.path
from setuptools import setup, find_packages
@@ -16,17 +15,15 @@ setup(
},
description='In-process task scheduler with Cron-like capabilities',
long_description=readme,
- author=u'Alex Grönholm',
- author_email='apscheduler@nextday.fi',
+ author='Alex Grönholm',
+ author_email='alex.gronholm@nextday.fi',
url='https://github.com/agronholm/apscheduler',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
@@ -34,18 +31,16 @@ setup(
keywords='scheduling cron',
license='MIT',
packages=find_packages(exclude=['tests']),
+ python_requires='>= 3.5',
setup_requires=[
'setuptools_scm'
],
install_requires=[
'setuptools >= 0.7',
- 'six >= 1.4.0',
'pytz',
'tzlocal >= 1.2',
],
extras_require={
- ':python_version == "2.7"': ['futures', 'funcsigs'],
- 'asyncio:python_version == "2.7"': ['trollius'],
'gevent': ['gevent'],
'mongodb': ['pymongo >= 2.8'],
'redis': ['redis >= 3.0'],
@@ -55,13 +50,10 @@ setup(
'twisted': ['twisted'],
'zookeeper': ['kazoo'],
'testing': [
- 'pytest < 3.7',
+ 'pytest',
'pytest-cov',
'pytest-tornado5'
],
- 'testing:python_version == "2.7"': ['mock'],
- 'testing:python_version == "3.4"': ['pytest_asyncio < 0.6'],
- 'testing:python_version >= "3.5"': ['pytest_asyncio'],
'doc': [
'sphinx',
'sphinx-rtd-theme',
@@ -80,7 +72,7 @@ setup(
'debug = apscheduler.executors.debug:DebugExecutor',
'threadpool = apscheduler.executors.pool:ThreadPoolExecutor',
'processpool = apscheduler.executors.pool:ProcessPoolExecutor',
- 'asyncio = apscheduler.executors.asyncio:AsyncIOExecutor [asyncio]',
+ 'asyncio = apscheduler.executors.asyncio:AsyncIOExecutor',
'gevent = apscheduler.executors.gevent:GeventExecutor [gevent]',
'tornado = apscheduler.executors.tornado:TornadoExecutor [tornado]',
'twisted = apscheduler.executors.twisted:TwistedExecutor [twisted]'
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/__init__.py
+++ /dev/null
diff --git a/tests/conftest.py b/tests/conftest.py
index 19fba99..ad704e9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,6 +1,6 @@
-# coding: utf-8
-from datetime import datetime
import sys
+from datetime import datetime
+from unittest.mock import Mock
import pytest
import pytz
@@ -9,35 +9,6 @@ from apscheduler.job import Job
from apscheduler.schedulers.base import BaseScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
-try:
- from unittest.mock import Mock
-except ImportError:
- from mock import Mock
-
-
-def pytest_ignore_collect(path, config):
- return path.basename.endswith('_py35.py') and sys.version_info < (3, 5)
-
-
-def minpython(*version):
- version_str = '.'.join([str(num) for num in version])
-
- def outer(func):
- dec = pytest.mark.skipif(sys.version_info < version,
- reason='Requires Python >= %s' % version_str)
- return dec(func)
- return outer
-
-
-def maxpython(*version):
- version_str = '.'.join([str(num) for num in version])
-
- def outer(func):
- dec = pytest.mark.skipif(sys.version_info >= version,
- reason='Requires Python < %s' % version_str)
- return dec(func)
- return outer
-
@pytest.fixture
def timezone(monkeypatch):
diff --git a/tests/test_executors.py b/tests/test_executors.py
index 16defd2..a1d525f 100644
--- a/tests/test_executors.py
+++ b/tests/test_executors.py
@@ -1,21 +1,22 @@
+import gc
+import time
+from asyncio import CancelledError
from datetime import datetime
from threading import Event
from types import TracebackType
-import gc
-import time
+from unittest.mock import Mock, MagicMock, patch
import pytest
-from pytz import UTC
+from pytz import UTC, utc
from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_MISSED, EVENT_JOB_EXECUTED
+from apscheduler.executors.asyncio import AsyncIOExecutor
from apscheduler.executors.base import MaxInstancesReachedError, run_job
+from apscheduler.executors.tornado import TornadoExecutor
from apscheduler.job import Job
+from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.schedulers.base import BaseScheduler
-
-try:
- from unittest.mock import Mock, MagicMock, patch
-except ImportError:
- from mock import Mock, MagicMock, patch
+from apscheduler.schedulers.tornado import TornadoScheduler
@pytest.fixture
@@ -144,3 +145,95 @@ def test_run_job_memory_leak():
foos = [x for x in gc.get_objects() if type(x) is FooBar]
assert len(foos) == 0
+
+
+@pytest.fixture
+def asyncio_scheduler(event_loop):
+ scheduler = AsyncIOScheduler(event_loop=event_loop)
+ scheduler.start(paused=True)
+ yield scheduler
+ scheduler.shutdown(False)
+
+
+@pytest.fixture
+def asyncio_executor(asyncio_scheduler):
+ executor = AsyncIOExecutor()
+ executor.start(asyncio_scheduler, 'default')
+ yield executor
+ executor.shutdown()
+
+
+@pytest.fixture
+def tornado_scheduler(io_loop):
+ scheduler = TornadoScheduler(io_loop=io_loop)
+ scheduler.start(paused=True)
+ yield scheduler
+ scheduler.shutdown(False)
+
+
+@pytest.fixture
+def tornado_executor(tornado_scheduler):
+ executor = TornadoExecutor()
+ executor.start(tornado_scheduler, 'default')
+ yield executor
+ executor.shutdown()
+
+
+async def waiter(sleep, exception):
+ await sleep(0.1)
+ if exception:
+ raise Exception('dummy error')
+ else:
+ return True
+
+
+@pytest.mark.parametrize('exception', [False, True])
+@pytest.mark.asyncio
+async def test_run_coroutine_job(asyncio_scheduler, asyncio_executor, exception):
+ from asyncio import Future, sleep
+
+ future = Future()
+ job = asyncio_scheduler.add_job(waiter, 'interval', seconds=1, args=[sleep, exception])
+ asyncio_executor._run_job_success = lambda job_id, events: future.set_result(events)
+ asyncio_executor._run_job_error = lambda job_id, exc, tb: future.set_exception(exc)
+ asyncio_executor.submit_job(job, [datetime.now(utc)])
+ events = await future
+ assert len(events) == 1
+ if exception:
+ assert str(events[0].exception) == 'dummy error'
+ else:
+ assert events[0].retval is True
+
+
+@pytest.mark.parametrize('exception', [False, True])
+@pytest.mark.gen_test
+async def test_run_coroutine_job_tornado(tornado_scheduler, tornado_executor, exception):
+ from tornado.concurrent import Future
+ from tornado.gen import sleep
+
+ future = Future()
+ job = tornado_scheduler.add_job(waiter, 'interval', seconds=1, args=[sleep, exception])
+ tornado_executor._run_job_success = lambda job_id, events: future.set_result(events)
+ tornado_executor._run_job_error = lambda job_id, exc, tb: future.set_exception(exc)
+ tornado_executor.submit_job(job, [datetime.now(utc)])
+ events = await future
+ assert len(events) == 1
+ if exception:
+ assert str(events[0].exception) == 'dummy error'
+ else:
+ assert events[0].retval is True
+
+
+@pytest.mark.asyncio
+async def test_asyncio_executor_shutdown(asyncio_scheduler, asyncio_executor):
+ """Test that the AsyncIO executor cancels its pending tasks on shutdown."""
+ from asyncio import sleep
+
+ job = asyncio_scheduler.add_job(waiter, 'interval', seconds=1, args=[sleep, None])
+ asyncio_executor.submit_job(job, [datetime.now(utc)])
+ futures = asyncio_executor._pending_futures.copy()
+ assert len(futures) == 1
+
+ asyncio_executor.shutdown()
+ with pytest.raises(CancelledError):
+ await futures.pop()
diff --git a/tests/test_executors_py35.py b/tests/test_executors_py35.py
deleted file mode 100644
index 7849eb0..0000000
--- a/tests/test_executors_py35.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""Contains test functions using Python 3.3+ syntax."""
-from asyncio import CancelledError
-from datetime import datetime
-
-import pytest
-from apscheduler.executors.asyncio import AsyncIOExecutor
-from apscheduler.executors.tornado import TornadoExecutor
-from apscheduler.schedulers.asyncio import AsyncIOScheduler
-from apscheduler.schedulers.tornado import TornadoScheduler
-from pytz import utc
-
-
-@pytest.fixture
-def asyncio_scheduler(event_loop):
- scheduler = AsyncIOScheduler(event_loop=event_loop)
- scheduler.start(paused=True)
- yield scheduler
- scheduler.shutdown(False)
-
-
-@pytest.fixture
-def asyncio_executor(asyncio_scheduler):
- executor = AsyncIOExecutor()
- executor.start(asyncio_scheduler, 'default')
- yield executor
- executor.shutdown()
-
-
-@pytest.fixture
-def tornado_scheduler(io_loop):
- scheduler = TornadoScheduler(io_loop=io_loop)
- scheduler.start(paused=True)
- yield scheduler
- scheduler.shutdown(False)
-
-
-@pytest.fixture
-def tornado_executor(tornado_scheduler):
- executor = TornadoExecutor()
- executor.start(tornado_scheduler, 'default')
- yield executor
- executor.shutdown()
-
-
-async def waiter(sleep, exception):
- await sleep(0.1)
- if exception:
- raise Exception('dummy error')
- else:
- return True
-
-
-@pytest.mark.parametrize('exception', [False, True])
-@pytest.mark.asyncio
-async def test_run_coroutine_job(asyncio_scheduler, asyncio_executor, exception):
- from asyncio import Future, sleep
-
- future = Future()
- job = asyncio_scheduler.add_job(waiter, 'interval', seconds=1, args=[sleep, exception])
- asyncio_executor._run_job_success = lambda job_id, events: future.set_result(events)
- asyncio_executor._run_job_error = lambda job_id, exc, tb: future.set_exception(exc)
- asyncio_executor.submit_job(job, [datetime.now(utc)])
- events = await future
- assert len(events) == 1
- if exception:
- assert str(events[0].exception) == 'dummy error'
- else:
- assert events[0].retval is True
-
-
-@pytest.mark.parametrize('exception', [False, True])
-@pytest.mark.gen_test
-async def test_run_coroutine_job_tornado(tornado_scheduler, tornado_executor, exception):
- from tornado.concurrent import Future
- from tornado.gen import sleep
-
- future = Future()
- job = tornado_scheduler.add_job(waiter, 'interval', seconds=1, args=[sleep, exception])
- tornado_executor._run_job_success = lambda job_id, events: future.set_result(events)
- tornado_executor._run_job_error = lambda job_id, exc, tb: future.set_exception(exc)
- tornado_executor.submit_job(job, [datetime.now(utc)])
- events = await future
- assert len(events) == 1
- if exception:
- assert str(events[0].exception) == 'dummy error'
- else:
- assert events[0].retval is True
-
-
-@pytest.mark.asyncio
-async def test_asyncio_executor_shutdown(asyncio_scheduler, asyncio_executor):
- """Test that the AsyncIO executor cancels its pending tasks on shutdown."""
- from asyncio import sleep
-
- job = asyncio_scheduler.add_job(waiter, 'interval', seconds=1, args=[sleep, None])
- asyncio_executor.submit_job(job, [datetime.now(utc)])
- futures = asyncio_executor._pending_futures.copy()
- assert len(futures) == 1
-
- asyncio_executor.shutdown()
- with pytest.raises(CancelledError):
- await futures.pop()
diff --git a/tests/test_job.py b/tests/test_job.py
index 9dc9210..d1ab355 100644
--- a/tests/test_job.py
+++ b/tests/test_job.py
@@ -1,19 +1,13 @@
-# coding: utf-8
from datetime import datetime, timedelta
from functools import partial
+from unittest.mock import MagicMock, patch
import pytest
-import six
from apscheduler.job import Job
from apscheduler.schedulers.base import BaseScheduler
from apscheduler.triggers.date import DateTrigger
-try:
- from unittest.mock import MagicMock, patch
-except ImportError:
- from mock import MagicMock, patch
-
def dummyfunc():
pass
@@ -114,9 +108,9 @@ def test_private_modify_bad_func(job):
def test_private_modify_func_ref(job):
"""Tests that the target callable can be given as a textual reference."""
- job._modify(func='tests.test_job:dummyfunc')
+ job._modify(func='test_job:dummyfunc')
assert job.func is dummyfunc
- assert job.func_ref == 'tests.test_job:dummyfunc'
+ assert job.func_ref == 'test_job:dummyfunc'
def test_private_modify_unreachable_func(job):
@@ -197,7 +191,7 @@ def test_private_modify_bad_argument(job):
def test_getstate(job):
state = job.__getstate__()
assert state == dict(
- version=1, trigger=job.trigger, executor='default', func='tests.test_job:dummyfunc',
+ version=1, trigger=job.trigger, executor='default', func='test_job:dummyfunc',
name=b'n\xc3\xa4m\xc3\xa9'.decode('utf-8'), args=(), kwargs={},
id=b't\xc3\xa9st\xc3\xafd'.decode('utf-8'), misfire_grace_time=1, coalesce=False,
max_instances=1, next_run_time=None)
@@ -207,13 +201,13 @@ def test_setstate(job, timezone):
trigger = DateTrigger('2010-12-14 13:05:00', timezone)
state = dict(
version=1, scheduler=MagicMock(), jobstore=MagicMock(), trigger=trigger,
- executor='dummyexecutor', func='tests.test_job:dummyfunc', name='testjob.dummyfunc',
+ executor='dummyexecutor', func='test_job:dummyfunc', name='testjob.dummyfunc',
args=[], kwargs={}, id='other_id', misfire_grace_time=2, coalesce=True, max_instances=2,
next_run_time=None)
job.__setstate__(state)
assert job.id == 'other_id'
assert job.func == dummyfunc
- assert job.func_ref == 'tests.test_job:dummyfunc'
+ assert job.func_ref == 'test_job:dummyfunc'
assert job.trigger == trigger
assert job.executor == 'dummyexecutor'
assert job.args == []
@@ -241,11 +235,7 @@ def test_eq(create_job):
def test_repr(job):
- if six.PY2:
- assert repr(job) == '<Job (id=t\\xe9st\\xefd name=n\\xe4m\\xe9)>'
- else:
- assert repr(job) == \
- b'<Job (id=t\xc3\xa9st\xc3\xafd name=n\xc3\xa4m\xc3\xa9)>'.decode('utf-8')
+ assert repr(job) == "<Job (id='téstïd' name='nämé')>"
@pytest.mark.parametrize('status, expected_status', [
@@ -253,19 +243,12 @@ def test_repr(job):
('paused', 'paused'),
('pending', 'pending')
], ids=['scheduled', 'paused', 'pending'])
-@pytest.mark.parametrize('unicode', [False, True], ids=['nativestr', 'unicode'])
-def test_str(create_job, status, unicode, expected_status):
+def test_str(create_job, status, expected_status):
job = create_job(func=dummyfunc)
if status == 'scheduled':
job.next_run_time = job.trigger.run_date
elif status == 'pending':
del job.next_run_time
- if six.PY2 and not unicode:
- expected = 'n\\xe4m\\xe9 (trigger: date[2011-04-03 18:40:00 CEST], %s)' % expected_status
- else:
- expected = b'n\xc3\xa4m\xc3\xa9 (trigger: date[2011-04-03 18:40:00 CEST], %s)'.\
- decode('utf-8') % expected_status
-
- result = job.__unicode__() if unicode else job.__str__()
- assert result == expected
+ expected = 'nämé (trigger: date[2011-04-03 18:40:00 CEST], {})'.format(expected_status)
+ assert str(job) == expected
diff --git a/tests/test_schedulers.py b/tests/test_schedulers.py
index 4d5e55f..31f49de 100644
--- a/tests/test_schedulers.py
+++ b/tests/test_schedulers.py
@@ -1,9 +1,11 @@
import logging
from datetime import datetime, timedelta
+from io import StringIO
+from queue import Queue
from threading import Thread
+from unittest.mock import MagicMock, patch
import pytest
-import six
from pytz import utc
from apscheduler.events import (
@@ -22,16 +24,6 @@ from apscheduler.schedulers.base import BaseScheduler, STATE_RUNNING, STATE_STOP
from apscheduler.triggers.base import BaseTrigger
from apscheduler.util import undefined
-try:
- from StringIO import StringIO
-except ImportError:
- from io import StringIO
-
-try:
- from unittest.mock import MagicMock, patch
-except ImportError:
- from mock import MagicMock, patch
-
class DummyScheduler(BaseScheduler):
def __init__(self, *args, **kwargs):
@@ -642,10 +634,10 @@ Jobstore other:
'coalesce': False,
'max_instances': 9
}
- assert set(six.iterkeys(scheduler._executors)) == set(['default', 'alter'])
+ assert set(scheduler._executors.keys()) == {'default', 'alter'}
assert scheduler._executors['default'].args == {'arg1': '3', 'arg2': 'a'}
assert scheduler._executors['alter'].args == {'arg': 'true'}
- assert set(six.iterkeys(scheduler._jobstores)) == set(['default', 'bar'])
+ assert set(scheduler._jobstores.keys()) == {'default', 'bar'}
assert scheduler._jobstores['default'].args == {'arg1': '3', 'arg2': 'a'}
assert scheduler._jobstores['bar'].args == {'arg': 'false'}
@@ -877,7 +869,6 @@ class SchedulerImplementationTestBase(object):
@pytest.fixture
def eventqueue(self, scheduler):
- from six.moves.queue import Queue
events = Queue()
scheduler.add_listener(events.put)
return events
diff --git a/tests/test_triggers.py b/tests/test_triggers.py
index a637485..9375df7 100644
--- a/tests/test_triggers.py
+++ b/tests/test_triggers.py
@@ -2,20 +2,16 @@ import pickle
import random
import sys
from datetime import datetime, timedelta, date
+from unittest.mock import Mock
import pytest
import pytz
from apscheduler.triggers.base import BaseTrigger
+from apscheduler.triggers.combining import AndTrigger, OrTrigger, BaseCombiningTrigger
from apscheduler.triggers.cron import CronTrigger
from apscheduler.triggers.date import DateTrigger
from apscheduler.triggers.interval import IntervalTrigger
-from apscheduler.triggers.combining import AndTrigger, OrTrigger, BaseCombiningTrigger
-
-try:
- from unittest.mock import Mock
-except ImportError:
- from mock import Mock
class _DummyTriggerWithJitter(BaseTrigger):
diff --git a/tests/test_util.py b/tests/test_util.py
index f1f07e6..43889c2 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -1,25 +1,18 @@
-# coding: utf-8
import platform
+import sys
from datetime import date, datetime, timedelta, tzinfo
from functools import partial
from types import ModuleType
+from unittest.mock import Mock
import pytest
import pytz
-import six
-import sys
from apscheduler.job import Job
from apscheduler.util import (
asint, asbool, astimezone, convert_to_datetime, datetime_to_utc_timestamp,
utc_timestamp_to_datetime, timedelta_seconds, datetime_ceil, get_callable_name, obj_to_ref,
- ref_to_obj, maybe_ref, check_callable_args, datetime_repr, repr_escape)
-from tests.conftest import minpython, maxpython
-
-try:
- from unittest.mock import Mock
-except ImportError:
- from mock import Mock
+ ref_to_obj, maybe_ref, check_callable_args, datetime_repr)
class DummyClass(object):
@@ -222,17 +215,17 @@ class TestObjToRef(object):
@pytest.mark.parametrize('input,expected', [
pytest.mark.skipif(sys.version_info[:2] == (3, 2),
reason="Unbound methods can't be resolved on Python 3.2")(
- (DummyClass.meth, 'tests.test_util:DummyClass.meth')
+ (DummyClass.meth, 'test_util:DummyClass.meth')
),
- (DummyClass.classmeth, 'tests.test_util:DummyClass.classmeth'),
+ (DummyClass.classmeth, 'test_util:DummyClass.classmeth'),
pytest.mark.skipif(sys.version_info < (3, 3),
reason="Requires __qualname__ (Python 3.3+)")(
(DummyClass.InnerDummyClass.innerclassmeth,
- 'tests.test_util:DummyClass.InnerDummyClass.innerclassmeth')
+ 'test_util:DummyClass.InnerDummyClass.innerclassmeth')
),
pytest.mark.skipif(sys.version_info < (3, 3),
reason="Requires __qualname__ (Python 3.3+)")(
- (DummyClass.staticmeth, 'tests.test_util:DummyClass.staticmeth')
+ (DummyClass.staticmeth, 'test_util:DummyClass.staticmeth')
),
pytest.mark.skipif(sys.version_info >= (3, 2),
reason="Unbound methods (Python 3.2) and __qualname__ (Python 3.3+)")(
@@ -276,15 +269,6 @@ def test_maybe_ref(input, expected):
assert maybe_ref(input) == expected
-@pytest.mark.parametrize('input,expected', [
- (b'T\xc3\xa9st'.decode('utf-8'), 'T\\xe9st' if six.PY2 else 'Tést'),
- (1, 1)
-], ids=['string', 'int'])
-@maxpython(3)
-def test_repr_escape_py2(input, expected):
- assert repr_escape(input) == expected
-
-
class TestCheckCallableArgs(object):
def test_invalid_callable_args(self):
"""
@@ -330,7 +314,6 @@ class TestCheckCallableArgs(object):
"""Tests that a function where signature() fails is accepted."""
check_callable_args(object().__setattr__, ('blah', 1), {})
- @minpython(3, 4)
@pytest.mark.skipif(platform.python_implementation() == 'PyPy',
reason='PyPy does not expose signatures of builtins')
def test_positional_only_args(self):
@@ -344,7 +327,6 @@ class TestCheckCallableArgs(object):
assert str(exc.value) == ('The following arguments cannot be given as keyword arguments: '
'value')
- @minpython(3)
def test_unfulfilled_kwargs(self):
"""
Tests that attempting to schedule a job where not all keyword-only arguments are fulfilled
diff --git a/tox.ini b/tox.ini
index 1882570..3cc9730 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,11 +1,10 @@
[tox]
-envlist = py27, py34, py35, py36, py37, pypy, pypy3, flake8
+envlist = py35, py36, py37, pypy3, flake8
skip_missing_interpreters = true
[testenv]
commands = pytest {posargs}
extras = testing
- asyncio
gevent
mongodb
redis