summaryrefslogtreecommitdiff
path: root/rq
diff options
context:
space:
mode:
Diffstat (limited to 'rq')
-rw-r--r--rq/__init__.py5
-rw-r--r--rq/cli/__init__.py2
-rwxr-xr-xrq/cli/cli.py22
-rw-r--r--rq/cli/helpers.py16
-rw-r--r--rq/command.py5
-rw-r--r--rq/connections.py3
-rw-r--r--rq/contrib/legacy.py3
-rw-r--r--rq/decorators.py3
-rw-r--r--rq/defaults.py2
-rw-r--r--rq/job.py34
-rw-r--r--rq/local.py17
-rw-r--r--rq/logutils.py13
-rw-r--r--rq/queue.py17
-rw-r--r--rq/registry.py25
-rw-r--r--rq/results.py7
-rw-r--r--rq/scheduler.py2
-rw-r--r--rq/serializers.py4
-rw-r--r--rq/suspension.py1
-rw-r--r--rq/utils.py6
-rw-r--r--rq/worker.py26
-rw-r--r--rq/worker_pool.py9
-rw-r--r--rq/worker_registration.py9
22 files changed, 116 insertions, 115 deletions
diff --git a/rq/__init__.py b/rq/__init__.py
index 0ab7065..b385e76 100644
--- a/rq/__init__.py
+++ b/rq/__init__.py
@@ -1,7 +1,6 @@
-# flake8: noqa
-
+# ruff: noqa: F401
from .connections import Connection, get_current_connection, pop_connection, push_connection
-from .job import cancel_job, get_current_job, requeue_job, Retry, Callback
+from .job import Callback, Retry, cancel_job, get_current_job, requeue_job
from .queue import Queue
from .version import VERSION
from .worker import SimpleWorker, Worker
diff --git a/rq/cli/__init__.py b/rq/cli/__init__.py
index 821f9d7..ec850b8 100644
--- a/rq/cli/__init__.py
+++ b/rq/cli/__init__.py
@@ -1,4 +1,4 @@
-# flake8: noqa
+# ruff: noqa: F401 I001
from .cli import main
# TODO: the following imports can be removed when we drop the `rqinfo` and
diff --git a/rq/cli/cli.py b/rq/cli/cli.py
index bccde97..eb18293 100755
--- a/rq/cli/cli.py
+++ b/rq/cli/cli.py
@@ -5,45 +5,47 @@ RQ command line tool
import os
import sys
import warnings
-
from typing import List, Type
import click
from redis.exceptions import ConnectionError
-from rq import Connection, Retry, __version__ as version
+from rq import Connection, Retry
+from rq import __version__ as version
from rq.cli.helpers import (
+ parse_function_args,
+ parse_schedule,
+ pass_cli_config,
read_config_file,
refresh,
setup_loghandlers_from_args,
show_both,
show_queues,
show_workers,
- parse_function_args,
- parse_schedule,
- pass_cli_config,
)
# from rq.cli.pool import pool
from rq.contrib.legacy import cleanup_ghosts
from rq.defaults import (
- DEFAULT_RESULT_TTL,
- DEFAULT_WORKER_TTL,
DEFAULT_JOB_MONITORING_INTERVAL,
- DEFAULT_LOGGING_FORMAT,
DEFAULT_LOGGING_DATE_FORMAT,
+ DEFAULT_LOGGING_FORMAT,
DEFAULT_MAINTENANCE_TASK_INTERVAL,
+ DEFAULT_RESULT_TTL,
+ DEFAULT_WORKER_TTL,
)
from rq.exceptions import InvalidJobOperationError
from rq.job import Job, JobStatus
from rq.logutils import blue
from rq.registry import FailedJobRegistry, clean_registries
from rq.serializers import DefaultSerializer
-from rq.suspension import suspend as connection_suspend, resume as connection_resume, is_suspended
+from rq.suspension import is_suspended
+from rq.suspension import resume as connection_resume
+from rq.suspension import suspend as connection_suspend
+from rq.utils import get_call_string, import_attribute
from rq.worker import Worker
from rq.worker_pool import WorkerPool
from rq.worker_registration import clean_worker_registry
-from rq.utils import import_attribute, get_call_string
@click.group()
diff --git a/rq/cli/helpers.py b/rq/cli/helpers.py
index bea2c37..e585ca7 100644
--- a/rq/cli/helpers.py
+++ b/rq/cli/helpers.py
@@ -1,14 +1,12 @@
-import sys
import importlib
-import time
import os
-
-from functools import partial, update_wrapper
-from enum import Enum
-
-from datetime import datetime, timezone, timedelta
-from json import loads, JSONDecodeError
+import sys
+import time
from ast import literal_eval
+from datetime import datetime, timedelta, timezone
+from enum import Enum
+from functools import partial, update_wrapper
+from json import JSONDecodeError, loads
from shutil import get_terminal_size
import click
@@ -20,8 +18,8 @@ from rq.defaults import (
DEFAULT_DEATH_PENALTY_CLASS,
DEFAULT_JOB_CLASS,
DEFAULT_QUEUE_CLASS,
- DEFAULT_WORKER_CLASS,
DEFAULT_SERIALIZER_CLASS,
+ DEFAULT_WORKER_CLASS,
)
from rq.logutils import setup_loghandlers
from rq.utils import import_attribute, parse_timeout
diff --git a/rq/command.py b/rq/command.py
index 4566ec0..0488d68 100644
--- a/rq/command.py
+++ b/rq/command.py
@@ -1,17 +1,16 @@
import json
import os
import signal
-
-from typing import TYPE_CHECKING, Dict, Any
+from typing import TYPE_CHECKING, Any, Dict
if TYPE_CHECKING:
from redis import Redis
+
from .worker import Worker
from rq.exceptions import InvalidJobOperation
from rq.job import Job
-
PUBSUB_CHANNEL_TEMPLATE = 'rq:pubsub:%s'
diff --git a/rq/connections.py b/rq/connections.py
index 02b50e3..5d10ea4 100644
--- a/rq/connections.py
+++ b/rq/connections.py
@@ -2,7 +2,8 @@ import warnings
from contextlib import contextmanager
from typing import Optional, Tuple, Type
-from redis import Connection as RedisConnection, Redis
+from redis import Connection as RedisConnection
+from redis import Redis
from .local import LocalStack
diff --git a/rq/contrib/legacy.py b/rq/contrib/legacy.py
index 33ecf18..be44b65 100644
--- a/rq/contrib/legacy.py
+++ b/rq/contrib/legacy.py
@@ -1,7 +1,6 @@
import logging
-from rq import get_current_connection
-from rq import Worker
+from rq import Worker, get_current_connection
logger = logging.getLogger(__name__)
diff --git a/rq/decorators.py b/rq/decorators.py
index 2bf46e8..a24101e 100644
--- a/rq/decorators.py
+++ b/rq/decorators.py
@@ -1,8 +1,9 @@
from functools import wraps
-from typing import TYPE_CHECKING, Callable, Dict, Optional, List, Any, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
if TYPE_CHECKING:
from redis import Redis
+
from .job import Retry
from .defaults import DEFAULT_RESULT_TTL
diff --git a/rq/defaults.py b/rq/defaults.py
index 3744c12..0cea711 100644
--- a/rq/defaults.py
+++ b/rq/defaults.py
@@ -99,4 +99,4 @@ Defaults to the `UnixSignalDeathPenalty` class within the `rq.timeouts` module
UNSERIALIZABLE_RETURN_VALUE_PAYLOAD = 'Unserializable return value'
""" The value that we store in the job's _result property or in the Result's return_value
in case the return value of the actual job is not serializable
-""" \ No newline at end of file
+"""
diff --git a/rq/job.py b/rq/job.py
index b4ee6a4..7e7e964 100644
--- a/rq/job.py
+++ b/rq/job.py
@@ -1,25 +1,26 @@
+import asyncio
import inspect
import json
import logging
import warnings
import zlib
-import asyncio
-
from datetime import datetime, timedelta, timezone
from enum import Enum
-from redis import WatchError
-from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Tuple, Union, Type
+from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Tuple, Type, Union
from uuid import uuid4
+from redis import WatchError
+
from .defaults import CALLBACK_TIMEOUT, UNSERIALIZABLE_RETURN_VALUE_PAYLOAD
-from .timeouts import JobTimeoutException, BaseDeathPenalty
+from .timeouts import BaseDeathPenalty, JobTimeoutException
if TYPE_CHECKING:
- from .results import Result
- from .queue import Queue
from redis import Redis
from redis.client import Pipeline
+ from .queue import Queue
+ from .results import Result
+
from .connections import resolve_connection
from .exceptions import DeserializationError, InvalidJobOperation, NoSuchJobError
from .local import LocalStack
@@ -167,8 +168,8 @@ class Job:
func (FunctionReference): The function/method/callable for the Job. This can be
a reference to a concrete callable or a string representing the path of function/method to be
imported. Effectively this is the only required attribute when creating a new Job.
- args (Union[List[Any], Optional[Tuple]], optional): A Tuple / List of positional arguments to pass the callable.
- Defaults to None, meaning no args being passed.
+ args (Union[List[Any], Optional[Tuple]], optional): A Tuple / List of positional arguments to pass the
+ callable. Defaults to None, meaning no args being passed.
kwargs (Optional[Dict], optional): A Dictionary of keyword arguments to pass the callable.
Defaults to None, meaning no kwargs being passed.
connection (Optional[Redis], optional): The Redis connection to use. Defaults to None.
@@ -179,13 +180,16 @@ class Job:
status (JobStatus, optional): The Job Status. Defaults to None.
description (Optional[str], optional): The Job Description. Defaults to None.
depends_on (Union['Dependency', List[Union['Dependency', 'Job']]], optional): What the jobs depends on.
- This accepts a variaty of different arguments including a `Dependency`, a list of `Dependency` or a `Job`
- list of `Job`. Defaults to None.
- timeout (Optional[int], optional): The amount of time in seconds that should be a hardlimit for a job execution. Defaults to None.
+            This accepts a variety of different arguments including a `Dependency`, a list of `Dependency` or a
+ `Job` list of `Job`. Defaults to None.
+        timeout (Optional[int], optional): The amount of time in seconds that should be a hard limit for a job
+ execution. Defaults to None.
id (Optional[str], optional): An Optional ID (str) for the Job. Defaults to None.
origin (Optional[str], optional): The queue of origin. Defaults to None.
- meta (Optional[Dict[str, Any]], optional): Custom metadata about the job, takes a dictioanry. Defaults to None.
- failure_ttl (Optional[int], optional): THe time to live in seconds for failed-jobs information. Defaults to None.
+        meta (Optional[Dict[str, Any]], optional): Custom metadata about the job, takes a dictionary.
+ Defaults to None.
+        failure_ttl (Optional[int], optional): The time to live in seconds for failed-jobs information.
+ Defaults to None.
serializer (Optional[str], optional): The serializer class path to use. Should be a string with the import
path for the serializer to use. eg. `mymodule.myfile.MySerializer` Defaults to None.
on_success (Optional[Callable[..., Any]], optional): A callback function, should be a callable to run
@@ -1081,8 +1085,8 @@ class Job:
"""
if self.is_canceled:
raise InvalidJobOperation("Cannot cancel already canceled job: {}".format(self.get_id()))
- from .registry import CanceledJobRegistry
from .queue import Queue
+ from .registry import CanceledJobRegistry
pipe = pipeline or self.connection.pipeline()
diff --git a/rq/local.py b/rq/local.py
index e6b070b..2fe22c9 100644
--- a/rq/local.py
+++ b/rq/local.py
@@ -1,4 +1,4 @@
-# flake8: noqa
+# ruff: noqa: E731
"""
werkzeug.local
~~~~~~~~~~~~~~
@@ -13,14 +13,14 @@
# current thread ident.
try:
from greenlet import getcurrent as get_ident
-except ImportError: # noqa
+except ImportError:
try:
- from threading import get_ident # noqa
- except ImportError: # noqa
+ from threading import get_ident
+ except ImportError:
try:
- from _thread import get_ident # noqa
- except ImportError: # noqa
- from dummy_thread import get_ident # noqa
+ from _thread import get_ident
+ except ImportError:
+ from dummy_thread import get_ident
def release_local(local):
@@ -120,7 +120,7 @@ class LocalStack:
def _get__ident_func__(self):
return self._local.__ident_func__
- def _set__ident_func__(self, value): # noqa
+ def _set__ident_func__(self, value):
object.__setattr__(self._local, '__ident_func__', value)
__ident_func__ = property(_get__ident_func__, _set__ident_func__)
@@ -348,7 +348,6 @@ class LocalProxy:
__invert__ = lambda x: ~(x._get_current_object())
__complex__ = lambda x: complex(x._get_current_object())
__int__ = lambda x: int(x._get_current_object())
- __long__ = lambda x: long(x._get_current_object())
__float__ = lambda x: float(x._get_current_object())
__oct__ = lambda x: oct(x._get_current_object())
__hex__ = lambda x: hex(x._get_current_object())
diff --git a/rq/logutils.py b/rq/logutils.py
index b36ece8..9a1c6c5 100644
--- a/rq/logutils.py
+++ b/rq/logutils.py
@@ -2,7 +2,7 @@ import logging
import sys
from typing import Union
-from rq.defaults import DEFAULT_LOGGING_FORMAT, DEFAULT_LOGGING_DATE_FORMAT
+from rq.defaults import DEFAULT_LOGGING_DATE_FORMAT, DEFAULT_LOGGING_FORMAT
class _Colorizer:
@@ -24,12 +24,12 @@ class _Colorizer:
light_colors = ["darkgray", "red", "green", "yellow", "blue", "fuchsia", "turquoise", "white"]
x = 30
- for d, l in zip(dark_colors, light_colors):
- self.codes[d] = esc + "%im" % x
- self.codes[l] = esc + "%i;01m" % x
+ for dark, light in zip(dark_colors, light_colors):
+ self.codes[dark] = esc + "%im" % x
+ self.codes[light] = esc + "%i;01m" % x
x += 1
- del d, l, x
+ del dark, light, x
self.codes["darkteal"] = self.codes["turquoise"]
self.codes["darkyellow"] = self.codes["brown"]
@@ -117,7 +117,8 @@ def setup_loghandlers(
level (Union[int, str, None], optional): The log level.
Access an integer level (10-50) or a string level ("info", "debug" etc). Defaults to None.
date_format (str, optional): The date format to use. Defaults to DEFAULT_LOGGING_DATE_FORMAT ('%H:%M:%S').
- log_format (str, optional): The log format to use. Defaults to DEFAULT_LOGGING_FORMAT ('%(asctime)s %(message)s').
+ log_format (str, optional): The log format to use.
+ Defaults to DEFAULT_LOGGING_FORMAT ('%(asctime)s %(message)s').
name (str, optional): The looger name. Defaults to 'rq.worker'.
"""
logger = logging.getLogger(name)
diff --git a/rq/queue.py b/rq/queue.py
index f7a0c87..2d74ddf 100644
--- a/rq/queue.py
+++ b/rq/queue.py
@@ -4,9 +4,9 @@ import traceback
import uuid
import warnings
from collections import namedtuple
-from datetime import datetime, timezone, timedelta
+from datetime import datetime, timedelta, timezone
from functools import total_ordering
-from typing import TYPE_CHECKING, Dict, List, Any, Callable, Optional, Tuple, Type, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type, Union
from redis import WatchError
@@ -15,18 +15,17 @@ from .timeouts import BaseDeathPenalty, UnixSignalDeathPenalty
if TYPE_CHECKING:
from redis import Redis
from redis.client import Pipeline
+
from .job import Retry
-from .utils import as_text
from .connections import resolve_connection
from .defaults import DEFAULT_RESULT_TTL
from .exceptions import DequeueTimeout, NoSuchJobError
from .job import Job, JobStatus
from .logutils import blue, green
-from .types import FunctionReferenceType, JobDependencyType
from .serializers import resolve_serializer
-from .utils import backend_class, get_version, import_attribute, parse_timeout, utcnow, compact
-
+from .types import FunctionReferenceType, JobDependencyType
+from .utils import as_text, backend_class, compact, get_version, import_attribute, parse_timeout, utcnow
logger = logging.getLogger("rq.queue")
@@ -158,9 +157,11 @@ class Queue:
connection (Optional[Redis], optional): Redis connection. Defaults to None.
is_async (bool, optional): Whether jobs should run "async" (using the worker).
If `is_async` is false, jobs will run on the same process from where it was called. Defaults to True.
- job_class (Union[str, 'Job', optional): Job class or a string referencing the Job class path. Defaults to None.
+            job_class (Union[str, 'Job'], optional): Job class or a string referencing the Job class path.
+ Defaults to None.
serializer (Any, optional): Serializer. Defaults to None.
- death_penalty_class (Type[BaseDeathPenalty, optional): Job class or a string referencing the Job class path. Defaults to UnixSignalDeathPenalty.
+            death_penalty_class (Type[BaseDeathPenalty], optional): Death penalty class or a string referencing its path.
+ Defaults to UnixSignalDeathPenalty.
"""
self.connection = connection or resolve_connection()
prefix = self.redis_queue_namespace_prefix
diff --git a/rq/registry.py b/rq/registry.py
index acd6bd7..b955d6b 100644
--- a/rq/registry.py
+++ b/rq/registry.py
@@ -1,26 +1,24 @@
import calendar
import logging
-import traceback
-
-from rq.serializers import resolve_serializer
import time
+import traceback
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING, Any, List, Optional, Type, Union
-from .timeouts import UnixSignalDeathPenalty, BaseDeathPenalty
+from rq.serializers import resolve_serializer
+
+from .timeouts import BaseDeathPenalty, UnixSignalDeathPenalty
if TYPE_CHECKING:
from redis import Redis
from redis.client import Pipeline
-from .utils import as_text
from .connections import resolve_connection
from .defaults import DEFAULT_FAILURE_TTL
-from .exceptions import InvalidJobOperation, NoSuchJobError, AbandonedJobError
+from .exceptions import AbandonedJobError, InvalidJobOperation, NoSuchJobError
from .job import Job, JobStatus
from .queue import Queue
-from .utils import backend_class, current_timestamp
-
+from .utils import as_text, backend_class, current_timestamp
logger = logging.getLogger("rq.registry")
@@ -237,8 +235,9 @@ class StartedJobRegistry(BaseRegistry):
except NoSuchJobError:
continue
- job.execute_failure_callback(self.death_penalty_class, AbandonedJobError, AbandonedJobError(),
- traceback.extract_stack())
+ job.execute_failure_callback(
+ self.death_penalty_class, AbandonedJobError, AbandonedJobError(), traceback.extract_stack()
+ )
retry = job.retries_left and job.retries_left > 0
@@ -248,8 +247,10 @@ class StartedJobRegistry(BaseRegistry):
else:
exc_string = f"due to {AbandonedJobError.__name__}"
- logger.warning(f'{self.__class__.__name__} cleanup: Moving job to {FailedJobRegistry.__name__} '
- f'({exc_string})')
+ logger.warning(
+ f'{self.__class__.__name__} cleanup: Moving job to {FailedJobRegistry.__name__} '
+ f'({exc_string})'
+ )
job.set_status(JobStatus.FAILED)
job._exc_info = f"Moved to {FailedJobRegistry.__name__}, {exc_string}, at {datetime.now()}"
job.save(pipeline=pipeline, include_meta=False)
diff --git a/rq/results.py b/rq/results.py
index fdbb763..27bab15 100644
--- a/rq/results.py
+++ b/rq/results.py
@@ -1,16 +1,15 @@
-from typing import Any, Optional
import zlib
-
from base64 import b64decode, b64encode
from datetime import datetime, timezone
from enum import Enum
+from typing import Any, Optional
+
from redis import Redis
from .defaults import UNSERIALIZABLE_RETURN_VALUE_PAYLOAD
-from .utils import decode_redis_hash
from .job import Job
from .serializers import resolve_serializer
-from .utils import now
+from .utils import decode_redis_hash, now
def get_key(job_id):
diff --git a/rq/scheduler.py b/rq/scheduler.py
index a64b400..97d627c 100644
--- a/rq/scheduler.py
+++ b/rq/scheduler.py
@@ -8,7 +8,7 @@ from enum import Enum
from multiprocessing import Process
from typing import List, Set
-from redis import ConnectionPool, Redis, SSLConnection, UnixDomainSocketConnection
+from redis import ConnectionPool, Redis
from .connections import parse_connection
from .defaults import DEFAULT_LOGGING_DATE_FORMAT, DEFAULT_LOGGING_FORMAT, DEFAULT_SCHEDULER_FALLBACK_PERIOD
diff --git a/rq/serializers.py b/rq/serializers.py
index 96de3f5..94eddbf 100644
--- a/rq/serializers.py
+++ b/rq/serializers.py
@@ -1,6 +1,6 @@
-from functools import partial
-import pickle
import json
+import pickle
+from functools import partial
from typing import Optional, Type, Union
from .utils import import_attribute
diff --git a/rq/suspension.py b/rq/suspension.py
index 77df9b8..10af5ba 100644
--- a/rq/suspension.py
+++ b/rq/suspension.py
@@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from redis import Redis
+
from rq.worker import Worker
diff --git a/rq/utils.py b/rq/utils.py
index db483ab..5e61983 100644
--- a/rq/utils.py
+++ b/rq/utils.py
@@ -7,16 +7,16 @@ terminal colorizing code, originally by Georg Brandl.
import calendar
import datetime
+import datetime as dt
import importlib
import logging
import numbers
-import sys
-import datetime as dt
from collections.abc import Iterable
-from typing import TYPE_CHECKING, Dict, List, Optional, Any, Callable, Tuple, Union
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
if TYPE_CHECKING:
from redis import Redis
+
from .queue import Queue
from redis.exceptions import ResponseError
diff --git a/rq/worker.py b/rq/worker.py
index ade789b..062b1b4 100644
--- a/rq/worker.py
+++ b/rq/worker.py
@@ -13,8 +13,8 @@ import warnings
from datetime import datetime, timedelta
from enum import Enum
from random import shuffle
-from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Type, Union
from types import FrameType
+from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Type, Union
from uuid import uuid4
if TYPE_CHECKING:
@@ -35,19 +35,17 @@ from contextlib import suppress
import redis.exceptions
from . import worker_registration
-from .command import parse_payload, PUBSUB_CHANNEL_TEMPLATE, handle_command
-from .connections import get_current_connection, push_connection, pop_connection
-
+from .command import PUBSUB_CHANNEL_TEMPLATE, handle_command, parse_payload
+from .connections import get_current_connection, pop_connection, push_connection
from .defaults import (
+ DEFAULT_JOB_MONITORING_INTERVAL,
+ DEFAULT_LOGGING_DATE_FORMAT,
+ DEFAULT_LOGGING_FORMAT,
DEFAULT_MAINTENANCE_TASK_INTERVAL,
DEFAULT_RESULT_TTL,
DEFAULT_WORKER_TTL,
- DEFAULT_JOB_MONITORING_INTERVAL,
- DEFAULT_LOGGING_FORMAT,
- DEFAULT_LOGGING_DATE_FORMAT,
)
-from .exceptions import DeserializationError, DequeueTimeout, ShutDownImminentException
-
+from .exceptions import DequeueTimeout, DeserializationError, ShutDownImminentException
from .job import Job, JobStatus
from .logutils import blue, green, setup_loghandlers, yellow
from .queue import Queue
@@ -55,20 +53,19 @@ from .registry import StartedJobRegistry, clean_registries
from .scheduler import RQScheduler
from .serializers import resolve_serializer
from .suspension import is_suspended
-from .timeouts import JobTimeoutException, HorseMonitorTimeoutException, UnixSignalDeathPenalty
+from .timeouts import HorseMonitorTimeoutException, JobTimeoutException, UnixSignalDeathPenalty
from .utils import (
+ as_text,
backend_class,
+ compact,
ensure_list,
get_version,
utcformat,
utcnow,
utcparse,
- compact,
- as_text,
)
from .version import VERSION
-
try:
from setproctitle import setproctitle as setprocname
except ImportError:
@@ -373,7 +370,8 @@ class BaseWorker:
max_jobs (Optional[int], optional): Max number of jobs. Defaults to None.
max_idle_time (Optional[int], optional): Max seconds for worker to be idle. Defaults to None.
with_scheduler (bool, optional): Whether to run the scheduler in a separate process. Defaults to False.
- dequeue_strategy (DequeueStrategy, optional): Which strategy to use to dequeue jobs. Defaults to DequeueStrategy.DEFAULT
+ dequeue_strategy (DequeueStrategy, optional): Which strategy to use to dequeue jobs.
+ Defaults to DequeueStrategy.DEFAULT
Returns:
worked (bool): Will return True if any job was processed, False otherwise.
diff --git a/rq/worker_pool.py b/rq/worker_pool.py
index 005c3b9..b161cc8 100644
--- a/rq/worker_pool.py
+++ b/rq/worker_pool.py
@@ -4,17 +4,14 @@ import logging
import os
import signal
import time
-
from enum import Enum
from multiprocessing import Process
-from typing import Dict, List, NamedTuple, Optional, Set, Type, Union
+from typing import Dict, List, NamedTuple, Optional, Type, Union
from uuid import uuid4
-from redis import Redis
-from redis import ConnectionPool
-from rq.serializers import DefaultSerializer
+from redis import ConnectionPool, Redis
-from rq.timeouts import HorseMonitorTimeoutException, UnixSignalDeathPenalty
+from rq.serializers import DefaultSerializer
from .connections import parse_connection
from .defaults import DEFAULT_LOGGING_DATE_FORMAT, DEFAULT_LOGGING_FORMAT
diff --git a/rq/worker_registration.py b/rq/worker_registration.py
index fe4dc04..838c63f 100644
--- a/rq/worker_registration.py
+++ b/rq/worker_registration.py
@@ -1,15 +1,16 @@
-from typing import Optional, TYPE_CHECKING, Any, Set
+from typing import TYPE_CHECKING, Optional, Set
if TYPE_CHECKING:
from redis import Redis
from redis.client import Pipeline
- from .worker import Worker
- from .queue import Queue
-from .utils import as_text
+ from .queue import Queue
+ from .worker import Worker
from rq.utils import split_list
+from .utils import as_text
+
WORKERS_BY_QUEUE_KEY = 'rq:workers:%s'
REDIS_WORKER_KEYS = 'rq:workers'
MAX_KEYS = 1000