summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorUlysses Souza <ulysses.souza@docker.com>2021-07-08 11:29:25 -0300
committerGitHub <noreply@github.com>2021-07-08 11:29:25 -0300
commita9748a8b702a3c75b46ba8c8d0490e4b8ec5ab04 (patch)
treebeb70d3e8d90200aa77ef2ffa88dd5ce29a19f89
parent650aad3a5fb84059b392ad450f760ed08143ae3f (diff)
parent5fcc293ba268a89ea1535114d36fbdcb73ec3d88 (diff)
downloaddocker-py-a9748a8b702a3c75b46ba8c8d0490e4b8ec5ab04.tar.gz
Merge pull request #2863 from asottile/py36_plus
use python3.6+ constructs
-rw-r--r--docker/api/build.py12
-rw-r--r--docker/api/client.py20
-rw-r--r--docker/api/config.py7
-rw-r--r--docker/api/container.py8
-rw-r--r--docker/api/daemon.py2
-rw-r--r--docker/api/exec_api.py6
-rw-r--r--docker/api/image.py10
-rw-r--r--docker/api/network.py2
-rw-r--r--docker/api/plugin.py6
-rw-r--r--docker/api/secret.py7
-rw-r--r--docker/api/service.py4
-rw-r--r--docker/api/swarm.py4
-rw-r--r--docker/api/volume.py2
-rw-r--r--docker/auth.py38
-rw-r--r--docker/client.py4
-rw-r--r--docker/constants.py2
-rw-r--r--docker/context/api.py6
-rw-r--r--docker/context/config.py4
-rw-r--r--docker/context/context.py4
-rw-r--r--docker/credentials/store.py27
-rw-r--r--docker/errors.py26
-rw-r--r--docker/models/configs.py2
-rw-r--r--docker/models/images.py14
-rw-r--r--docker/models/plugins.py5
-rw-r--r--docker/models/resource.py9
-rw-r--r--docker/models/secrets.py2
-rw-r--r--docker/models/swarm.py2
-rw-r--r--docker/tls.py2
-rw-r--r--docker/transport/basehttpadapter.py2
-rw-r--r--docker/transport/npipeconn.py17
-rw-r--r--docker/transport/npipesocket.py8
-rw-r--r--docker/transport/sshconn.py27
-rw-r--r--docker/transport/ssladapter.py4
-rw-r--r--docker/transport/unixconn.py26
-rw-r--r--docker/types/base.py5
-rw-r--r--docker/types/containers.py42
-rw-r--r--docker/types/daemon.py4
-rw-r--r--docker/types/healthcheck.py8
-rw-r--r--docker/types/services.py20
-rw-r--r--docker/utils/build.py25
-rw-r--r--docker/utils/config.py6
-rw-r--r--docker/utils/decorators.py2
-rw-r--r--docker/utils/fnmatch.py2
-rw-r--r--docker/utils/json_stream.py13
-rw-r--r--docker/utils/ports.py2
-rw-r--r--docker/utils/socket.py14
-rw-r--r--docker/utils/utils.py32
-rw-r--r--docker/version.py2
-rw-r--r--docs/conf.py19
-rwxr-xr-xscripts/versions.py4
-rw-r--r--setup.py1
-rw-r--r--tests/helpers.py11
-rw-r--r--tests/integration/api_build_test.py19
-rw-r--r--tests/integration/api_client_test.py2
-rw-r--r--tests/integration/api_config_test.py4
-rw-r--r--tests/integration/api_container_test.py42
-rw-r--r--tests/integration/api_exec_test.py2
-rw-r--r--tests/integration/api_image_test.py6
-rw-r--r--tests/integration/api_network_test.py2
-rw-r--r--tests/integration/api_secret_test.py4
-rw-r--r--tests/integration/api_service_test.py32
-rw-r--r--tests/integration/api_swarm_test.py4
-rw-r--r--tests/integration/base.py4
-rw-r--r--tests/integration/conftest.py6
-rw-r--r--tests/integration/credentials/store_test.py7
-rw-r--r--tests/integration/credentials/utils_test.py2
-rw-r--r--tests/integration/models_images_test.py22
-rw-r--r--tests/integration/regression_test.py10
-rw-r--r--tests/ssh/api_build_test.py19
-rw-r--r--tests/ssh/base.py2
-rw-r--r--tests/unit/api_container_test.py27
-rw-r--r--tests/unit/api_exec_test.py10
-rw-r--r--tests/unit/api_image_test.py2
-rw-r--r--tests/unit/api_network_test.py20
-rw-r--r--tests/unit/api_test.py44
-rw-r--r--tests/unit/api_volume_test.py4
-rw-r--r--tests/unit/auth_test.py22
-rw-r--r--tests/unit/client_test.py2
-rw-r--r--tests/unit/dockertypes_test.py4
-rw-r--r--tests/unit/errors_test.py2
-rw-r--r--tests/unit/fake_api.py100
-rw-r--r--tests/unit/fake_api_client.py4
-rw-r--r--tests/unit/models_resources_test.py2
-rw-r--r--tests/unit/models_secrets_test.py2
-rw-r--r--tests/unit/models_services_test.py8
-rw-r--r--tests/unit/ssladapter_test.py38
-rw-r--r--tests/unit/swarm_test.py2
-rw-r--r--tests/unit/utils_build_test.py112
-rw-r--r--tests/unit/utils_config_test.py2
-rw-r--r--tests/unit/utils_json_stream_test.py12
-rw-r--r--tests/unit/utils_proxy_test.py7
-rw-r--r--tests/unit/utils_test.py34
92 files changed, 524 insertions, 658 deletions
diff --git a/docker/api/build.py b/docker/api/build.py
index 365129a..aac43c4 100644
--- a/docker/api/build.py
+++ b/docker/api/build.py
@@ -12,7 +12,7 @@ from .. import utils
log = logging.getLogger(__name__)
-class BuildApiMixin(object):
+class BuildApiMixin:
def build(self, path=None, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False,
@@ -132,7 +132,7 @@ class BuildApiMixin(object):
for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS:
raise errors.DockerException(
- 'Invalid container_limits key {0}'.format(key)
+ f'Invalid container_limits key {key}'
)
if custom_context:
@@ -150,7 +150,7 @@ class BuildApiMixin(object):
dockerignore = os.path.join(path, '.dockerignore')
exclude = None
if os.path.exists(dockerignore):
- with open(dockerignore, 'r') as f:
+ with open(dockerignore) as f:
exclude = list(filter(
lambda x: x != '' and x[0] != '#',
[l.strip() for l in f.read().splitlines()]
@@ -313,7 +313,7 @@ class BuildApiMixin(object):
auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
log.debug(
- 'Sending auth config ({0})'.format(
+ 'Sending auth config ({})'.format(
', '.join(repr(k) for k in auth_data.keys())
)
)
@@ -344,9 +344,9 @@ def process_dockerfile(dockerfile, path):
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')):
# Dockerfile not in context - read data to insert into tar later
- with open(abs_dockerfile, 'r') as df:
+ with open(abs_dockerfile) as df:
return (
- '.dockerfile.{0:x}'.format(random.getrandbits(160)),
+ f'.dockerfile.{random.getrandbits(160):x}',
df.read()
)
diff --git a/docker/api/client.py b/docker/api/client.py
index ee9ad9c..f0cb39b 100644
--- a/docker/api/client.py
+++ b/docker/api/client.py
@@ -107,7 +107,7 @@ class APIClient(
user_agent=DEFAULT_USER_AGENT, num_pools=None,
credstore_env=None, use_ssh_client=False,
max_pool_size=DEFAULT_MAX_POOL_SIZE):
- super(APIClient, self).__init__()
+ super().__init__()
if tls and not base_url:
raise TLSParameterError(
@@ -199,7 +199,7 @@ class APIClient(
self._version = version
if not isinstance(self._version, str):
raise DockerException(
- 'Version parameter must be a string or None. Found {0}'.format(
+ 'Version parameter must be a string or None. Found {}'.format(
type(version).__name__
)
)
@@ -219,7 +219,7 @@ class APIClient(
)
except Exception as e:
raise DockerException(
- 'Error while fetching server API version: {0}'.format(e)
+ f'Error while fetching server API version: {e}'
)
def _set_request_timeout(self, kwargs):
@@ -248,7 +248,7 @@ class APIClient(
for arg in args:
if not isinstance(arg, str):
raise ValueError(
- 'Expected a string but found {0} ({1}) '
+ 'Expected a string but found {} ({}) '
'instead'.format(arg, type(arg))
)
@@ -256,11 +256,11 @@ class APIClient(
args = map(quote_f, args)
if kwargs.get('versioned_api', True):
- return '{0}/v{1}{2}'.format(
+ return '{}/v{}{}'.format(
self.base_url, self._version, pathfmt.format(*args)
)
else:
- return '{0}{1}'.format(self.base_url, pathfmt.format(*args))
+ return f'{self.base_url}{pathfmt.format(*args)}'
def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred."""
@@ -341,8 +341,7 @@ class APIClient(
if response.raw._fp.chunked:
if decode:
- for chunk in json_stream(self._stream_helper(response, False)):
- yield chunk
+ yield from json_stream(self._stream_helper(response, False))
else:
reader = response.raw
while not reader.closed:
@@ -398,8 +397,7 @@ class APIClient(
def _stream_raw_result(self, response, chunk_size=1, decode=True):
''' Stream result for TTY-enabled container and raw binary data'''
self._raise_for_status(response)
- for out in response.iter_content(chunk_size, decode):
- yield out
+ yield from response.iter_content(chunk_size, decode)
def _read_from_socket(self, response, stream, tty=True, demux=False):
socket = self._get_raw_response_socket(response)
@@ -477,7 +475,7 @@ class APIClient(
def get_adapter(self, url):
try:
- return super(APIClient, self).get_adapter(url)
+ return super().get_adapter(url)
except requests.exceptions.InvalidSchema as e:
if self._custom_adapter:
return self._custom_adapter
diff --git a/docker/api/config.py b/docker/api/config.py
index 93e5168..8cf74e1 100644
--- a/docker/api/config.py
+++ b/docker/api/config.py
@@ -1,11 +1,9 @@
import base64
-import six
-
from .. import utils
-class ConfigApiMixin(object):
+class ConfigApiMixin:
@utils.minimum_version('1.30')
def create_config(self, name, data, labels=None):
"""
@@ -22,8 +20,7 @@ class ConfigApiMixin(object):
data = data.encode('utf-8')
data = base64.b64encode(data)
- if six.PY3:
- data = data.decode('ascii')
+ data = data.decode('ascii')
body = {
'Data': data,
'Name': name,
diff --git a/docker/api/container.py b/docker/api/container.py
index 369eba9..83fcd4f 100644
--- a/docker/api/container.py
+++ b/docker/api/container.py
@@ -1,7 +1,5 @@
from datetime import datetime
-import six
-
from .. import errors
from .. import utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE
@@ -12,7 +10,7 @@ from ..types import HostConfig
from ..types import NetworkingConfig
-class ContainerApiMixin(object):
+class ContainerApiMixin:
@utils.check_resource('container')
def attach(self, container, stdout=True, stderr=True,
stream=False, logs=False, demux=False):
@@ -408,7 +406,7 @@ class ContainerApiMixin(object):
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
- if isinstance(volumes, six.string_types):
+ if isinstance(volumes, str):
volumes = [volumes, ]
if isinstance(environment, dict):
@@ -790,7 +788,7 @@ class ContainerApiMixin(object):
url = self._url("/containers/{0}/kill", container)
params = {}
if signal is not None:
- if not isinstance(signal, six.string_types):
+ if not isinstance(signal, str):
signal = int(signal)
params['signal'] = signal
res = self._post(url, params=params)
diff --git a/docker/api/daemon.py b/docker/api/daemon.py
index 6b71926..a857213 100644
--- a/docker/api/daemon.py
+++ b/docker/api/daemon.py
@@ -4,7 +4,7 @@ from datetime import datetime
from .. import auth, types, utils
-class DaemonApiMixin(object):
+class DaemonApiMixin:
@utils.minimum_version('1.25')
def df(self):
"""
diff --git a/docker/api/exec_api.py b/docker/api/exec_api.py
index 4c49ac3..496308a 100644
--- a/docker/api/exec_api.py
+++ b/docker/api/exec_api.py
@@ -1,10 +1,8 @@
-import six
-
from .. import errors
from .. import utils
-class ExecApiMixin(object):
+class ExecApiMixin:
@utils.check_resource('container')
def exec_create(self, container, cmd, stdout=True, stderr=True,
stdin=False, tty=False, privileged=False, user='',
@@ -45,7 +43,7 @@ class ExecApiMixin(object):
'Setting environment for exec is not supported in API < 1.25'
)
- if isinstance(cmd, six.string_types):
+ if isinstance(cmd, str):
cmd = utils.split_command(cmd)
if isinstance(environment, dict):
diff --git a/docker/api/image.py b/docker/api/image.py
index 772101f..772d889 100644
--- a/docker/api/image.py
+++ b/docker/api/image.py
@@ -1,15 +1,13 @@
import logging
import os
-import six
-
from .. import auth, errors, utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE
log = logging.getLogger(__name__)
-class ImageApiMixin(object):
+class ImageApiMixin:
@utils.check_resource('image')
def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
@@ -130,7 +128,7 @@ class ImageApiMixin(object):
params = _import_image_params(
repository, tag, image,
- src=(src if isinstance(src, six.string_types) else None),
+ src=(src if isinstance(src, str) else None),
changes=changes
)
headers = {'Content-Type': 'application/tar'}
@@ -139,7 +137,7 @@ class ImageApiMixin(object):
return self._result(
self._post(u, data=None, params=params)
)
- elif isinstance(src, six.string_types): # from file path
+ elif isinstance(src, str): # from file path
with open(src, 'rb') as f:
return self._result(
self._post(
@@ -571,7 +569,7 @@ class ImageApiMixin(object):
def is_file(src):
try:
return (
- isinstance(src, six.string_types) and
+ isinstance(src, str) and
os.path.isfile(src)
)
except TypeError: # a data string will make isfile() raise a TypeError
diff --git a/docker/api/network.py b/docker/api/network.py
index 139c2d1..0b76bf3 100644
--- a/docker/api/network.py
+++ b/docker/api/network.py
@@ -4,7 +4,7 @@ from ..utils import version_lt
from .. import utils
-class NetworkApiMixin(object):
+class NetworkApiMixin:
def networks(self, names=None, ids=None, filters=None):
"""
List networks. Similar to the ``docker network ls`` command.
diff --git a/docker/api/plugin.py b/docker/api/plugin.py
index f6c0b13..57110f1 100644
--- a/docker/api/plugin.py
+++ b/docker/api/plugin.py
@@ -1,9 +1,7 @@
-import six
-
from .. import auth, utils
-class PluginApiMixin(object):
+class PluginApiMixin:
@utils.minimum_version('1.25')
@utils.check_resource('name')
def configure_plugin(self, name, options):
@@ -21,7 +19,7 @@ class PluginApiMixin(object):
url = self._url('/plugins/{0}/set', name)
data = options
if isinstance(data, dict):
- data = ['{0}={1}'.format(k, v) for k, v in six.iteritems(data)]
+ data = [f'{k}={v}' for k, v in data.items()]
res = self._post_json(url, data=data)
self._raise_for_status(res)
return True
diff --git a/docker/api/secret.py b/docker/api/secret.py
index e57952b..cd440b9 100644
--- a/docker/api/secret.py
+++ b/docker/api/secret.py
@@ -1,12 +1,10 @@
import base64
-import six
-
from .. import errors
from .. import utils
-class SecretApiMixin(object):
+class SecretApiMixin:
@utils.minimum_version('1.25')
def create_secret(self, name, data, labels=None, driver=None):
"""
@@ -25,8 +23,7 @@ class SecretApiMixin(object):
data = data.encode('utf-8')
data = base64.b64encode(data)
- if six.PY3:
- data = data.decode('ascii')
+ data = data.decode('ascii')
body = {
'Data': data,
'Name': name,
diff --git a/docker/api/service.py b/docker/api/service.py
index e9027bf..371f541 100644
--- a/docker/api/service.py
+++ b/docker/api/service.py
@@ -45,7 +45,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec,
if task_template is not None:
if 'ForceUpdate' in task_template and utils.version_lt(
version, '1.25'):
- raise_version_error('force_update', '1.25')
+ raise_version_error('force_update', '1.25')
if task_template.get('Placement'):
if utils.version_lt(version, '1.30'):
@@ -113,7 +113,7 @@ def _merge_task_template(current, override):
return merged
-class ServiceApiMixin(object):
+class ServiceApiMixin:
@utils.minimum_version('1.24')
def create_service(
self, task_template, name=None, labels=None, mode=None,
diff --git a/docker/api/swarm.py b/docker/api/swarm.py
index 897f08e..2ec1aea 100644
--- a/docker/api/swarm.py
+++ b/docker/api/swarm.py
@@ -1,5 +1,5 @@
import logging
-from six.moves import http_client
+import http.client as http_client
from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE
from .. import errors
from .. import types
@@ -8,7 +8,7 @@ from .. import utils
log = logging.getLogger(__name__)
-class SwarmApiMixin(object):
+class SwarmApiMixin:
def create_swarm_spec(self, *args, **kwargs):
"""
diff --git a/docker/api/volume.py b/docker/api/volume.py
index 900a608..c6b26fe 100644
--- a/docker/api/volume.py
+++ b/docker/api/volume.py
@@ -2,7 +2,7 @@ from .. import errors
from .. import utils
-class VolumeApiMixin(object):
+class VolumeApiMixin:
def volumes(self, filters=None):
"""
List volumes currently registered by the docker daemon. Similar to the
diff --git a/docker/auth.py b/docker/auth.py
index 6a07ea2..4fa798f 100644
--- a/docker/auth.py
+++ b/docker/auth.py
@@ -2,14 +2,12 @@ import base64
import json
import logging
-import six
-
from . import credentials
from . import errors
from .utils import config
INDEX_NAME = 'docker.io'
-INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME)
+INDEX_URL = f'https://index.{INDEX_NAME}/v1/'
TOKEN_USERNAME = '<token>'
log = logging.getLogger(__name__)
@@ -18,13 +16,13 @@ log = logging.getLogger(__name__)
def resolve_repository_name(repo_name):
if '://' in repo_name:
raise errors.InvalidRepository(
- 'Repository name cannot contain a scheme ({0})'.format(repo_name)
+ f'Repository name cannot contain a scheme ({repo_name})'
)
index_name, remote_name = split_repo_name(repo_name)
if index_name[0] == '-' or index_name[-1] == '-':
raise errors.InvalidRepository(
- 'Invalid index name ({0}). Cannot begin or end with a'
+ 'Invalid index name ({}). Cannot begin or end with a'
' hyphen.'.format(index_name)
)
return resolve_index_name(index_name), remote_name
@@ -98,10 +96,10 @@ class AuthConfig(dict):
"""
conf = {}
- for registry, entry in six.iteritems(entries):
+ for registry, entry in entries.items():
if not isinstance(entry, dict):
log.debug(
- 'Config entry for key {0} is not auth config'.format(
+ 'Config entry for key {} is not auth config'.format(
registry
)
)
@@ -111,14 +109,14 @@ class AuthConfig(dict):
# keys is not formatted properly.
if raise_on_error:
raise errors.InvalidConfigFile(
- 'Invalid configuration for registry {0}'.format(
+ 'Invalid configuration for registry {}'.format(
registry
)
)
return {}
if 'identitytoken' in entry:
log.debug(
- 'Found an IdentityToken entry for registry {0}'.format(
+ 'Found an IdentityToken entry for registry {}'.format(
registry
)
)
@@ -132,7 +130,7 @@ class AuthConfig(dict):
# a valid value in the auths config.
# https://github.com/docker/compose/issues/3265
log.debug(
- 'Auth data for {0} is absent. Client might be using a '
+ 'Auth data for {} is absent. Client might be using a '
'credentials store instead.'.format(registry)
)
conf[registry] = {}
@@ -140,7 +138,7 @@ class AuthConfig(dict):
username, password = decode_auth(entry['auth'])
log.debug(
- 'Found entry (registry={0}, username={1})'
+ 'Found entry (registry={}, username={})'
.format(repr(registry), repr(username))
)
@@ -170,7 +168,7 @@ class AuthConfig(dict):
try:
with open(config_file) as f:
config_dict = json.load(f)
- except (IOError, KeyError, ValueError) as e:
+ except (OSError, KeyError, ValueError) as e:
# Likely missing new Docker config file or it's in an
# unknown format, continue to attempt to read old location
# and format.
@@ -230,7 +228,7 @@ class AuthConfig(dict):
store_name = self.get_credential_store(registry)
if store_name is not None:
log.debug(
- 'Using credentials store "{0}"'.format(store_name)
+ f'Using credentials store "{store_name}"'
)
cfg = self._resolve_authconfig_credstore(registry, store_name)
if cfg is not None:
@@ -239,15 +237,15 @@ class AuthConfig(dict):
# Default to the public index server
registry = resolve_index_name(registry) if registry else INDEX_NAME
- log.debug("Looking for auth entry for {0}".format(repr(registry)))
+ log.debug(f"Looking for auth entry for {repr(registry)}")
if registry in self.auths:
- log.debug("Found {0}".format(repr(registry)))
+ log.debug(f"Found {repr(registry)}")
return self.auths[registry]
- for key, conf in six.iteritems(self.auths):
+ for key, conf in self.auths.items():
if resolve_index_name(key) == registry:
- log.debug("Found {0}".format(repr(key)))
+ log.debug(f"Found {repr(key)}")
return conf
log.debug("No entry found")
@@ -258,7 +256,7 @@ class AuthConfig(dict):
# The ecosystem is a little schizophrenic with index.docker.io VS
# docker.io - in that case, it seems the full URL is necessary.
registry = INDEX_URL
- log.debug("Looking for auth entry for {0}".format(repr(registry)))
+ log.debug(f"Looking for auth entry for {repr(registry)}")
store = self._get_store_instance(credstore_name)
try:
data = store.get(registry)
@@ -278,7 +276,7 @@ class AuthConfig(dict):
return None
except credentials.StoreError as e:
raise errors.DockerException(
- 'Credentials store error: {0}'.format(repr(e))
+ f'Credentials store error: {repr(e)}'
)
def _get_store_instance(self, name):
@@ -329,7 +327,7 @@ def convert_to_hostname(url):
def decode_auth(auth):
- if isinstance(auth, six.string_types):
+ if isinstance(auth, str):
auth = auth.encode('ascii')
s = base64.b64decode(auth)
login, pwd = s.split(b':', 1)
diff --git a/docker/client.py b/docker/client.py
index 5add5d7..4dbd846 100644
--- a/docker/client.py
+++ b/docker/client.py
@@ -13,7 +13,7 @@ from .models.volumes import VolumeCollection
from .utils import kwargs_from_env
-class DockerClient(object):
+class DockerClient:
"""
A client for communicating with a Docker server.
@@ -212,7 +212,7 @@ class DockerClient(object):
close.__doc__ = APIClient.close.__doc__
def __getattr__(self, name):
- s = ["'DockerClient' object has no attribute '{}'".format(name)]
+ s = [f"'DockerClient' object has no attribute '{name}'"]
# If a user calls a method on APIClient, they
if hasattr(APIClient, name):
s.append("In Docker SDK for Python 2.0, this method is now on the "
diff --git a/docker/constants.py b/docker/constants.py
index 43fce61..218e491 100644
--- a/docker/constants.py
+++ b/docker/constants.py
@@ -28,7 +28,7 @@ INSECURE_REGISTRY_DEPRECATION_WARNING = \
IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
WINDOWS_LONGPATH_PREFIX = '\\\\?\\'
-DEFAULT_USER_AGENT = "docker-sdk-python/{0}".format(version)
+DEFAULT_USER_AGENT = f"docker-sdk-python/{version}"
DEFAULT_NUM_POOLS = 25
# The OpenSSH server default value for MaxSessions is 10 which means we can
diff --git a/docker/context/api.py b/docker/context/api.py
index c45115b..380e8c4 100644
--- a/docker/context/api.py
+++ b/docker/context/api.py
@@ -9,7 +9,7 @@ from docker.context.config import write_context_name_to_docker_config
from docker.context import Context
-class ContextAPI(object):
+class ContextAPI:
"""Context API.
Contains methods for context management:
create, list, remove, get, inspect.
@@ -109,7 +109,7 @@ class ContextAPI(object):
if filename == METAFILE:
try:
data = json.load(
- open(os.path.join(dirname, filename), "r"))
+ open(os.path.join(dirname, filename)))
names.append(data["Name"])
except Exception as e:
raise errors.ContextException(
@@ -138,7 +138,7 @@ class ContextAPI(object):
err = write_context_name_to_docker_config(name)
if err:
raise errors.ContextException(
- 'Failed to set current context: {}'.format(err))
+ f'Failed to set current context: {err}')
@classmethod
def remove_context(cls, name):
diff --git a/docker/context/config.py b/docker/context/config.py
index baf54f7..d761aef 100644
--- a/docker/context/config.py
+++ b/docker/context/config.py
@@ -15,7 +15,7 @@ def get_current_context_name():
docker_cfg_path = find_config_file()
if docker_cfg_path:
try:
- with open(docker_cfg_path, "r") as f:
+ with open(docker_cfg_path) as f:
name = json.load(f).get("currentContext", "default")
except Exception:
return "default"
@@ -29,7 +29,7 @@ def write_context_name_to_docker_config(name=None):
config = {}
if docker_cfg_path:
try:
- with open(docker_cfg_path, "r") as f:
+ with open(docker_cfg_path) as f:
config = json.load(f)
except Exception as e:
return e
diff --git a/docker/context/context.py b/docker/context/context.py
index f4aff6b..dbaa01c 100644
--- a/docker/context/context.py
+++ b/docker/context/context.py
@@ -94,7 +94,7 @@ class Context:
try:
with open(meta_file) as f:
metadata = json.load(f)
- except (IOError, KeyError, ValueError) as e:
+ except (OSError, KeyError, ValueError) as e:
# unknown format
raise Exception("""Detected corrupted meta file for
context {} : {}""".format(name, e))
@@ -171,7 +171,7 @@ class Context:
rmtree(self.tls_path)
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
def __str__(self):
return json.dumps(self.__call__(), indent=2)
diff --git a/docker/credentials/store.py b/docker/credentials/store.py
index 0017888..e55976f 100644
--- a/docker/credentials/store.py
+++ b/docker/credentials/store.py
@@ -2,15 +2,13 @@ import errno
import json
import subprocess
-import six
-
from . import constants
from . import errors
from .utils import create_environment_dict
from .utils import find_executable
-class Store(object):
+class Store:
def __init__(self, program, environment=None):
""" Create a store object that acts as an interface to
perform the basic operations for storing, retrieving
@@ -30,7 +28,7 @@ class Store(object):
""" Retrieve credentials for `server`. If no credentials are found,
a `StoreError` will be raised.
"""
- if not isinstance(server, six.binary_type):
+ if not isinstance(server, bytes):
server = server.encode('utf-8')
data = self._execute('get', server)
result = json.loads(data.decode('utf-8'))
@@ -41,7 +39,7 @@ class Store(object):
# raise CredentialsNotFound
if result['Username'] == '' and result['Secret'] == '':
raise errors.CredentialsNotFound(
- 'No matching credentials in {}'.format(self.program)
+ f'No matching credentials in {self.program}'
)
return result
@@ -61,7 +59,7 @@ class Store(object):
""" Erase credentials for `server`. Raises a `StoreError` if an error
occurs.
"""
- if not isinstance(server, six.binary_type):
+ if not isinstance(server, bytes):
server = server.encode('utf-8')
self._execute('erase', server)
@@ -75,20 +73,9 @@ class Store(object):
output = None
env = create_environment_dict(self.environment)
try:
- if six.PY3:
- output = subprocess.check_output(
- [self.exe, subcmd], input=data_input, env=env,
- )
- else:
- process = subprocess.Popen(
- [self.exe, subcmd], stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, env=env,
- )
- output, _ = process.communicate(data_input)
- if process.returncode != 0:
- raise subprocess.CalledProcessError(
- returncode=process.returncode, cmd='', output=output
- )
+ output = subprocess.check_output(
+ [self.exe, subcmd], input=data_input, env=env,
+ )
except subprocess.CalledProcessError as e:
raise errors.process_store_error(e, self.program)
except OSError as e:
diff --git a/docker/errors.py b/docker/errors.py
index ab30a29..ba95256 100644
--- a/docker/errors.py
+++ b/docker/errors.py
@@ -38,25 +38,25 @@ class APIError(requests.exceptions.HTTPError, DockerException):
def __init__(self, message, response=None, explanation=None):
# requests 1.2 supports response as a keyword argument, but
# requests 1.1 doesn't
- super(APIError, self).__init__(message)
+ super().__init__(message)
self.response = response
self.explanation = explanation
def __str__(self):
- message = super(APIError, self).__str__()
+ message = super().__str__()
if self.is_client_error():
- message = '{0} Client Error for {1}: {2}'.format(
+ message = '{} Client Error for {}: {}'.format(
self.response.status_code, self.response.url,
self.response.reason)
elif self.is_server_error():
- message = '{0} Server Error for {1}: {2}'.format(
+ message = '{} Server Error for {}: {}'.format(
self.response.status_code, self.response.url,
self.response.reason)
if self.explanation:
- message = '{0} ("{1}")'.format(message, self.explanation)
+ message = f'{message} ("{self.explanation}")'
return message
@@ -133,11 +133,11 @@ class ContainerError(DockerException):
self.image = image
self.stderr = stderr
- err = ": {}".format(stderr) if stderr is not None else ""
+ err = f": {stderr}" if stderr is not None else ""
msg = ("Command '{}' in image '{}' returned non-zero exit "
"status {}{}").format(command, image, exit_status, err)
- super(ContainerError, self).__init__(msg)
+ super().__init__(msg)
class StreamParseError(RuntimeError):
@@ -147,7 +147,7 @@ class StreamParseError(RuntimeError):
class BuildError(DockerException):
def __init__(self, reason, build_log):
- super(BuildError, self).__init__(reason)
+ super().__init__(reason)
self.msg = reason
self.build_log = build_log
@@ -157,8 +157,8 @@ class ImageLoadError(DockerException):
def create_unexpected_kwargs_error(name, kwargs):
- quoted_kwargs = ["'{}'".format(k) for k in sorted(kwargs)]
- text = ["{}() ".format(name)]
+ quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)]
+ text = [f"{name}() "]
if len(quoted_kwargs) == 1:
text.append("got an unexpected keyword argument ")
else:
@@ -172,7 +172,7 @@ class MissingContextParameter(DockerException):
self.param = param
def __str__(self):
- return ("missing parameter: {}".format(self.param))
+ return (f"missing parameter: {self.param}")
class ContextAlreadyExists(DockerException):
@@ -180,7 +180,7 @@ class ContextAlreadyExists(DockerException):
self.name = name
def __str__(self):
- return ("context {} already exists".format(self.name))
+ return (f"context {self.name} already exists")
class ContextException(DockerException):
@@ -196,4 +196,4 @@ class ContextNotFound(DockerException):
self.name = name
def __str__(self):
- return ("context '{}' not found".format(self.name))
+ return (f"context '{self.name}' not found")
diff --git a/docker/models/configs.py b/docker/models/configs.py
index 7f23f65..3588c8b 100644
--- a/docker/models/configs.py
+++ b/docker/models/configs.py
@@ -7,7 +7,7 @@ class Config(Model):
id_attribute = 'ID'
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
@property
def name(self):
diff --git a/docker/models/images.py b/docker/models/images.py
index 28cfc93..46f8efe 100644
--- a/docker/models/images.py
+++ b/docker/models/images.py
@@ -2,8 +2,6 @@ import itertools
import re
import warnings
-import six
-
from ..api import APIClient
from ..constants import DEFAULT_DATA_CHUNK_SIZE
from ..errors import BuildError, ImageLoadError, InvalidArgument
@@ -17,7 +15,7 @@ class Image(Model):
An image on the server.
"""
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, "', '".join(self.tags))
+ return "<{}: '{}'>".format(self.__class__.__name__, "', '".join(self.tags))
@property
def labels(self):
@@ -93,10 +91,10 @@ class Image(Model):
img = self.id
if named:
img = self.tags[0] if self.tags else img
- if isinstance(named, six.string_types):
+ if isinstance(named, str):
if named not in self.tags:
raise InvalidArgument(
- "{} is not a valid tag for this image".format(named)
+ f"{named} is not a valid tag for this image"
)
img = named
@@ -127,7 +125,7 @@ class RegistryData(Model):
Image metadata stored on the registry, including available platforms.
"""
def __init__(self, image_name, *args, **kwargs):
- super(RegistryData, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
self.image_name = image_name
@property
@@ -180,7 +178,7 @@ class RegistryData(Model):
parts = platform.split('/')
if len(parts) > 3 or len(parts) < 1:
raise InvalidArgument(
- '"{0}" is not a valid platform descriptor'.format(platform)
+ f'"{platform}" is not a valid platform descriptor'
)
platform = {'os': parts[0]}
if len(parts) > 2:
@@ -277,7 +275,7 @@ class ImageCollection(Collection):
If neither ``path`` nor ``fileobj`` is specified.
"""
resp = self.client.api.build(**kwargs)
- if isinstance(resp, six.string_types):
+ if isinstance(resp, str):
return self.get(resp)
last_event = None
image_id = None
diff --git a/docker/models/plugins.py b/docker/models/plugins.py
index ae5851c..37ecefb 100644
--- a/docker/models/plugins.py
+++ b/docker/models/plugins.py
@@ -7,7 +7,7 @@ class Plugin(Model):
A plugin on the server.
"""
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
@property
def name(self):
@@ -117,8 +117,7 @@ class Plugin(Model):
if remote is None:
remote = self.name
privileges = self.client.api.plugin_privileges(remote)
- for d in self.client.api.upgrade_plugin(self.name, remote, privileges):
- yield d
+ yield from self.client.api.upgrade_plugin(self.name, remote, privileges)
self.reload()
diff --git a/docker/models/resource.py b/docker/models/resource.py
index ed3900a..dec2349 100644
--- a/docker/models/resource.py
+++ b/docker/models/resource.py
@@ -1,5 +1,4 @@
-
-class Model(object):
+class Model:
"""
A base class for representing a single object on the server.
"""
@@ -18,13 +17,13 @@ class Model(object):
self.attrs = {}
def __repr__(self):
- return "<%s: %s>" % (self.__class__.__name__, self.short_id)
+ return f"<{self.__class__.__name__}: {self.short_id}>"
def __eq__(self, other):
return isinstance(other, self.__class__) and self.id == other.id
def __hash__(self):
- return hash("%s:%s" % (self.__class__.__name__, self.id))
+ return hash(f"{self.__class__.__name__}:{self.id}")
@property
def id(self):
@@ -49,7 +48,7 @@ class Model(object):
self.attrs = new_model.attrs
-class Collection(object):
+class Collection:
"""
A base class for representing all objects of a particular type on the
server.
diff --git a/docker/models/secrets.py b/docker/models/secrets.py
index e2ee88a..da01d44 100644
--- a/docker/models/secrets.py
+++ b/docker/models/secrets.py
@@ -7,7 +7,7 @@ class Secret(Model):
id_attribute = 'ID'
def __repr__(self):
- return "<%s: '%s'>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__}: '{self.name}'>"
@property
def name(self):
diff --git a/docker/models/swarm.py b/docker/models/swarm.py
index 755c17d..b0b1a2e 100644
--- a/docker/models/swarm.py
+++ b/docker/models/swarm.py
@@ -11,7 +11,7 @@ class Swarm(Model):
id_attribute = 'ID'
def __init__(self, *args, **kwargs):
- super(Swarm, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
if self.client:
try:
self.reload()
diff --git a/docker/tls.py b/docker/tls.py
index 1b297ab..067d556 100644
--- a/docker/tls.py
+++ b/docker/tls.py
@@ -5,7 +5,7 @@ from . import errors
from .transport import SSLHTTPAdapter
-class TLSConfig(object):
+class TLSConfig:
"""
TLS configuration.
diff --git a/docker/transport/basehttpadapter.py b/docker/transport/basehttpadapter.py
index 4d819b6..dfbb193 100644
--- a/docker/transport/basehttpadapter.py
+++ b/docker/transport/basehttpadapter.py
@@ -3,6 +3,6 @@ import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self):
- super(BaseHTTPAdapter, self).close()
+ super().close()
if hasattr(self, 'pools'):
self.pools.clear()
diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py
index 70d8519..df67f21 100644
--- a/docker/transport/npipeconn.py
+++ b/docker/transport/npipeconn.py
@@ -1,14 +1,11 @@
-import six
+import queue
import requests.adapters
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
from .npipesocket import NpipeSocket
-if six.PY3:
- import http.client as httplib
-else:
- import httplib
+import http.client as httplib
try:
import requests.packages.urllib3 as urllib3
@@ -18,9 +15,9 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
-class NpipeHTTPConnection(httplib.HTTPConnection, object):
+class NpipeHTTPConnection(httplib.HTTPConnection):
def __init__(self, npipe_path, timeout=60):
- super(NpipeHTTPConnection, self).__init__(
+ super().__init__(
'localhost', timeout=timeout
)
self.npipe_path = npipe_path
@@ -35,7 +32,7 @@ class NpipeHTTPConnection(httplib.HTTPConnection, object):
class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, npipe_path, timeout=60, maxsize=10):
- super(NpipeHTTPConnectionPool, self).__init__(
+ super().__init__(
'localhost', timeout=timeout, maxsize=maxsize
)
self.npipe_path = npipe_path
@@ -57,7 +54,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
- except six.moves.queue.Empty:
+ except queue.Empty:
if self.block:
raise urllib3.exceptions.EmptyPoolError(
self,
@@ -85,7 +82,7 @@ class NpipeHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
- super(NpipeHTTPAdapter, self).__init__()
+ super().__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:
diff --git a/docker/transport/npipesocket.py b/docker/transport/npipesocket.py
index 176b5c8..766372a 100644
--- a/docker/transport/npipesocket.py
+++ b/docker/transport/npipesocket.py
@@ -2,7 +2,6 @@ import functools
import time
import io
-import six
import win32file
import win32pipe
@@ -24,7 +23,7 @@ def check_closed(f):
return wrapped
-class NpipeSocket(object):
+class NpipeSocket:
""" Partial implementation of the socket API over windows named pipes.
This implementation is only designed to be used as a client socket,
and server-specific methods (bind, listen, accept...) are not
@@ -128,9 +127,6 @@ class NpipeSocket(object):
@check_closed
def recv_into(self, buf, nbytes=0):
- if six.PY2:
- return self._recv_into_py2(buf, nbytes)
-
readbuf = buf
if not isinstance(buf, memoryview):
readbuf = memoryview(buf)
@@ -195,7 +191,7 @@ class NpipeFileIOBase(io.RawIOBase):
self.sock = npipe_socket
def close(self):
- super(NpipeFileIOBase, self).close()
+ super().close()
self.sock = None
def fileno(self):
diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py
index fb5c6bb..3ca45c4 100644
--- a/docker/transport/sshconn.py
+++ b/docker/transport/sshconn.py
@@ -1,6 +1,7 @@
import paramiko
+import queue
+import urllib.parse
import requests.adapters
-import six
import logging
import os
import signal
@@ -10,10 +11,7 @@ import subprocess
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
-if six.PY3:
- import http.client as httplib
-else:
- import httplib
+import http.client as httplib
try:
import requests.packages.urllib3 as urllib3
@@ -25,7 +23,7 @@ RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class SSHSocket(socket.socket):
def __init__(self, host):
- super(SSHSocket, self).__init__(
+ super().__init__(
socket.AF_INET, socket.SOCK_STREAM)
self.host = host
self.port = None
@@ -90,8 +88,7 @@ class SSHSocket(socket.socket):
def makefile(self, mode):
if not self.proc:
self.connect()
- if six.PY3:
- self.proc.stdout.channel = self
+ self.proc.stdout.channel = self
return self.proc.stdout
@@ -103,9 +100,9 @@ class SSHSocket(socket.socket):
self.proc.terminate()
-class SSHConnection(httplib.HTTPConnection, object):
+class SSHConnection(httplib.HTTPConnection):
def __init__(self, ssh_transport=None, timeout=60, host=None):
- super(SSHConnection, self).__init__(
+ super().__init__(
'localhost', timeout=timeout
)
self.ssh_transport = ssh_transport
@@ -129,7 +126,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
scheme = 'ssh'
def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):
- super(SSHConnectionPool, self).__init__(
+ super().__init__(
'localhost', timeout=timeout, maxsize=maxsize
)
self.ssh_transport = None
@@ -152,7 +149,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
- except six.moves.queue.Empty:
+ except queue.Empty:
if self.block:
raise urllib3.exceptions.EmptyPoolError(
self,
@@ -188,12 +185,12 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
- super(SSHHTTPAdapter, self).__init__()
+ super().__init__()
def _create_paramiko_client(self, base_url):
logging.getLogger("paramiko").setLevel(logging.WARNING)
self.ssh_client = paramiko.SSHClient()
- base_url = six.moves.urllib_parse.urlparse(base_url)
+ base_url = urllib.parse.urlparse(base_url)
self.ssh_params = {
"hostname": base_url.hostname,
"port": base_url.port,
@@ -252,6 +249,6 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
return pool
def close(self):
- super(SSHHTTPAdapter, self).close()
+ super().close()
if self.ssh_client:
self.ssh_client.close()
diff --git a/docker/transport/ssladapter.py b/docker/transport/ssladapter.py
index 12de76c..31e3014 100644
--- a/docker/transport/ssladapter.py
+++ b/docker/transport/ssladapter.py
@@ -36,7 +36,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
- super(SSLHTTPAdapter, self).__init__(**kwargs)
+ super().__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
kwargs = {
@@ -59,7 +59,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
But we still need to take care of when there is a proxy poolmanager
"""
- conn = super(SSLHTTPAdapter, self).get_connection(*args, **kwargs)
+ conn = super().get_connection(*args, **kwargs)
if conn.assert_hostname != self.assert_hostname:
conn.assert_hostname = self.assert_hostname
return conn
diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py
index 3e040c5..adb6f18 100644
--- a/docker/transport/unixconn.py
+++ b/docker/transport/unixconn.py
@@ -1,7 +1,6 @@
-import six
import requests.adapters
import socket
-from six.moves import http_client as httplib
+import http.client as httplib
from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants
@@ -15,21 +14,10 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
-class UnixHTTPResponse(httplib.HTTPResponse, object):
- def __init__(self, sock, *args, **kwargs):
- disable_buffering = kwargs.pop('disable_buffering', False)
- if six.PY2:
- # FIXME: We may need to disable buffering on Py3 as well,
- # but there's no clear way to do it at the moment. See:
- # https://github.com/docker/docker-py/issues/1799
- kwargs['buffering'] = not disable_buffering
- super(UnixHTTPResponse, self).__init__(sock, *args, **kwargs)
-
-
-class UnixHTTPConnection(httplib.HTTPConnection, object):
+class UnixHTTPConnection(httplib.HTTPConnection):
def __init__(self, base_url, unix_socket, timeout=60):
- super(UnixHTTPConnection, self).__init__(
+ super().__init__(
'localhost', timeout=timeout
)
self.base_url = base_url
@@ -44,7 +32,7 @@ class UnixHTTPConnection(httplib.HTTPConnection, object):
self.sock = sock
def putheader(self, header, *values):
- super(UnixHTTPConnection, self).putheader(header, *values)
+ super().putheader(header, *values)
if header == 'Connection' and 'Upgrade' in values:
self.disable_buffering = True
@@ -52,12 +40,12 @@ class UnixHTTPConnection(httplib.HTTPConnection, object):
if self.disable_buffering:
kwargs['disable_buffering'] = True
- return UnixHTTPResponse(sock, *args, **kwargs)
+ return httplib.HTTPResponse(sock, *args, **kwargs)
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60, maxsize=10):
- super(UnixHTTPConnectionPool, self).__init__(
+ super().__init__(
'localhost', timeout=timeout, maxsize=maxsize
)
self.base_url = base_url
@@ -89,7 +77,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close()
)
- super(UnixHTTPAdapter, self).__init__()
+ super().__init__()
def get_connection(self, url, proxies=None):
with self.pools.lock:
diff --git a/docker/types/base.py b/docker/types/base.py
index 6891062..8851f1e 100644
--- a/docker/types/base.py
+++ b/docker/types/base.py
@@ -1,7 +1,4 @@
-import six
-
-
class DictType(dict):
def __init__(self, init):
- for k, v in six.iteritems(init):
+ for k, v in init.items():
self[k] = v
diff --git a/docker/types/containers.py b/docker/types/containers.py
index 9fa4656..f1b60b2 100644
--- a/docker/types/containers.py
+++ b/docker/types/containers.py
@@ -1,5 +1,3 @@
-import six
-
from .. import errors
from ..utils.utils import (
convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds,
@@ -10,7 +8,7 @@ from .base import DictType
from .healthcheck import Healthcheck
-class LogConfigTypesEnum(object):
+class LogConfigTypesEnum:
_values = (
'json-file',
'syslog',
@@ -61,7 +59,7 @@ class LogConfig(DictType):
if config and not isinstance(config, dict):
raise ValueError("LogConfig.config must be a dictionary")
- super(LogConfig, self).__init__({
+ super().__init__({
'Type': log_driver_type,
'Config': config
})
@@ -117,13 +115,13 @@ class Ulimit(DictType):
name = kwargs.get('name', kwargs.get('Name'))
soft = kwargs.get('soft', kwargs.get('Soft'))
hard = kwargs.get('hard', kwargs.get('Hard'))
- if not isinstance(name, six.string_types):
+ if not isinstance(name, str):
raise ValueError("Ulimit.name must be a string")
if soft and not isinstance(soft, int):
raise ValueError("Ulimit.soft must be an integer")
if hard and not isinstance(hard, int):
raise ValueError("Ulimit.hard must be an integer")
- super(Ulimit, self).__init__({
+ super().__init__({
'Name': name,
'Soft': soft,
'Hard': hard
@@ -184,7 +182,7 @@ class DeviceRequest(DictType):
if driver is None:
driver = ''
- elif not isinstance(driver, six.string_types):
+ elif not isinstance(driver, str):
raise ValueError('DeviceRequest.driver must be a string')
if count is None:
count = 0
@@ -203,7 +201,7 @@ class DeviceRequest(DictType):
elif not isinstance(options, dict):
raise ValueError('DeviceRequest.options must be a dict')
- super(DeviceRequest, self).__init__({
+ super().__init__({
'Driver': driver,
'Count': count,
'DeviceIDs': device_ids,
@@ -297,7 +295,7 @@ class HostConfig(dict):
self['MemorySwappiness'] = mem_swappiness
if shm_size is not None:
- if isinstance(shm_size, six.string_types):
+ if isinstance(shm_size, str):
shm_size = parse_bytes(shm_size)
self['ShmSize'] = shm_size
@@ -358,7 +356,7 @@ class HostConfig(dict):
self['Devices'] = parse_devices(devices)
if group_add:
- self['GroupAdd'] = [six.text_type(grp) for grp in group_add]
+ self['GroupAdd'] = [str(grp) for grp in group_add]
if dns is not None:
self['Dns'] = dns
@@ -378,11 +376,11 @@ class HostConfig(dict):
if not isinstance(sysctls, dict):
raise host_config_type_error('sysctls', sysctls, 'dict')
self['Sysctls'] = {}
- for k, v in six.iteritems(sysctls):
- self['Sysctls'][k] = six.text_type(v)
+ for k, v in sysctls.items():
+ self['Sysctls'][k] = str(v)
if volumes_from is not None:
- if isinstance(volumes_from, six.string_types):
+ if isinstance(volumes_from, str):
volumes_from = volumes_from.split(',')
self['VolumesFrom'] = volumes_from
@@ -404,7 +402,7 @@ class HostConfig(dict):
if isinstance(lxc_conf, dict):
formatted = []
- for k, v in six.iteritems(lxc_conf):
+ for k, v in lxc_conf.items():
formatted.append({'Key': k, 'Value': str(v)})
lxc_conf = formatted
@@ -559,7 +557,7 @@ class HostConfig(dict):
self["PidsLimit"] = pids_limit
if isolation:
- if not isinstance(isolation, six.string_types):
+ if not isinstance(isolation, str):
raise host_config_type_error('isolation', isolation, 'string')
if version_lt(version, '1.24'):
raise host_config_version_error('isolation', '1.24')
@@ -609,7 +607,7 @@ class HostConfig(dict):
self['CpuPercent'] = cpu_percent
if nano_cpus:
- if not isinstance(nano_cpus, six.integer_types):
+ if not isinstance(nano_cpus, int):
raise host_config_type_error('nano_cpus', nano_cpus, 'int')
if version_lt(version, '1.25'):
raise host_config_version_error('nano_cpus', '1.25')
@@ -699,17 +697,17 @@ class ContainerConfig(dict):
'version 1.29'
)
- if isinstance(command, six.string_types):
+ if isinstance(command, str):
command = split_command(command)
- if isinstance(entrypoint, six.string_types):
+ if isinstance(entrypoint, str):
entrypoint = split_command(entrypoint)
if isinstance(environment, dict):
environment = format_environment(environment)
if isinstance(labels, list):
- labels = dict((lbl, six.text_type('')) for lbl in labels)
+ labels = {lbl: '' for lbl in labels}
if isinstance(ports, list):
exposed_ports = {}
@@ -720,10 +718,10 @@ class ContainerConfig(dict):
if len(port_definition) == 2:
proto = port_definition[1]
port = port_definition[0]
- exposed_ports['{0}/{1}'.format(port, proto)] = {}
+ exposed_ports[f'{port}/{proto}'] = {}
ports = exposed_ports
- if isinstance(volumes, six.string_types):
+ if isinstance(volumes, str):
volumes = [volumes, ]
if isinstance(volumes, list):
@@ -752,7 +750,7 @@ class ContainerConfig(dict):
'Hostname': hostname,
'Domainname': domainname,
'ExposedPorts': ports,
- 'User': six.text_type(user) if user is not None else None,
+ 'User': str(user) if user is not None else None,
'Tty': tty,
'OpenStdin': stdin_open,
'StdinOnce': stdin_once,
diff --git a/docker/types/daemon.py b/docker/types/daemon.py
index af3e5bc..10e8101 100644
--- a/docker/types/daemon.py
+++ b/docker/types/daemon.py
@@ -8,7 +8,7 @@ except ImportError:
from ..errors import DockerException
-class CancellableStream(object):
+class CancellableStream:
"""
Stream wrapper for real-time events, logs, etc. from the server.
@@ -32,7 +32,7 @@ class CancellableStream(object):
return next(self._stream)
except urllib3.exceptions.ProtocolError:
raise StopIteration
- except socket.error:
+ except OSError:
raise StopIteration
next = __next__
diff --git a/docker/types/healthcheck.py b/docker/types/healthcheck.py
index 9815018..dfc88a9 100644
--- a/docker/types/healthcheck.py
+++ b/docker/types/healthcheck.py
@@ -1,7 +1,5 @@
from .base import DictType
-import six
-
class Healthcheck(DictType):
"""
@@ -31,7 +29,7 @@ class Healthcheck(DictType):
"""
def __init__(self, **kwargs):
test = kwargs.get('test', kwargs.get('Test'))
- if isinstance(test, six.string_types):
+ if isinstance(test, str):
test = ["CMD-SHELL", test]
interval = kwargs.get('interval', kwargs.get('Interval'))
@@ -39,7 +37,7 @@ class Healthcheck(DictType):
retries = kwargs.get('retries', kwargs.get('Retries'))
start_period = kwargs.get('start_period', kwargs.get('StartPeriod'))
- super(Healthcheck, self).__init__({
+ super().__init__({
'Test': test,
'Interval': interval,
'Timeout': timeout,
@@ -53,7 +51,7 @@ class Healthcheck(DictType):
@test.setter
def test(self, value):
- if isinstance(value, six.string_types):
+ if isinstance(value, str):
value = ["CMD-SHELL", value]
self['Test'] = value
diff --git a/docker/types/services.py b/docker/types/services.py
index 29498e9..a6dd76e 100644
--- a/docker/types/services.py
+++ b/docker/types/services.py
@@ -1,5 +1,3 @@
-import six
-
from .. import errors
from ..constants import IS_WINDOWS_PLATFORM
from ..utils import (
@@ -121,7 +119,7 @@ class ContainerSpec(dict):
privileges=None, isolation=None, init=None):
self['Image'] = image
- if isinstance(command, six.string_types):
+ if isinstance(command, str):
command = split_command(command)
self['Command'] = command
self['Args'] = args
@@ -151,7 +149,7 @@ class ContainerSpec(dict):
if mounts is not None:
parsed_mounts = []
for mount in mounts:
- if isinstance(mount, six.string_types):
+ if isinstance(mount, str):
parsed_mounts.append(Mount.parse_mount_string(mount))
else:
# If mount already parsed
@@ -224,7 +222,7 @@ class Mount(dict):
self['Source'] = source
if type not in ('bind', 'volume', 'tmpfs', 'npipe'):
raise errors.InvalidArgument(
- 'Unsupported mount type: "{}"'.format(type)
+ f'Unsupported mount type: "{type}"'
)
self['Type'] = type
self['ReadOnly'] = read_only
@@ -260,7 +258,7 @@ class Mount(dict):
elif type == 'tmpfs':
tmpfs_opts = {}
if tmpfs_mode:
- if not isinstance(tmpfs_mode, six.integer_types):
+ if not isinstance(tmpfs_mode, int):
raise errors.InvalidArgument(
'tmpfs_mode must be an integer'
)
@@ -280,7 +278,7 @@ class Mount(dict):
parts = string.split(':')
if len(parts) > 3:
raise errors.InvalidArgument(
- 'Invalid mount format "{0}"'.format(string)
+ f'Invalid mount format "{string}"'
)
if len(parts) == 1:
return cls(target=parts[0], source=None)
@@ -347,7 +345,7 @@ def _convert_generic_resources_dict(generic_resources):
' (found {})'.format(type(generic_resources))
)
resources = []
- for kind, value in six.iteritems(generic_resources):
+ for kind, value in generic_resources.items():
resource_type = None
if isinstance(value, int):
resource_type = 'DiscreteResourceSpec'
@@ -443,7 +441,7 @@ class RollbackConfig(UpdateConfig):
pass
-class RestartConditionTypesEnum(object):
+class RestartConditionTypesEnum:
_values = (
'none',
'on-failure',
@@ -474,7 +472,7 @@ class RestartPolicy(dict):
max_attempts=0, window=0):
if condition not in self.condition_types._values:
raise TypeError(
- 'Invalid RestartPolicy condition {0}'.format(condition)
+ f'Invalid RestartPolicy condition {condition}'
)
self['Condition'] = condition
@@ -533,7 +531,7 @@ def convert_service_ports(ports):
)
result = []
- for k, v in six.iteritems(ports):
+ for k, v in ports.items():
port_spec = {
'Protocol': 'tcp',
'PublishedPort': k
diff --git a/docker/utils/build.py b/docker/utils/build.py
index 5787cab..ac06043 100644
--- a/docker/utils/build.py
+++ b/docker/utils/build.py
@@ -4,8 +4,6 @@ import re
import tarfile
import tempfile
-import six
-
from .fnmatch import fnmatch
from ..constants import IS_WINDOWS_PLATFORM
@@ -69,7 +67,7 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj)
if files is None:
files = build_file_list(root)
- extra_names = set(e[0] for e in extra_files)
+ extra_names = {e[0] for e in extra_files}
for path in files:
if path in extra_names:
# Extra files override context files with the same name
@@ -95,9 +93,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
try:
with open(full_path, 'rb') as f:
t.addfile(i, f)
- except IOError:
- raise IOError(
- 'Can not read file in context: {}'.format(full_path)
+ except OSError:
+ raise OSError(
+ f'Can not read file in context: {full_path}'
)
else:
# Directories, FIFOs, symlinks... don't need to be read.
@@ -119,12 +117,8 @@ def mkbuildcontext(dockerfile):
t = tarfile.open(mode='w', fileobj=f)
if isinstance(dockerfile, io.StringIO):
dfinfo = tarfile.TarInfo('Dockerfile')
- if six.PY3:
- raise TypeError('Please use io.BytesIO to create in-memory '
- 'Dockerfiles with Python 3')
- else:
- dfinfo.size = len(dockerfile.getvalue())
- dockerfile.seek(0)
+ raise TypeError('Please use io.BytesIO to create in-memory '
+ 'Dockerfiles with Python 3')
elif isinstance(dockerfile, io.BytesIO):
dfinfo = tarfile.TarInfo('Dockerfile')
dfinfo.size = len(dockerfile.getvalue())
@@ -154,7 +148,7 @@ def walk(root, patterns, default=True):
# Heavily based on
# https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go
-class PatternMatcher(object):
+class PatternMatcher:
def __init__(self, patterns):
self.patterns = list(filter(
lambda p: p.dirs, [Pattern(p) for p in patterns]
@@ -212,13 +206,12 @@ class PatternMatcher(object):
break
if skip:
continue
- for sub in rec_walk(cur):
- yield sub
+ yield from rec_walk(cur)
return rec_walk(root)
-class Pattern(object):
+class Pattern:
def __init__(self, pattern_str):
self.exclusion = False
if pattern_str.startswith('!'):
diff --git a/docker/utils/config.py b/docker/utils/config.py
index 82a0e2a..8e24959 100644
--- a/docker/utils/config.py
+++ b/docker/utils/config.py
@@ -18,11 +18,11 @@ def find_config_file(config_path=None):
os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4
]))
- log.debug("Trying paths: {0}".format(repr(paths)))
+ log.debug(f"Trying paths: {repr(paths)}")
for path in paths:
if os.path.exists(path):
- log.debug("Found file at path: {0}".format(path))
+ log.debug(f"Found file at path: {path}")
return path
log.debug("No config file found")
@@ -57,7 +57,7 @@ def load_general_config(config_path=None):
try:
with open(config_file) as f:
return json.load(f)
- except (IOError, ValueError) as e:
+ except (OSError, ValueError) as e:
# In the case of a legacy `.dockercfg` file, we won't
# be able to load any JSON data.
log.debug(e)
diff --git a/docker/utils/decorators.py b/docker/utils/decorators.py
index c975d4b..cf1baf4 100644
--- a/docker/utils/decorators.py
+++ b/docker/utils/decorators.py
@@ -27,7 +27,7 @@ def minimum_version(version):
def wrapper(self, *args, **kwargs):
if utils.version_lt(self._version, version):
raise errors.InvalidVersion(
- '{0} is not available for version < {1}'.format(
+ '{} is not available for version < {}'.format(
f.__name__, version
)
)
diff --git a/docker/utils/fnmatch.py b/docker/utils/fnmatch.py
index cc940a2..90e9f60 100644
--- a/docker/utils/fnmatch.py
+++ b/docker/utils/fnmatch.py
@@ -108,7 +108,7 @@ def translate(pat):
stuff = '^' + stuff[1:]
elif stuff[0] == '^':
stuff = '\\' + stuff
- res = '%s[%s]' % (res, stuff)
+ res = f'{res}[{stuff}]'
else:
res = res + re.escape(c)
diff --git a/docker/utils/json_stream.py b/docker/utils/json_stream.py
index addffdf..f384175 100644
--- a/docker/utils/json_stream.py
+++ b/docker/utils/json_stream.py
@@ -1,11 +1,6 @@
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
import json
import json.decoder
-import six
-
from ..errors import StreamParseError
@@ -20,7 +15,7 @@ def stream_as_text(stream):
instead of byte streams.
"""
for data in stream:
- if not isinstance(data, six.text_type):
+ if not isinstance(data, str):
data = data.decode('utf-8', 'replace')
yield data
@@ -46,8 +41,8 @@ def json_stream(stream):
return split_buffer(stream, json_splitter, json_decoder.decode)
-def line_splitter(buffer, separator=u'\n'):
- index = buffer.find(six.text_type(separator))
+def line_splitter(buffer, separator='\n'):
+ index = buffer.find(str(separator))
if index == -1:
return None
return buffer[:index + 1], buffer[index + 1:]
@@ -61,7 +56,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
of the input.
"""
splitter = splitter or line_splitter
- buffered = six.text_type('')
+ buffered = ''
for data in stream_as_text(stream):
buffered += data
diff --git a/docker/utils/ports.py b/docker/utils/ports.py
index 10b19d7..e813936 100644
--- a/docker/utils/ports.py
+++ b/docker/utils/ports.py
@@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False):
if not end:
return [start + proto]
if randomly_available_port:
- return ['{}-{}'.format(start, end) + proto]
+ return [f'{start}-{end}' + proto]
return [str(port) + proto for port in range(int(start), int(end) + 1)]
diff --git a/docker/utils/socket.py b/docker/utils/socket.py
index 7ba9505..4a2076e 100644
--- a/docker/utils/socket.py
+++ b/docker/utils/socket.py
@@ -4,8 +4,6 @@ import select
import socket as pysocket
import struct
-import six
-
try:
from ..transport import NpipeSocket
except ImportError:
@@ -27,16 +25,16 @@ def read(socket, n=4096):
recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK)
- if six.PY3 and not isinstance(socket, NpipeSocket):
+ if not isinstance(socket, NpipeSocket):
select.select([socket], [], [])
try:
if hasattr(socket, 'recv'):
return socket.recv(n)
- if six.PY3 and isinstance(socket, getattr(pysocket, 'SocketIO')):
+ if isinstance(socket, getattr(pysocket, 'SocketIO')):
return socket.read(n)
return os.read(socket.fileno(), n)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in recoverable_errors:
raise
@@ -46,7 +44,7 @@ def read_exactly(socket, n):
Reads exactly n bytes from socket
Raises SocketError if there isn't enough data
"""
- data = six.binary_type()
+ data = bytes()
while len(data) < n:
next_data = read(socket, n - len(data))
if not next_data:
@@ -134,7 +132,7 @@ def consume_socket_output(frames, demux=False):
if demux is False:
# If the streams are multiplexed, the generator returns strings, that
# we just need to concatenate.
- return six.binary_type().join(frames)
+ return bytes().join(frames)
# If the streams are demultiplexed, the generator yields tuples
# (stdout, stderr)
@@ -166,4 +164,4 @@ def demux_adaptor(stream_id, data):
elif stream_id == STDERR:
return (None, data)
else:
- raise ValueError('{0} is not a valid stream'.format(stream_id))
+ raise ValueError(f'{stream_id} is not a valid stream')
diff --git a/docker/utils/utils.py b/docker/utils/utils.py
index f703cbd..f7c3dd7 100644
--- a/docker/utils/utils.py
+++ b/docker/utils/utils.py
@@ -136,13 +136,13 @@ def convert_volume_binds(binds):
mode = 'rw'
result.append(
- str('{0}:{1}:{2}').format(k, bind, mode)
+ f'{k}:{bind}:{mode}'
)
else:
if isinstance(v, bytes):
v = v.decode('utf-8')
result.append(
- str('{0}:{1}:rw').format(k, v)
+ f'{k}:{v}:rw'
)
return result
@@ -233,14 +233,14 @@ def parse_host(addr, is_win32=False, tls=False):
if proto not in ('tcp', 'unix', 'npipe', 'ssh'):
raise errors.DockerException(
- "Invalid bind address protocol: {}".format(addr)
+ f"Invalid bind address protocol: {addr}"
)
if proto == 'tcp' and not parsed_url.netloc:
# "tcp://" is exceptionally disallowed by convention;
# omitting a hostname for other protocols is fine
raise errors.DockerException(
- 'Invalid bind address format: {}'.format(addr)
+ f'Invalid bind address format: {addr}'
)
if any([
@@ -248,7 +248,7 @@ def parse_host(addr, is_win32=False, tls=False):
parsed_url.password
]):
raise errors.DockerException(
- 'Invalid bind address format: {}'.format(addr)
+ f'Invalid bind address format: {addr}'
)
if parsed_url.path and proto == 'ssh':
@@ -285,8 +285,8 @@ def parse_host(addr, is_win32=False, tls=False):
proto = 'http+unix'
if proto in ('http+unix', 'npipe'):
- return "{}://{}".format(proto, path).rstrip('/')
- return '{0}://{1}:{2}{3}'.format(proto, host, port, path).rstrip('/')
+ return f"{proto}://{path}".rstrip('/')
+ return f'{proto}://{host}:{port}{path}'.rstrip('/')
def parse_devices(devices):
@@ -297,7 +297,7 @@ def parse_devices(devices):
continue
if not isinstance(device, str):
raise errors.DockerException(
- 'Invalid device type {0}'.format(type(device))
+ f'Invalid device type {type(device)}'
)
device_mapping = device.split(':')
if device_mapping:
@@ -408,7 +408,7 @@ def parse_bytes(s):
digits = float(digits_part)
except ValueError:
raise errors.DockerException(
- 'Failed converting the string value for memory ({0}) to'
+ 'Failed converting the string value for memory ({}) to'
' an integer.'.format(digits_part)
)
@@ -416,7 +416,7 @@ def parse_bytes(s):
s = int(digits * units[suffix])
else:
raise errors.DockerException(
- 'The specified value for memory ({0}) should specify the'
+ 'The specified value for memory ({}) should specify the'
' units. The postfix should be one of the `b` `k` `m` `g`'
' characters'.format(s)
)
@@ -428,7 +428,7 @@ def normalize_links(links):
if isinstance(links, dict):
links = iter(links.items())
- return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)]
+ return [f'{k}:{v}' if v else k for k, v in sorted(links)]
def parse_env_file(env_file):
@@ -438,7 +438,7 @@ def parse_env_file(env_file):
"""
environment = {}
- with open(env_file, 'r') as f:
+ with open(env_file) as f:
for line in f:
if line[0] == '#':
@@ -454,7 +454,7 @@ def parse_env_file(env_file):
environment[k] = v
else:
raise errors.DockerException(
- 'Invalid line in environment file {0}:\n{1}'.format(
+ 'Invalid line in environment file {}:\n{}'.format(
env_file, line))
return environment
@@ -471,7 +471,7 @@ def format_environment(environment):
if isinstance(value, bytes):
value = value.decode('utf-8')
- return u'{key}={value}'.format(key=key, value=value)
+ return f'{key}={value}'
return [format_env(*var) for var in iter(environment.items())]
@@ -479,11 +479,11 @@ def format_extra_hosts(extra_hosts, task=False):
# Use format dictated by Swarm API if container is part of a task
if task:
return [
- '{} {}'.format(v, k) for k, v in sorted(iter(extra_hosts.items()))
+ f'{v} {k}' for k, v in sorted(iter(extra_hosts.items()))
]
return [
- '{}:{}'.format(k, v) for k, v in sorted(iter(extra_hosts.items()))
+ f'{k}:{v}' for k, v in sorted(iter(extra_hosts.items()))
]
diff --git a/docker/version.py b/docker/version.py
index bc09e63..3554104 100644
--- a/docker/version.py
+++ b/docker/version.py
@@ -1,2 +1,2 @@
version = "4.5.0-dev"
-version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
+version_info = tuple(int(d) for d in version.split("-")[0].split("."))
diff --git a/docs/conf.py b/docs/conf.py
index f46d1f7..2b0a719 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
#
# docker-sdk-python documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 14 15:48:58 2016.
@@ -60,21 +59,21 @@ source_suffix = ['.rst', '.md']
master_doc = 'index'
# General information about the project.
-project = u'Docker SDK for Python'
+project = 'Docker SDK for Python'
year = datetime.datetime.now().year
-copyright = u'%d Docker Inc' % year
-author = u'Docker Inc'
+copyright = '%d Docker Inc' % year
+author = 'Docker Inc'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
-with open('../docker/version.py', 'r') as vfile:
+with open('../docker/version.py') as vfile:
exec(vfile.read())
# The full version, including alpha/beta/rc tags.
release = version
# The short X.Y version.
-version = '{}.{}'.format(version_info[0], version_info[1])
+version = f'{version_info[0]}.{version_info[1]}'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -283,8 +282,8 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, 'docker-sdk-python.tex', u'docker-sdk-python Documentation',
- u'Docker Inc.', 'manual'),
+ (master_doc, 'docker-sdk-python.tex', 'docker-sdk-python Documentation',
+ 'Docker Inc.', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -325,7 +324,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation',
+ (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation',
[author], 1)
]
@@ -340,7 +339,7 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation',
+ (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation',
author, 'docker-sdk-python', 'One line description of project.',
'Miscellaneous'),
]
diff --git a/scripts/versions.py b/scripts/versions.py
index 4bdcb74..75e5355 100755
--- a/scripts/versions.py
+++ b/scripts/versions.py
@@ -52,8 +52,8 @@ class Version(namedtuple('_Version', 'major minor patch stage edition')):
return (int(self.major), int(self.minor), int(self.patch)) + stage
def __str__(self):
- stage = '-{}'.format(self.stage) if self.stage else ''
- edition = '-{}'.format(self.edition) if self.edition else ''
+ stage = f'-{self.stage}' if self.stage else ''
+ edition = f'-{self.edition}' if self.edition else ''
return '.'.join(map(str, self[:3])) + edition + stage
diff --git a/setup.py b/setup.py
index ec1a51d..a966fea 100644
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
-from __future__ import print_function
import codecs
import os
diff --git a/tests/helpers.py b/tests/helpers.py
index f344e1c..63cbe2e 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -11,7 +11,6 @@ import time
import docker
import paramiko
import pytest
-import six
def make_tree(dirs, files):
@@ -54,7 +53,7 @@ def requires_api_version(version):
return pytest.mark.skipif(
docker.utils.version_lt(test_version, version),
- reason="API version is too low (< {0})".format(version)
+ reason=f"API version is too low (< {version})"
)
@@ -86,7 +85,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40):
def random_name():
- return u'dockerpytest_{0:x}'.format(random.getrandbits(64))
+ return f'dockerpytest_{random.getrandbits(64):x}'
def force_leave_swarm(client):
@@ -105,11 +104,11 @@ def force_leave_swarm(client):
def swarm_listen_addr():
- return '0.0.0.0:{0}'.format(random.randrange(10000, 25000))
+ return f'0.0.0.0:{random.randrange(10000, 25000)}'
def assert_cat_socket_detached_with_keys(sock, inputs):
- if six.PY3 and hasattr(sock, '_sock'):
+ if hasattr(sock, '_sock'):
sock = sock._sock
for i in inputs:
@@ -128,7 +127,7 @@ def assert_cat_socket_detached_with_keys(sock, inputs):
# of the daemon no longer cause this to raise an error.
try:
sock.sendall(b'make sure the socket is closed\n')
- except socket.error:
+ except OSError:
return
sock.sendall(b"make sure the socket is closed\n")
diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py
index b830a10..ef48e12 100644
--- a/tests/integration/api_build_test.py
+++ b/tests/integration/api_build_test.py
@@ -7,7 +7,6 @@ from docker import errors
from docker.utils.proxy import ProxyConfig
import pytest
-import six
from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental
@@ -71,9 +70,8 @@ class BuildTest(BaseAPIIntegrationTest):
assert len(logs) > 0
def test_build_from_stringio(self):
- if six.PY3:
- return
- script = io.StringIO(six.text_type('\n').join([
+ return
+ script = io.StringIO('\n'.join([
'FROM busybox',
'RUN mkdir -p /tmp/test',
'EXPOSE 8080',
@@ -83,8 +81,7 @@ class BuildTest(BaseAPIIntegrationTest):
stream = self.client.build(fileobj=script)
logs = ''
for chunk in stream:
- if six.PY3:
- chunk = chunk.decode('utf-8')
+ chunk = chunk.decode('utf-8')
logs += chunk
assert logs != ''
@@ -135,8 +132,7 @@ class BuildTest(BaseAPIIntegrationTest):
self.client.wait(c)
logs = self.client.logs(c)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert sorted(list(filter(None, logs.split('\n')))) == sorted([
'/test/#file.txt',
@@ -340,8 +336,7 @@ class BuildTest(BaseAPIIntegrationTest):
assert self.client.inspect_image(img_name)
ctnr = self.run_container(img_name, 'cat /hosts-file')
logs = self.client.logs(ctnr)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert '127.0.0.1\textrahost.local.test' in logs
assert '127.0.0.1\thello.world.test' in logs
@@ -376,7 +371,7 @@ class BuildTest(BaseAPIIntegrationTest):
snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
script = io.BytesIO(b'\n'.join([
b'FROM busybox',
- 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8')
+ f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8')
]))
stream = self.client.build(
@@ -440,7 +435,7 @@ class BuildTest(BaseAPIIntegrationTest):
@requires_api_version('1.32')
@requires_experimental(until=None)
def test_build_invalid_platform(self):
- script = io.BytesIO('FROM busybox\n'.encode('ascii'))
+ script = io.BytesIO(b'FROM busybox\n')
with pytest.raises(errors.APIError) as excinfo:
stream = self.client.build(fileobj=script, platform='foobar')
diff --git a/tests/integration/api_client_test.py b/tests/integration/api_client_test.py
index 9e348f3..d1622fa 100644
--- a/tests/integration/api_client_test.py
+++ b/tests/integration/api_client_test.py
@@ -72,6 +72,6 @@ class UnixconnTest(unittest.TestCase):
client.close()
del client
- assert len(w) == 0, "No warnings produced: {0}".format(
+ assert len(w) == 0, "No warnings produced: {}".format(
w[0].message
)
diff --git a/tests/integration/api_config_test.py b/tests/integration/api_config_test.py
index 0ffd767..72cbb43 100644
--- a/tests/integration/api_config_test.py
+++ b/tests/integration/api_config_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import docker
import pytest
@@ -31,7 +29,7 @@ class ConfigAPITest(BaseAPIIntegrationTest):
def test_create_config_unicode_data(self):
config_id = self.client.create_config(
- 'favorite_character', u'いざよいさくや'
+ 'favorite_character', 'いざよいさくや'
)
self.tmp_configs.append(config_id)
assert 'ID' in config_id
diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py
index 3087045..9da2cfb 100644
--- a/tests/integration/api_container_test.py
+++ b/tests/integration/api_container_test.py
@@ -34,7 +34,7 @@ class ListContainersTest(BaseAPIIntegrationTest):
assert len(retrieved) == 1
retrieved = retrieved[0]
assert 'Command' in retrieved
- assert retrieved['Command'] == str('true')
+ assert retrieved['Command'] == 'true'
assert 'Image' in retrieved
assert re.search(r'alpine:.*', retrieved['Image'])
assert 'Status' in retrieved
@@ -104,10 +104,10 @@ class CreateContainerTest(BaseAPIIntegrationTest):
assert self.client.wait(container3_id)['StatusCode'] == 0
logs = self.client.logs(container3_id).decode('utf-8')
- assert '{0}_NAME='.format(link_env_prefix1) in logs
- assert '{0}_ENV_FOO=1'.format(link_env_prefix1) in logs
- assert '{0}_NAME='.format(link_env_prefix2) in logs
- assert '{0}_ENV_FOO=1'.format(link_env_prefix2) in logs
+ assert f'{link_env_prefix1}_NAME=' in logs
+ assert f'{link_env_prefix1}_ENV_FOO=1' in logs
+ assert f'{link_env_prefix2}_NAME=' in logs
+ assert f'{link_env_prefix2}_ENV_FOO=1' in logs
def test_create_with_restart_policy(self):
container = self.client.create_container(
@@ -487,7 +487,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
)
class VolumeBindTest(BaseAPIIntegrationTest):
def setUp(self):
- super(VolumeBindTest, self).setUp()
+ super().setUp()
self.mount_dest = '/mnt'
@@ -618,7 +618,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
def test_get_file_archive_from_container(self):
data = 'The Maid and the Pocket Watch of Blood'
ctnr = self.client.create_container(
- TEST_IMG, 'sh -c "echo {0} > /vol1/data.txt"'.format(data),
+ TEST_IMG, f'sh -c "echo {data} > /vol1/data.txt"',
volumes=['/vol1']
)
self.tmp_containers.append(ctnr)
@@ -636,7 +636,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
def test_get_file_stat_from_container(self):
data = 'The Maid and the Pocket Watch of Blood'
ctnr = self.client.create_container(
- TEST_IMG, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data),
+ TEST_IMG, f'sh -c "echo -n {data} > /vol1/data.txt"',
volumes=['/vol1']
)
self.tmp_containers.append(ctnr)
@@ -655,7 +655,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
test_file.seek(0)
ctnr = self.client.create_container(
TEST_IMG,
- 'cat {0}'.format(
+ 'cat {}'.format(
os.path.join('/vol1/', os.path.basename(test_file.name))
),
volumes=['/vol1']
@@ -701,7 +701,7 @@ class RenameContainerTest(BaseAPIIntegrationTest):
if version == '1.5.0':
assert name == inspect['Name']
else:
- assert '/{0}'.format(name) == inspect['Name']
+ assert f'/{name}' == inspect['Name']
class StartContainerTest(BaseAPIIntegrationTest):
@@ -807,7 +807,7 @@ class LogsTest(BaseAPIIntegrationTest):
def test_logs(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo {0}'.format(snippet)
+ TEST_IMG, f'echo {snippet}'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -821,7 +821,7 @@ class LogsTest(BaseAPIIntegrationTest):
snippet = '''Line1
Line2'''
container = self.client.create_container(
- TEST_IMG, 'echo "{0}"'.format(snippet)
+ TEST_IMG, f'echo "{snippet}"'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -834,7 +834,7 @@ Line2'''
def test_logs_streaming_and_follow(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo {0}'.format(snippet)
+ TEST_IMG, f'echo {snippet}'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -854,7 +854,7 @@ Line2'''
def test_logs_streaming_and_follow_and_cancel(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'sh -c "echo \\"{0}\\" && sleep 3"'.format(snippet)
+ TEST_IMG, f'sh -c "echo \\"{snippet}\\" && sleep 3"'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -872,7 +872,7 @@ Line2'''
def test_logs_with_dict_instead_of_id(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo {0}'.format(snippet)
+ TEST_IMG, f'echo {snippet}'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -885,7 +885,7 @@ Line2'''
def test_logs_with_tail_0(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container(
- TEST_IMG, 'echo "{0}"'.format(snippet)
+ TEST_IMG, f'echo "{snippet}"'
)
id = container['Id']
self.tmp_containers.append(id)
@@ -899,7 +899,7 @@ Line2'''
def test_logs_with_until(self):
snippet = 'Shanghai Teahouse (Hong Meiling)'
container = self.client.create_container(
- TEST_IMG, 'echo "{0}"'.format(snippet)
+ TEST_IMG, f'echo "{snippet}"'
)
self.tmp_containers.append(container)
@@ -1095,7 +1095,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.top(container)
if not IS_WINDOWS_PLATFORM:
- assert res['Titles'] == [u'PID', u'USER', u'TIME', u'COMMAND']
+ assert res['Titles'] == ['PID', 'USER', 'TIME', 'COMMAND']
assert len(res['Processes']) == 1
assert res['Processes'][0][-1] == 'sleep 60'
self.client.kill(container)
@@ -1113,7 +1113,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container)
res = self.client.top(container, '-eopid,user')
- assert res['Titles'] == [u'PID', u'USER']
+ assert res['Titles'] == ['PID', 'USER']
assert len(res['Processes']) == 1
assert res['Processes'][0][10] == 'sleep 60'
@@ -1203,7 +1203,7 @@ class AttachContainerTest(BaseAPIIntegrationTest):
def test_run_container_reading_socket(self):
line = 'hi there and stuff and things, words!'
# `echo` appends CRLF, `printf` doesn't
- command = "printf '{0}'".format(line)
+ command = f"printf '{line}'"
container = self.client.create_container(TEST_IMG, command,
detach=True, tty=False)
self.tmp_containers.append(container)
@@ -1487,7 +1487,7 @@ class LinkTest(BaseAPIIntegrationTest):
# Remove link
linked_name = self.client.inspect_container(container2_id)['Name'][1:]
- link_name = '%s/%s' % (linked_name, link_alias)
+ link_name = f'{linked_name}/{link_alias}'
self.client.remove_container(link_name, link=True)
# Link is gone
diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py
index 554e862..4d7748f 100644
--- a/tests/integration/api_exec_test.py
+++ b/tests/integration/api_exec_test.py
@@ -239,7 +239,7 @@ class ExecDemuxTest(BaseAPIIntegrationTest):
)
def setUp(self):
- super(ExecDemuxTest, self).setUp()
+ super().setUp()
self.container = self.client.create_container(
TEST_IMG, 'cat', detach=True, stdin_open=True
)
diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py
index d5f8989..e30de46 100644
--- a/tests/integration/api_image_test.py
+++ b/tests/integration/api_image_test.py
@@ -265,7 +265,7 @@ class ImportImageTest(BaseAPIIntegrationTest):
output = self.client.load_image(data)
assert any([
line for line in output
- if 'Loaded image: {}'.format(test_img) in line.get('stream', '')
+ if f'Loaded image: {test_img}' in line.get('stream', '')
])
@contextlib.contextmanager
@@ -284,7 +284,7 @@ class ImportImageTest(BaseAPIIntegrationTest):
thread.setDaemon(True)
thread.start()
- yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1])
+ yield f'http://{socket.gethostname()}:{server.server_address[1]}'
server.shutdown()
@@ -350,7 +350,7 @@ class SaveLoadImagesTest(BaseAPIIntegrationTest):
result = self.client.load_image(f.read())
success = False
- result_line = 'Loaded image: {}\n'.format(TEST_IMG)
+ result_line = f'Loaded image: {TEST_IMG}\n'
for data in result:
print(data)
if 'stream' in data:
diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py
index af22da8..2568138 100644
--- a/tests/integration/api_network_test.py
+++ b/tests/integration/api_network_test.py
@@ -9,7 +9,7 @@ from .base import BaseAPIIntegrationTest, TEST_IMG
class TestNetworks(BaseAPIIntegrationTest):
def tearDown(self):
self.client.leave_swarm(force=True)
- super(TestNetworks, self).tearDown()
+ super().tearDown()
def create_network(self, *args, **kwargs):
net_name = random_name()
diff --git a/tests/integration/api_secret_test.py b/tests/integration/api_secret_test.py
index b3d93b8..fd98543 100644
--- a/tests/integration/api_secret_test.py
+++ b/tests/integration/api_secret_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import docker
import pytest
@@ -31,7 +29,7 @@ class SecretAPITest(BaseAPIIntegrationTest):
def test_create_secret_unicode_data(self):
secret_id = self.client.create_secret(
- 'favorite_character', u'いざよいさくや'
+ 'favorite_character', 'いざよいさくや'
)
self.tmp_secrets.append(secret_id)
assert 'ID' in secret_id
diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py
index 1bee46e..19a6f15 100644
--- a/tests/integration/api_service_test.py
+++ b/tests/integration/api_service_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import random
import time
@@ -30,10 +28,10 @@ class ServiceTest(BaseAPIIntegrationTest):
self.client.remove_service(service['ID'])
except docker.errors.APIError:
pass
- super(ServiceTest, self).tearDown()
+ super().tearDown()
def get_service_name(self):
- return 'dockerpytest_{0:x}'.format(random.getrandbits(64))
+ return f'dockerpytest_{random.getrandbits(64):x}'
def get_service_container(self, service_name, attempts=20, interval=0.5,
include_stopped=False):
@@ -54,7 +52,7 @@ class ServiceTest(BaseAPIIntegrationTest):
def create_simple_service(self, name=None, labels=None):
if name:
- name = 'dockerpytest_{0}'.format(name)
+ name = f'dockerpytest_{name}'
else:
name = self.get_service_name()
@@ -403,20 +401,20 @@ class ServiceTest(BaseAPIIntegrationTest):
node_id = self.client.nodes()[0]['ID']
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate(
- container_spec, placement=['node.id=={}'.format(node_id)]
+ container_spec, placement=[f'node.id=={node_id}']
)
name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id)
assert 'Placement' in svc_info['Spec']['TaskTemplate']
assert (svc_info['Spec']['TaskTemplate']['Placement'] ==
- {'Constraints': ['node.id=={}'.format(node_id)]})
+ {'Constraints': [f'node.id=={node_id}']})
def test_create_service_with_placement_object(self):
node_id = self.client.nodes()[0]['ID']
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
placemt = docker.types.Placement(
- constraints=['node.id=={}'.format(node_id)]
+ constraints=[f'node.id=={node_id}']
)
task_tmpl = docker.types.TaskTemplate(
container_spec, placement=placemt
@@ -508,7 +506,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
- self.fail('Invalid port specification: {0}'.format(port))
+ self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
@@ -670,14 +668,14 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /run/secrets/{0}'.format(secret_name)
+ container, f'cat /run/secrets/{secret_name}'
)
assert self.client.exec_start(exec_id) == secret_data
@requires_api_version('1.25')
def test_create_service_with_unicode_secret(self):
secret_name = 'favorite_touhou'
- secret_data = u'東方花映塚'
+ secret_data = '東方花映塚'
secret_id = self.client.create_secret(secret_name, secret_data)
self.tmp_secrets.append(secret_id)
secret_ref = docker.types.SecretReference(secret_id, secret_name)
@@ -695,7 +693,7 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /run/secrets/{0}'.format(secret_name)
+ container, f'cat /run/secrets/{secret_name}'
)
container_secret = self.client.exec_start(exec_id)
container_secret = container_secret.decode('utf-8')
@@ -722,14 +720,14 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /{0}'.format(config_name)
+ container, f'cat /{config_name}'
)
assert self.client.exec_start(exec_id) == config_data
@requires_api_version('1.30')
def test_create_service_with_unicode_config(self):
config_name = 'favorite_touhou'
- config_data = u'東方花映塚'
+ config_data = '東方花映塚'
config_id = self.client.create_config(config_name, config_data)
self.tmp_configs.append(config_id)
config_ref = docker.types.ConfigReference(config_id, config_name)
@@ -747,7 +745,7 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name)
assert container is not None
exec_id = self.client.exec_create(
- container, 'cat /{0}'.format(config_name)
+ container, f'cat /{config_name}'
)
container_config = self.client.exec_start(exec_id)
container_config = container_config.decode('utf-8')
@@ -1136,7 +1134,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
- self.fail('Invalid port specification: {0}'.format(port))
+ self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
@@ -1163,7 +1161,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp'
else:
- self.fail('Invalid port specification: {0}'.format(port))
+ self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3
diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py
index f1cbc26..48c0592 100644
--- a/tests/integration/api_swarm_test.py
+++ b/tests/integration/api_swarm_test.py
@@ -8,7 +8,7 @@ from .base import BaseAPIIntegrationTest
class SwarmTest(BaseAPIIntegrationTest):
def setUp(self):
- super(SwarmTest, self).setUp()
+ super().setUp()
force_leave_swarm(self.client)
self._unlock_key = None
@@ -19,7 +19,7 @@ class SwarmTest(BaseAPIIntegrationTest):
except docker.errors.APIError:
pass
force_leave_swarm(self.client)
- super(SwarmTest, self).tearDown()
+ super().tearDown()
@requires_api_version('1.24')
def test_init_swarm_simple(self):
diff --git a/tests/integration/base.py b/tests/integration/base.py
index a7613f6..031079c 100644
--- a/tests/integration/base.py
+++ b/tests/integration/base.py
@@ -75,11 +75,11 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
"""
def setUp(self):
- super(BaseAPIIntegrationTest, self).setUp()
+ super().setUp()
self.client = self.get_client_instance()
def tearDown(self):
- super(BaseAPIIntegrationTest, self).tearDown()
+ super().tearDown()
self.client.close()
@staticmethod
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index ec48835..ae94595 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
import sys
import warnings
@@ -17,11 +15,11 @@ def setup_test_session():
try:
c.inspect_image(TEST_IMG)
except docker.errors.NotFound:
- print("\npulling {0}".format(TEST_IMG), file=sys.stderr)
+ print(f"\npulling {TEST_IMG}", file=sys.stderr)
for data in c.pull(TEST_IMG, stream=True, decode=True):
status = data.get("status")
progress = data.get("progress")
- detail = "{0} - {1}".format(status, progress)
+ detail = f"{status} - {progress}"
print(detail, file=sys.stderr)
# Double make sure we now have busybox
diff --git a/tests/integration/credentials/store_test.py b/tests/integration/credentials/store_test.py
index dd543e2..d0cfd54 100644
--- a/tests/integration/credentials/store_test.py
+++ b/tests/integration/credentials/store_test.py
@@ -3,7 +3,6 @@ import random
import sys
import pytest
-import six
from distutils.spawn import find_executable
from docker.credentials import (
@@ -12,7 +11,7 @@ from docker.credentials import (
)
-class TestStore(object):
+class TestStore:
def teardown_method(self):
for server in self.tmp_keys:
try:
@@ -33,7 +32,7 @@ class TestStore(object):
self.store = Store(DEFAULT_OSX_STORE)
def get_random_servername(self):
- res = 'pycreds_test_{:x}'.format(random.getrandbits(32))
+ res = f'pycreds_test_{random.getrandbits(32):x}'
self.tmp_keys.append(res)
return res
@@ -61,7 +60,7 @@ class TestStore(object):
def test_unicode_strings(self):
key = self.get_random_servername()
- key = six.u(key)
+ key = key
self.store.store(server=key, username='user', secret='pass')
data = self.store.get(key)
assert data
diff --git a/tests/integration/credentials/utils_test.py b/tests/integration/credentials/utils_test.py
index ad55f32..d7b2a1a 100644
--- a/tests/integration/credentials/utils_test.py
+++ b/tests/integration/credentials/utils_test.py
@@ -5,7 +5,7 @@ from docker.credentials.utils import create_environment_dict
try:
from unittest import mock
except ImportError:
- import mock
+ from unittest import mock
@mock.patch.dict(os.environ)
diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py
index 0d60f37..94aa201 100644
--- a/tests/integration/models_images_test.py
+++ b/tests/integration/models_images_test.py
@@ -13,8 +13,8 @@ class ImageCollectionTest(BaseIntegrationTest):
def test_build(self):
client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build(fileobj=io.BytesIO(
- "FROM alpine\n"
- "CMD echo hello world".encode('ascii')
+ b"FROM alpine\n"
+ b"CMD echo hello world"
))
self.tmp_imgs.append(image.id)
assert client.containers.run(image) == b"hello world\n"
@@ -24,8 +24,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION)
with pytest.raises(docker.errors.BuildError) as cm:
client.images.build(fileobj=io.BytesIO(
- "FROM alpine\n"
- "RUN exit 1".encode('ascii')
+ b"FROM alpine\n"
+ b"RUN exit 1"
))
assert (
"The command '/bin/sh -c exit 1' returned a non-zero code: 1"
@@ -36,8 +36,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build(
tag='some-tag', fileobj=io.BytesIO(
- "FROM alpine\n"
- "CMD echo hello world".encode('ascii')
+ b"FROM alpine\n"
+ b"CMD echo hello world"
)
)
self.tmp_imgs.append(image.id)
@@ -47,8 +47,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build(
tag='dup-txt-tag', fileobj=io.BytesIO(
- "FROM alpine\n"
- "CMD echo Successfully built abcd1234".encode('ascii')
+ b"FROM alpine\n"
+ b"CMD echo Successfully built abcd1234"
)
)
self.tmp_imgs.append(image.id)
@@ -119,7 +119,7 @@ class ImageCollectionTest(BaseIntegrationTest):
self.tmp_imgs.append(additional_tag)
image.reload()
with tempfile.TemporaryFile() as f:
- stream = image.save(named='{}:latest'.format(additional_tag))
+ stream = image.save(named=f'{additional_tag}:latest')
for chunk in stream:
f.write(chunk)
@@ -129,7 +129,7 @@ class ImageCollectionTest(BaseIntegrationTest):
assert len(result) == 1
assert result[0].id == image.id
- assert '{}:latest'.format(additional_tag) in result[0].tags
+ assert f'{additional_tag}:latest' in result[0].tags
def test_save_name_error(self):
client = docker.from_env(version=TEST_API_VERSION)
@@ -143,7 +143,7 @@ class ImageTest(BaseIntegrationTest):
def test_tag_and_remove(self):
repo = 'dockersdk.tests.images.test_tag'
tag = 'some-tag'
- identifier = '{}:{}'.format(repo, tag)
+ identifier = f'{repo}:{tag}'
client = docker.from_env(version=TEST_API_VERSION)
image = client.images.pull('alpine:latest')
diff --git a/tests/integration/regression_test.py b/tests/integration/regression_test.py
index a63883c..deb9aff 100644
--- a/tests/integration/regression_test.py
+++ b/tests/integration/regression_test.py
@@ -2,7 +2,6 @@ import io
import random
import docker
-import six
from .base import BaseAPIIntegrationTest, TEST_IMG
import pytest
@@ -39,8 +38,7 @@ class TestRegressions(BaseAPIIntegrationTest):
self.client.start(ctnr)
self.client.wait(ctnr)
logs = self.client.logs(ctnr)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert logs == '1000\n'
def test_792_explicit_port_protocol(self):
@@ -56,10 +54,10 @@ class TestRegressions(BaseAPIIntegrationTest):
self.client.start(ctnr)
assert self.client.port(
ctnr, 2000
- )[0]['HostPort'] == six.text_type(tcp_port)
+ )[0]['HostPort'] == str(tcp_port)
assert self.client.port(
ctnr, '2000/tcp'
- )[0]['HostPort'] == six.text_type(tcp_port)
+ )[0]['HostPort'] == str(tcp_port)
assert self.client.port(
ctnr, '2000/udp'
- )[0]['HostPort'] == six.text_type(udp_port)
+ )[0]['HostPort'] == str(udp_port)
diff --git a/tests/ssh/api_build_test.py b/tests/ssh/api_build_test.py
index b830a10..ef48e12 100644
--- a/tests/ssh/api_build_test.py
+++ b/tests/ssh/api_build_test.py
@@ -7,7 +7,6 @@ from docker import errors
from docker.utils.proxy import ProxyConfig
import pytest
-import six
from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental
@@ -71,9 +70,8 @@ class BuildTest(BaseAPIIntegrationTest):
assert len(logs) > 0
def test_build_from_stringio(self):
- if six.PY3:
- return
- script = io.StringIO(six.text_type('\n').join([
+ return
+ script = io.StringIO('\n'.join([
'FROM busybox',
'RUN mkdir -p /tmp/test',
'EXPOSE 8080',
@@ -83,8 +81,7 @@ class BuildTest(BaseAPIIntegrationTest):
stream = self.client.build(fileobj=script)
logs = ''
for chunk in stream:
- if six.PY3:
- chunk = chunk.decode('utf-8')
+ chunk = chunk.decode('utf-8')
logs += chunk
assert logs != ''
@@ -135,8 +132,7 @@ class BuildTest(BaseAPIIntegrationTest):
self.client.wait(c)
logs = self.client.logs(c)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert sorted(list(filter(None, logs.split('\n')))) == sorted([
'/test/#file.txt',
@@ -340,8 +336,7 @@ class BuildTest(BaseAPIIntegrationTest):
assert self.client.inspect_image(img_name)
ctnr = self.run_container(img_name, 'cat /hosts-file')
logs = self.client.logs(ctnr)
- if six.PY3:
- logs = logs.decode('utf-8')
+ logs = logs.decode('utf-8')
assert '127.0.0.1\textrahost.local.test' in logs
assert '127.0.0.1\thello.world.test' in logs
@@ -376,7 +371,7 @@ class BuildTest(BaseAPIIntegrationTest):
snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
script = io.BytesIO(b'\n'.join([
b'FROM busybox',
- 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8')
+ f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8')
]))
stream = self.client.build(
@@ -440,7 +435,7 @@ class BuildTest(BaseAPIIntegrationTest):
@requires_api_version('1.32')
@requires_experimental(until=None)
def test_build_invalid_platform(self):
- script = io.BytesIO('FROM busybox\n'.encode('ascii'))
+ script = io.BytesIO(b'FROM busybox\n')
with pytest.raises(errors.APIError) as excinfo:
stream = self.client.build(fileobj=script, platform='foobar')
diff --git a/tests/ssh/base.py b/tests/ssh/base.py
index c723d82..4825227 100644
--- a/tests/ssh/base.py
+++ b/tests/ssh/base.py
@@ -79,7 +79,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
cls.client.pull(TEST_IMG)
def tearDown(self):
- super(BaseAPIIntegrationTest, self).tearDown()
+ super().tearDown()
self.client.close()
@staticmethod
diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py
index 8a0577e..1ebd37d 100644
--- a/tests/unit/api_container_test.py
+++ b/tests/unit/api_container_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import datetime
import json
import signal
@@ -7,7 +5,6 @@ import signal
import docker
from docker.api import APIClient
import pytest
-import six
from . import fake_api
from ..helpers import requires_api_version
@@ -19,7 +16,7 @@ from .api_test import (
try:
from unittest import mock
except ImportError:
- import mock
+ from unittest import mock
def fake_inspect_container_tty(self, container):
@@ -771,7 +768,7 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_device_requests(self):
client = APIClient(version='1.40')
fake_api.fake_responses.setdefault(
- '{0}/v1.40/containers/create'.format(fake_api.prefix),
+ f'{fake_api.prefix}/v1.40/containers/create',
fake_api.post_fake_create_container,
)
client.create_container(
@@ -831,8 +828,8 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_labels_dict(self):
labels_dict = {
- six.text_type('foo'): six.text_type('1'),
- six.text_type('bar'): six.text_type('2'),
+ 'foo': '1',
+ 'bar': '2',
}
self.client.create_container(
@@ -848,12 +845,12 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_labels_list(self):
labels_list = [
- six.text_type('foo'),
- six.text_type('bar'),
+ 'foo',
+ 'bar',
]
labels_dict = {
- six.text_type('foo'): six.text_type(),
- six.text_type('bar'): six.text_type(),
+ 'foo': '',
+ 'bar': '',
}
self.client.create_container(
@@ -1013,11 +1010,11 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_unicode_envvars(self):
envvars_dict = {
- 'foo': u'☃',
+ 'foo': '☃',
}
expected = [
- u'foo=☃'
+ 'foo=☃'
]
self.client.create_container(
@@ -1138,7 +1135,7 @@ class ContainerTest(BaseAPIClientTest):
stream=False
)
- assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
+ assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n'
def test_logs_with_dict_instead_of_id(self):
with mock.patch('docker.api.client.APIClient.inspect_container',
@@ -1154,7 +1151,7 @@ class ContainerTest(BaseAPIClientTest):
stream=False
)
- assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii')
+ assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n'
def test_log_streaming(self):
with mock.patch('docker.api.client.APIClient.inspect_container',
diff --git a/tests/unit/api_exec_test.py b/tests/unit/api_exec_test.py
index a9d2dd5..4504250 100644
--- a/tests/unit/api_exec_test.py
+++ b/tests/unit/api_exec_test.py
@@ -11,7 +11,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1'])
args = fake_request.call_args
- assert 'POST' == args[0][0], url_prefix + 'containers/{0}/exec'.format(
+ assert 'POST' == args[0][0], url_prefix + 'containers/{}/exec'.format(
fake_api.FAKE_CONTAINER_ID
)
@@ -32,7 +32,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'exec/{0}/start'.format(
+ assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID
)
@@ -51,7 +51,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'exec/{0}/start'.format(
+ assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID
)
@@ -68,7 +68,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_inspect(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
- assert args[0][1] == url_prefix + 'exec/{0}/json'.format(
+ assert args[0][1] == url_prefix + 'exec/{}/json'.format(
fake_api.FAKE_EXEC_ID
)
@@ -77,7 +77,7 @@ class ExecTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
- url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID),
+ url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize',
params={'h': 20, 'w': 60},
timeout=DEFAULT_TIMEOUT_SECONDS
)
diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py
index 0b60df4..843c11b 100644
--- a/tests/unit/api_image_test.py
+++ b/tests/unit/api_image_test.py
@@ -11,7 +11,7 @@ from .api_test import (
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from unittest import mock
class ImageTest(BaseAPIClientTest):
diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py
index 758f013..84d6544 100644
--- a/tests/unit/api_network_test.py
+++ b/tests/unit/api_network_test.py
@@ -1,14 +1,12 @@
import json
-import six
-
from .api_test import BaseAPIClientTest, url_prefix, response
from docker.types import IPAMConfig, IPAMPool
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from unittest import mock
class NetworkTest(BaseAPIClientTest):
@@ -103,16 +101,16 @@ class NetworkTest(BaseAPIClientTest):
self.client.remove_network(network_id)
args = delete.call_args
- assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id)
+ assert args[0][0] == url_prefix + f'networks/{network_id}'
def test_inspect_network(self):
network_id = 'abc12345'
network_name = 'foo'
network_data = {
- six.u('name'): network_name,
- six.u('id'): network_id,
- six.u('driver'): 'bridge',
- six.u('containers'): {},
+ 'name': network_name,
+ 'id': network_id,
+ 'driver': 'bridge',
+ 'containers': {},
}
network_response = response(status_code=200, content=network_data)
@@ -123,7 +121,7 @@ class NetworkTest(BaseAPIClientTest):
assert result == network_data
args = get.call_args
- assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id)
+ assert args[0][0] == url_prefix + f'networks/{network_id}'
def test_connect_container_to_network(self):
network_id = 'abc12345'
@@ -141,7 +139,7 @@ class NetworkTest(BaseAPIClientTest):
)
assert post.call_args[0][0] == (
- url_prefix + 'networks/{0}/connect'.format(network_id)
+ url_prefix + f'networks/{network_id}/connect'
)
assert json.loads(post.call_args[1]['data']) == {
@@ -164,7 +162,7 @@ class NetworkTest(BaseAPIClientTest):
container={'Id': container_id}, net_id=network_id)
assert post.call_args[0][0] == (
- url_prefix + 'networks/{0}/disconnect'.format(network_id)
+ url_prefix + f'networks/{network_id}/disconnect'
)
assert json.loads(post.call_args[1]['data']) == {
'Container': container_id
diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py
index cb14b74..dfc3816 100644
--- a/tests/unit/api_test.py
+++ b/tests/unit/api_test.py
@@ -10,11 +10,12 @@ import tempfile
import threading
import time
import unittest
+import socketserver
+import http.server
import docker
import pytest
import requests
-import six
from docker.api import APIClient
from docker.constants import DEFAULT_DOCKER_API_VERSION
from requests.packages import urllib3
@@ -24,7 +25,7 @@ from . import fake_api
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from unittest import mock
DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
@@ -34,7 +35,7 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
request=None, raw=None):
res = requests.Response()
res.status_code = status_code
- if not isinstance(content, six.binary_type):
+ if not isinstance(content, bytes):
content = json.dumps(content).encode('ascii')
res._content = content
res.headers = requests.structures.CaseInsensitiveDict(headers or {})
@@ -60,7 +61,7 @@ def fake_resp(method, url, *args, **kwargs):
elif (url, method) in fake_api.fake_responses:
key = (url, method)
if not key:
- raise Exception('{0} {1}'.format(method, url))
+ raise Exception(f'{method} {url}')
status_code, content = fake_api.fake_responses[key]()
return response(status_code=status_code, content=content)
@@ -85,11 +86,11 @@ def fake_delete(self, url, *args, **kwargs):
def fake_read_from_socket(self, response, stream, tty=False, demux=False):
- return six.binary_type()
+ return bytes()
-url_base = '{0}/'.format(fake_api.prefix)
-url_prefix = '{0}v{1}/'.format(
+url_base = f'{fake_api.prefix}/'
+url_prefix = '{}v{}/'.format(
url_base,
docker.constants.DEFAULT_DOCKER_API_VERSION)
@@ -133,20 +134,20 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename')
- assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world')
+ assert url == '{}{}'.format(url_prefix, 'hello/somename/world')
url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername'
)
- assert url == '{0}{1}'.format(
+ assert url == '{}{}'.format(
url_prefix, 'hello/somename/world/someothername'
)
url = self.client._url('/hello/{0}/world', 'some?name')
- assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world')
+ assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world')
url = self.client._url("/images/{0}/push", "localhost:5000/image")
- assert url == '{0}{1}'.format(
+ assert url == '{}{}'.format(
url_prefix, 'images/localhost:5000/image/push'
)
@@ -156,13 +157,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_no_resource(self):
url = self.client._url('/simple')
- assert url == '{0}{1}'.format(url_prefix, 'simple')
+ assert url == '{}{}'.format(url_prefix, 'simple')
def test_url_unversioned_api(self):
url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False
)
- assert url == '{0}{1}'.format(url_base, 'hello/somename/world')
+ assert url == '{}{}'.format(url_base, 'hello/somename/world')
def test_version(self):
self.client.version()
@@ -184,13 +185,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_retrieve_server_version(self):
client = APIClient(version="auto")
- assert isinstance(client._version, six.string_types)
+ assert isinstance(client._version, str)
assert not (client._version == "auto")
client.close()
def test_auto_retrieve_server_version(self):
version = self.client._retrieve_server_version()
- assert isinstance(version, six.string_types)
+ assert isinstance(version, str)
def test_info(self):
self.client.info()
@@ -337,8 +338,7 @@ class DockerApiTest(BaseAPIClientTest):
def test_stream_helper_decoding(self):
status_code, content = fake_api.fake_responses[url_prefix + 'events']()
content_str = json.dumps(content)
- if six.PY3:
- content_str = content_str.encode('utf-8')
+ content_str = content_str.encode('utf-8')
body = io.BytesIO(content_str)
# mock a stream interface
@@ -405,7 +405,7 @@ class UnixSocketStreamTest(unittest.TestCase):
while not self.stop_server:
try:
connection, client_address = self.server_socket.accept()
- except socket.error:
+ except OSError:
# Probably no connection to accept yet
time.sleep(0.01)
continue
@@ -489,7 +489,7 @@ class TCPSocketStreamTest(unittest.TestCase):
@classmethod
def setup_class(cls):
- cls.server = six.moves.socketserver.ThreadingTCPServer(
+ cls.server = socketserver.ThreadingTCPServer(
('', 0), cls.get_handler_class())
cls.thread = threading.Thread(target=cls.server.serve_forever)
cls.thread.setDaemon(True)
@@ -508,7 +508,7 @@ class TCPSocketStreamTest(unittest.TestCase):
stdout_data = cls.stdout_data
stderr_data = cls.stderr_data
- class Handler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler, object):
+ class Handler(http.server.BaseHTTPRequestHandler):
def do_POST(self):
resp_data = self.get_resp_data()
self.send_response(101)
@@ -534,7 +534,7 @@ class TCPSocketStreamTest(unittest.TestCase):
data += stderr_data
return data
else:
- raise Exception('Unknown path {0}'.format(path))
+ raise Exception(f'Unknown path {path}')
@staticmethod
def frame_header(stream, data):
@@ -632,7 +632,7 @@ class UserAgentTest(unittest.TestCase):
class DisableSocketTest(unittest.TestCase):
- class DummySocket(object):
+ class DummySocket:
def __init__(self, timeout=60):
self.timeout = timeout
diff --git a/tests/unit/api_volume_test.py b/tests/unit/api_volume_test.py
index 7850c22..a8d9193 100644
--- a/tests/unit/api_volume_test.py
+++ b/tests/unit/api_volume_test.py
@@ -104,7 +104,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'GET'
- assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name)
+ assert args[0][1] == f'{url_prefix}volumes/{name}'
def test_remove_volume(self):
name = 'perfectcherryblossom'
@@ -112,4 +112,4 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'DELETE'
- assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name)
+ assert args[0][1] == f'{url_prefix}volumes/{name}'
diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py
index aac8910..8bd2e16 100644
--- a/tests/unit/auth_test.py
+++ b/tests/unit/auth_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import base64
import json
import os
@@ -15,7 +13,7 @@ import pytest
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from unittest import mock
class RegressionTest(unittest.TestCase):
@@ -239,7 +237,7 @@ class LoadConfigTest(unittest.TestCase):
cfg_path = os.path.join(folder, '.dockercfg')
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
with open(cfg_path, 'w') as f:
- f.write('auth = {0}\n'.format(auth_))
+ f.write(f'auth = {auth_}\n')
f.write('email = sakuya@scarlet.net')
cfg = auth.load_config(cfg_path)
@@ -297,13 +295,13 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder,
- '.{0}.dockercfg'.format(
+ '.{}.dockercfg'.format(
random.randrange(100000)))
registry = 'https://your.private.registry.io'
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -329,7 +327,7 @@ class LoadConfigTest(unittest.TestCase):
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -357,7 +355,7 @@ class LoadConfigTest(unittest.TestCase):
config = {
'auths': {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -386,7 +384,7 @@ class LoadConfigTest(unittest.TestCase):
config = {
'auths': {
registry: {
- 'auth': '{0}'.format(auth_),
+ 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net'
}
}
@@ -794,9 +792,9 @@ class InMemoryStore(credentials.Store):
}
def list(self):
- return dict(
- [(k, v['Username']) for k, v in self.__store.items()]
- )
+ return {
+ k: v['Username'] for k, v in self.__store.items()
+ }
def erase(self, server):
del self.__store[server]
diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py
index ad88e84..d647d3a 100644
--- a/tests/unit/client_test.py
+++ b/tests/unit/client_test.py
@@ -15,7 +15,7 @@ from . import fake_api
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from unittest import mock
TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs')
POOL_SIZE = 20
diff --git a/tests/unit/dockertypes_test.py b/tests/unit/dockertypes_test.py
index 0689d07..a0a171b 100644
--- a/tests/unit/dockertypes_test.py
+++ b/tests/unit/dockertypes_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import unittest
import pytest
@@ -15,7 +13,7 @@ from docker.types.services import convert_service_ports
-try:
-    from unittest import mock
-except:  # noqa: E722
-    import mock
+from unittest import mock
def create_host_config(*args, **kwargs):
diff --git a/tests/unit/errors_test.py b/tests/unit/errors_test.py
index 54c2ba8..f8c3a66 100644
--- a/tests/unit/errors_test.py
+++ b/tests/unit/errors_test.py
@@ -126,7 +126,7 @@ class ContainerErrorTest(unittest.TestCase):
err = ContainerError(container, exit_status, command, image, stderr)
msg = ("Command '{}' in image '{}' returned non-zero exit status {}"
- ).format(command, image, exit_status, stderr)
+ ).format(command, image, exit_status)
assert str(err) == msg
def test_container_with_stderr(self):
diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py
index 4fd4d11..4c93329 100644
--- a/tests/unit/fake_api.py
+++ b/tests/unit/fake_api.py
@@ -2,7 +2,7 @@ from docker import constants
from . import fake_stat
-CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION)
+CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}'
FAKE_CONTAINER_ID = '3cc2351ab11b'
FAKE_IMAGE_ID = 'e9aa60c60128'
@@ -526,96 +526,96 @@ if constants.IS_WINDOWS_PLATFORM:
prefix = 'http+docker://localnpipe'
fake_responses = {
- '{0}/version'.format(prefix):
+ f'{prefix}/version':
get_fake_version,
- '{1}/{0}/version'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/version':
get_fake_version,
- '{1}/{0}/info'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/info':
get_fake_info,
- '{1}/{0}/auth'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/auth':
post_fake_auth,
- '{1}/{0}/_ping'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/_ping':
get_fake_ping,
- '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/search':
get_fake_search,
- '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/json':
get_fake_images,
- '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/history':
get_fake_image_history,
- '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_import_image,
- '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/json':
get_fake_containers,
- '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start':
post_fake_start_container,
- '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize':
post_fake_resize_container,
- '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json':
get_fake_inspect_container,
- '{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename':
post_fake_rename_container,
- '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag':
post_fake_tag_image,
- '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait':
get_fake_wait,
- '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs':
get_fake_logs,
- '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes':
get_fake_diff,
- '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export':
get_fake_export,
- '{1}/{0}/containers/3cc2351ab11b/update'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update':
post_fake_update_container,
- '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec':
post_fake_exec_create,
- '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start':
post_fake_exec_start,
- '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json':
get_fake_exec_inspect,
- '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize':
post_fake_exec_resize,
- '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats':
get_fake_stats,
- '{1}/{0}/containers/3cc2351ab11b/top'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top':
get_fake_top,
- '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop':
post_fake_stop_container,
- '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill':
post_fake_kill_container,
- '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause':
post_fake_pause_container,
- '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause':
post_fake_unpause_container,
- '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart':
post_fake_restart_container,
- '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b':
delete_fake_remove_container,
- '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_image_create,
- '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128':
delete_fake_remove_image,
- '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get':
get_fake_get_image,
- '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/load':
post_fake_load_image,
- '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/json':
get_fake_inspect_image,
- '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/insert':
get_fake_insert_image,
- '{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/images/test_image/push':
post_fake_push,
- '{1}/{0}/commit'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/commit':
post_fake_commit,
- '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/containers/create':
post_fake_create_container,
- '{1}/{0}/build'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/build':
post_fake_build_container,
- '{1}/{0}/events'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/events':
get_fake_events,
- ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'):
+ (f'{prefix}/{CURRENT_VERSION}/volumes', 'GET'):
get_fake_volume_list,
- ('{1}/{0}/volumes/create'.format(CURRENT_VERSION, prefix), 'POST'):
+ (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
get_fake_volume,
('{1}/{0}/volumes/{2}'.format(
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
@@ -629,11 +629,11 @@ fake_responses = {
CURRENT_VERSION, prefix, FAKE_NODE_ID
), 'POST'):
post_fake_update_node,
- ('{1}/{0}/swarm/join'.format(CURRENT_VERSION, prefix), 'POST'):
+ (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
post_fake_join_swarm,
- ('{1}/{0}/networks'.format(CURRENT_VERSION, prefix), 'GET'):
+ (f'{prefix}/{CURRENT_VERSION}/networks', 'GET'):
get_fake_network_list,
- ('{1}/{0}/networks/create'.format(CURRENT_VERSION, prefix), 'POST'):
+ (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
post_fake_network,
('{1}/{0}/networks/{2}'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
@@ -651,6 +651,6 @@ fake_responses = {
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
post_fake_network_disconnect,
- '{1}/{0}/secrets/create'.format(CURRENT_VERSION, prefix):
+ f'{prefix}/{CURRENT_VERSION}/secrets/create':
post_fake_secret,
}
diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py
index 5825b6e..1663ef1 100644
--- a/tests/unit/fake_api_client.py
+++ b/tests/unit/fake_api_client.py
@@ -7,7 +7,7 @@ from . import fake_api
-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from unittest import mock
class CopyReturnMagicMock(mock.MagicMock):
@@ -15,7 +15,7 @@ class CopyReturnMagicMock(mock.MagicMock):
A MagicMock which deep copies every return value.
"""
def _mock_call(self, *args, **kwargs):
- ret = super(CopyReturnMagicMock, self)._mock_call(*args, **kwargs)
+ ret = super()._mock_call(*args, **kwargs)
if isinstance(ret, (dict, list)):
ret = copy.deepcopy(ret)
return ret
diff --git a/tests/unit/models_resources_test.py b/tests/unit/models_resources_test.py
index 5af24ee..11dea29 100644
--- a/tests/unit/models_resources_test.py
+++ b/tests/unit/models_resources_test.py
@@ -16,7 +16,7 @@ class ModelTest(unittest.TestCase):
def test_hash(self):
client = make_fake_client()
container1 = client.containers.get(FAKE_CONTAINER_ID)
- my_set = set([container1])
+ my_set = {container1}
assert len(my_set) == 1
container2 = client.containers.get(FAKE_CONTAINER_ID)
diff --git a/tests/unit/models_secrets_test.py b/tests/unit/models_secrets_test.py
index 4ccf4c6..1c261a8 100644
--- a/tests/unit/models_secrets_test.py
+++ b/tests/unit/models_secrets_test.py
@@ -8,4 +8,4 @@ class CreateServiceTest(unittest.TestCase):
def test_secrets_repr(self):
client = make_fake_client()
secret = client.secrets.create(name="super_secret", data="secret")
- assert secret.__repr__() == "<Secret: '{}'>".format(FAKE_SECRET_NAME)
+ assert secret.__repr__() == f"<Secret: '{FAKE_SECRET_NAME}'>"
diff --git a/tests/unit/models_services_test.py b/tests/unit/models_services_test.py
index 07bb589..b9192e4 100644
--- a/tests/unit/models_services_test.py
+++ b/tests/unit/models_services_test.py
@@ -40,10 +40,10 @@ class CreateServiceKwargsTest(unittest.TestCase):
'update_config': {'update': 'config'},
'endpoint_spec': {'blah': 'blah'},
}
- assert set(task_template.keys()) == set([
+ assert set(task_template.keys()) == {
'ContainerSpec', 'Resources', 'RestartPolicy', 'Placement',
'LogDriver', 'Networks'
- ])
+ }
assert task_template['Placement'] == {
'Constraints': ['foo=bar'],
'Preferences': ['bar=baz'],
@@ -55,7 +55,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
'Options': {'foo': 'bar'}
}
assert task_template['Networks'] == [{'Target': 'somenet'}]
- assert set(task_template['ContainerSpec'].keys()) == set([
+ assert set(task_template['ContainerSpec'].keys()) == {
'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User',
'Labels', 'Mounts', 'StopGracePeriod'
- ])
+ }
diff --git a/tests/unit/ssladapter_test.py b/tests/unit/ssladapter_test.py
index 73b7336..41a87f2 100644
--- a/tests/unit/ssladapter_test.py
+++ b/tests/unit/ssladapter_test.py
@@ -32,30 +32,30 @@ class SSLAdapterTest(unittest.TestCase):
class MatchHostnameTest(unittest.TestCase):
cert = {
'issuer': (
- (('countryName', u'US'),),
- (('stateOrProvinceName', u'California'),),
- (('localityName', u'San Francisco'),),
- (('organizationName', u'Docker Inc'),),
- (('organizationalUnitName', u'Docker-Python'),),
- (('commonName', u'localhost'),),
- (('emailAddress', u'info@docker.com'),)
+ (('countryName', 'US'),),
+ (('stateOrProvinceName', 'California'),),
+ (('localityName', 'San Francisco'),),
+ (('organizationName', 'Docker Inc'),),
+ (('organizationalUnitName', 'Docker-Python'),),
+ (('commonName', 'localhost'),),
+ (('emailAddress', 'info@docker.com'),)
),
'notAfter': 'Mar 25 23:08:23 2030 GMT',
- 'notBefore': u'Mar 25 23:08:23 2016 GMT',
- 'serialNumber': u'BD5F894C839C548F',
+ 'notBefore': 'Mar 25 23:08:23 2016 GMT',
+ 'serialNumber': 'BD5F894C839C548F',
'subject': (
- (('countryName', u'US'),),
- (('stateOrProvinceName', u'California'),),
- (('localityName', u'San Francisco'),),
- (('organizationName', u'Docker Inc'),),
- (('organizationalUnitName', u'Docker-Python'),),
- (('commonName', u'localhost'),),
- (('emailAddress', u'info@docker.com'),)
+ (('countryName', 'US'),),
+ (('stateOrProvinceName', 'California'),),
+ (('localityName', 'San Francisco'),),
+ (('organizationName', 'Docker Inc'),),
+ (('organizationalUnitName', 'Docker-Python'),),
+ (('commonName', 'localhost'),),
+ (('emailAddress', 'info@docker.com'),)
),
'subjectAltName': (
- ('DNS', u'localhost'),
- ('DNS', u'*.gensokyo.jp'),
- ('IP Address', u'127.0.0.1'),
+ ('DNS', 'localhost'),
+ ('DNS', '*.gensokyo.jp'),
+ ('IP Address', '127.0.0.1'),
),
'version': 3
}
diff --git a/tests/unit/swarm_test.py b/tests/unit/swarm_test.py
index 4385380..aee1b9e 100644
--- a/tests/unit/swarm_test.py
+++ b/tests/unit/swarm_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import json
from . import fake_api
diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py
index bc6fb5f..9f18388 100644
--- a/tests/unit/utils_build_test.py
+++ b/tests/unit/utils_build_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import os
import os.path
import shutil
@@ -82,7 +80,7 @@ class ExcludePathsTest(unittest.TestCase):
assert sorted(paths) == sorted(set(paths))
def test_wildcard_exclude(self):
- assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore'])
+ assert self.exclude(['*']) == {'Dockerfile', '.dockerignore'}
def test_exclude_dockerfile_dockerignore(self):
"""
@@ -99,18 +97,18 @@ class ExcludePathsTest(unittest.TestCase):
If we're using a custom Dockerfile, make sure that's not
excluded.
"""
- assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set(
- ['Dockerfile.alt', '.dockerignore']
- )
+ assert self.exclude(['*'], dockerfile='Dockerfile.alt') == {
+ 'Dockerfile.alt', '.dockerignore'
+ }
assert self.exclude(
['*'], dockerfile='foo/Dockerfile3'
- ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
+ ) == convert_paths({'foo/Dockerfile3', '.dockerignore'})
# https://github.com/docker/docker-py/issues/1956
assert self.exclude(
['*'], dockerfile='./foo/Dockerfile3'
- ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore']))
+ ) == convert_paths({'foo/Dockerfile3', '.dockerignore'})
def test_exclude_dockerfile_child(self):
includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3')
@@ -119,56 +117,56 @@ class ExcludePathsTest(unittest.TestCase):
def test_single_filename(self):
assert self.exclude(['a.py']) == convert_paths(
- self.all_paths - set(['a.py'])
+ self.all_paths - {'a.py'}
)
def test_single_filename_leading_dot_slash(self):
assert self.exclude(['./a.py']) == convert_paths(
- self.all_paths - set(['a.py'])
+ self.all_paths - {'a.py'}
)
# As odd as it sounds, a filename pattern with a trailing slash on the
# end *will* result in that file being excluded.
def test_single_filename_trailing_slash(self):
assert self.exclude(['a.py/']) == convert_paths(
- self.all_paths - set(['a.py'])
+ self.all_paths - {'a.py'}
)
def test_wildcard_filename_start(self):
assert self.exclude(['*.py']) == convert_paths(
- self.all_paths - set(['a.py', 'b.py', 'cde.py'])
+ self.all_paths - {'a.py', 'b.py', 'cde.py'}
)
def test_wildcard_with_exception(self):
assert self.exclude(['*.py', '!b.py']) == convert_paths(
- self.all_paths - set(['a.py', 'cde.py'])
+ self.all_paths - {'a.py', 'cde.py'}
)
def test_wildcard_with_wildcard_exception(self):
assert self.exclude(['*.*', '!*.go']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'a.py', 'b.py', 'cde.py', 'Dockerfile.alt',
- ])
+ }
)
def test_wildcard_filename_end(self):
assert self.exclude(['a.*']) == convert_paths(
- self.all_paths - set(['a.py', 'a.go'])
+ self.all_paths - {'a.py', 'a.go'}
)
def test_question_mark(self):
assert self.exclude(['?.py']) == convert_paths(
- self.all_paths - set(['a.py', 'b.py'])
+ self.all_paths - {'a.py', 'b.py'}
)
def test_single_subdir_single_filename(self):
assert self.exclude(['foo/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
+ self.all_paths - {'foo/a.py'}
)
def test_single_subdir_single_filename_leading_slash(self):
assert self.exclude(['/foo/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
+ self.all_paths - {'foo/a.py'}
)
def test_exclude_include_absolute_path(self):
@@ -176,57 +174,57 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths(
base,
['/*', '!/*.py']
- ) == set(['a.py', 'b.py'])
+ ) == {'a.py', 'b.py'}
def test_single_subdir_with_path_traversal(self):
assert self.exclude(['foo/whoops/../a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py'])
+ self.all_paths - {'foo/a.py'}
)
def test_single_subdir_wildcard_filename(self):
assert self.exclude(['foo/*.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'foo/b.py'])
+ self.all_paths - {'foo/a.py', 'foo/b.py'}
)
def test_wildcard_subdir_single_filename(self):
assert self.exclude(['*/a.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'bar/a.py'])
+ self.all_paths - {'foo/a.py', 'bar/a.py'}
)
def test_wildcard_subdir_wildcard_filename(self):
assert self.exclude(['*/*.py']) == convert_paths(
- self.all_paths - set(['foo/a.py', 'foo/b.py', 'bar/a.py'])
+ self.all_paths - {'foo/a.py', 'foo/b.py', 'bar/a.py'}
)
def test_directory(self):
assert self.exclude(['foo']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py',
'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_trailing_slash(self):
assert self.exclude(['foo']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo', 'foo/a.py', 'foo/b.py',
'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_single_exception(self):
assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/bar',
'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_subdir_exception(self):
assert self.exclude(['foo', '!foo/bar']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
- ])
+ }
)
@pytest.mark.skipif(
@@ -234,21 +232,21 @@ class ExcludePathsTest(unittest.TestCase):
)
def test_directory_with_subdir_exception_win32_pathsep(self):
assert self.exclude(['foo', '!foo\\bar']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
- ])
+ }
)
def test_directory_with_wildcard_exception(self):
assert self.exclude(['foo', '!foo/*.py']) == convert_paths(
- self.all_paths - set([
+ self.all_paths - {
'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3'
- ])
+ }
)
def test_subdirectory(self):
assert self.exclude(['foo/bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ self.all_paths - {'foo/bar', 'foo/bar/a.py'}
)
@pytest.mark.skipif(
@@ -256,33 +254,33 @@ class ExcludePathsTest(unittest.TestCase):
)
def test_subdirectory_win32_pathsep(self):
assert self.exclude(['foo\\bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ self.all_paths - {'foo/bar', 'foo/bar/a.py'}
)
def test_double_wildcard(self):
assert self.exclude(['**/a.py']) == convert_paths(
- self.all_paths - set(
- ['a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py']
- )
+ self.all_paths - {
+ 'a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py'
+ }
)
assert self.exclude(['foo/**/bar']) == convert_paths(
- self.all_paths - set(['foo/bar', 'foo/bar/a.py'])
+ self.all_paths - {'foo/bar', 'foo/bar/a.py'}
)
def test_single_and_double_wildcard(self):
assert self.exclude(['**/target/*/*']) == convert_paths(
- self.all_paths - set(
- ['target/subdir/file.txt',
+ self.all_paths - {
+ 'target/subdir/file.txt',
'subdir/target/subdir/file.txt',
- 'subdir/subdir2/target/subdir/file.txt']
- )
+ 'subdir/subdir2/target/subdir/file.txt'
+ }
)
def test_trailing_double_wildcard(self):
assert self.exclude(['subdir/**']) == convert_paths(
- self.all_paths - set(
- ['subdir/file.txt',
+ self.all_paths - {
+ 'subdir/file.txt',
'subdir/target/file.txt',
'subdir/target/subdir/file.txt',
'subdir/subdir2/file.txt',
@@ -292,16 +290,16 @@ class ExcludePathsTest(unittest.TestCase):
'subdir/target/subdir',
'subdir/subdir2',
'subdir/subdir2/target',
- 'subdir/subdir2/target/subdir']
- )
+ 'subdir/subdir2/target/subdir'
+ }
)
def test_double_wildcard_with_exception(self):
assert self.exclude(['**', '!bar', '!foo/bar']) == convert_paths(
- set([
+ {
'foo/bar', 'foo/bar/a.py', 'bar', 'bar/a.py', 'Dockerfile',
'.dockerignore',
- ])
+ }
)
def test_include_wildcard(self):
@@ -324,7 +322,7 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths(
base,
['*.md', '!README*.md', 'README-secret.md']
- ) == set(['README.md', 'README-bis.md'])
+ ) == {'README.md', 'README-bis.md'}
def test_parent_directory(self):
base = make_tree(
@@ -340,7 +338,7 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths(
base,
['../a.py', '/../b.py']
- ) == set(['c.py'])
+ ) == {'c.py'}
class TarTest(unittest.TestCase):
@@ -374,14 +372,14 @@ class TarTest(unittest.TestCase):
'.dockerignore',
]
- expected_names = set([
+ expected_names = {
'Dockerfile',
'.dockerignore',
'a.go',
'b.py',
'bar',
'bar/a.py',
- ])
+ }
base = make_tree(dirs, files)
self.addCleanup(shutil.rmtree, base)
@@ -413,7 +411,7 @@ class TarTest(unittest.TestCase):
with pytest.raises(IOError) as ei:
tar(base)
- assert 'Can not read file in context: {}'.format(full_path) in (
+ assert f'Can not read file in context: {full_path}' in (
ei.exconly()
)
diff --git a/tests/unit/utils_config_test.py b/tests/unit/utils_config_test.py
index b0934f9..83e04a1 100644
--- a/tests/unit/utils_config_test.py
+++ b/tests/unit/utils_config_test.py
@@ -11,7 +8,4 @@ from docker.utils import config

-try:
-    from unittest import mock
-except ImportError:
-    import mock
+from unittest import mock
class FindConfigFileTest(unittest.TestCase):
diff --git a/tests/unit/utils_json_stream_test.py b/tests/unit/utils_json_stream_test.py
index f7aefd0..821ebe4 100644
--- a/tests/unit/utils_json_stream_test.py
+++ b/tests/unit/utils_json_stream_test.py
@@ -1,11 +1,7 @@
-# encoding: utf-8
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
from docker.utils.json_stream import json_splitter, stream_as_text, json_stream
-class TestJsonSplitter(object):
+class TestJsonSplitter:
def test_json_splitter_no_object(self):
data = '{"foo": "bar'
@@ -20,7 +16,7 @@ class TestJsonSplitter(object):
assert json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')
-class TestStreamAsText(object):
+class TestStreamAsText:
def test_stream_with_non_utf_unicode_character(self):
stream = [b'\xed\xf3\xf3']
@@ -28,12 +24,12 @@ class TestStreamAsText(object):
assert output == '���'
def test_stream_with_utf_character(self):
- stream = ['ěĝ'.encode('utf-8')]
+ stream = ['ěĝ'.encode()]
output, = stream_as_text(stream)
assert output == 'ěĝ'
-class TestJsonStream(object):
+class TestJsonStream:
def test_with_falsy_entries(self):
stream = [
diff --git a/tests/unit/utils_proxy_test.py b/tests/unit/utils_proxy_test.py
index ff0e14b..2da6040 100644
--- a/tests/unit/utils_proxy_test.py
+++ b/tests/unit/utils_proxy_test.py
@@ -1,7 +1,4 @@
-# -*- coding: utf-8 -*-
-
import unittest
-import six
from docker.utils.proxy import ProxyConfig
@@ -65,7 +62,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is None.
self.assertSetEqual(
set(CONFIG.inject_proxy_environment(None)),
- set(['{}={}'.format(k, v) for k, v in six.iteritems(ENV)]))
+ {f'{k}={v}' for k, v in ENV.items()})
# Proxy config is null, env is None.
self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None)
@@ -74,7 +71,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is non null
actual = CONFIG.inject_proxy_environment(env)
- expected = ['{}={}'.format(k, v) for k, v in six.iteritems(ENV)] + env
+ expected = [f'{k}={v}' for k, v in ENV.items()] + env
# It's important that the first 8 variables are the ones from the proxy
# config, and the last 2 are the ones from the input environment
self.assertSetEqual(set(actual[:8]), set(expected[:8]))
diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py
index 0d6ff22..802d919 100644
--- a/tests/unit/utils_test.py
+++ b/tests/unit/utils_test.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import base64
import json
import os
@@ -9,7 +7,6 @@ import tempfile
import unittest
import pytest
-import six
from docker.api.client import APIClient
from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION
from docker.errors import DockerException
@@ -195,22 +192,22 @@ class ConverVolumeBindsTest(unittest.TestCase):
assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw']
def test_convert_volume_binds_unicode_bytes_input(self):
- expected = [u'/mnt/지연:/unicode/박:rw']
+ expected = ['/mnt/지연:/unicode/박:rw']
data = {
- u'/mnt/지연'.encode('utf-8'): {
- 'bind': u'/unicode/박'.encode('utf-8'),
+ '/mnt/지연'.encode(): {
+ 'bind': '/unicode/박'.encode(),
'mode': 'rw'
}
}
assert convert_volume_binds(data) == expected
def test_convert_volume_binds_unicode_unicode_input(self):
- expected = [u'/mnt/지연:/unicode/박:rw']
+ expected = ['/mnt/지연:/unicode/박:rw']
data = {
- u'/mnt/지연': {
- 'bind': u'/unicode/박',
+ '/mnt/지연': {
+ 'bind': '/unicode/박',
'mode': 'rw'
}
}
@@ -359,14 +356,14 @@ class ParseRepositoryTagTest(unittest.TestCase):
)
def test_index_image_sha(self):
- assert parse_repository_tag("root@sha256:{0}".format(self.sha)) == (
- "root", "sha256:{0}".format(self.sha)
+ assert parse_repository_tag(f"root@sha256:{self.sha}") == (
+ "root", f"sha256:{self.sha}"
)
def test_private_reg_image_sha(self):
assert parse_repository_tag(
- "url:5000/repo@sha256:{0}".format(self.sha)
- ) == ("url:5000/repo", "sha256:{0}".format(self.sha))
+ f"url:5000/repo@sha256:{self.sha}"
+ ) == ("url:5000/repo", f"sha256:{self.sha}")
class ParseDeviceTest(unittest.TestCase):
@@ -463,20 +460,13 @@ class UtilsTest(unittest.TestCase):
def test_decode_json_header(self):
obj = {'a': 'b', 'c': 1}
data = None
- if six.PY3:
- data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8'))
- else:
- data = base64.urlsafe_b64encode(json.dumps(obj))
+ data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8'))
decoded_data = decode_json_header(data)
assert obj == decoded_data
class SplitCommandTest(unittest.TestCase):
def test_split_command_with_unicode(self):
- assert split_command(u'echo μμ') == ['echo', 'μμ']
-
- @pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3")
- def test_split_command_with_bytes(self):
assert split_command('echo μμ') == ['echo', 'μμ']
@@ -626,7 +616,7 @@ class FormatEnvironmentTest(unittest.TestCase):
env_dict = {
'ARTIST_NAME': b'\xec\x86\xa1\xec\xa7\x80\xec\x9d\x80'
}
- assert format_environment(env_dict) == [u'ARTIST_NAME=송지은']
+ assert format_environment(env_dict) == ['ARTIST_NAME=송지은']
def test_format_env_no_value(self):
env_dict = {