Merge branch 'master' into feat/add_templating_parameter_docker_config

This commit is contained in:
Anca Iordache 2021-10-07 23:29:03 +02:00 committed by GitHub
commit aae6be0c58
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
95 changed files with 588 additions and 674 deletions

View File

@ -12,7 +12,7 @@ from .. import utils
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class BuildApiMixin(object): class BuildApiMixin:
def build(self, path=None, tag=None, quiet=False, fileobj=None, def build(self, path=None, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, timeout=None, nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False, custom_context=False, encoding=None, pull=False,
@ -132,7 +132,7 @@ class BuildApiMixin(object):
for key in container_limits.keys(): for key in container_limits.keys():
if key not in constants.CONTAINER_LIMITS_KEYS: if key not in constants.CONTAINER_LIMITS_KEYS:
raise errors.DockerException( raise errors.DockerException(
'Invalid container_limits key {0}'.format(key) f'Invalid container_limits key {key}'
) )
if custom_context: if custom_context:
@ -150,7 +150,7 @@ class BuildApiMixin(object):
dockerignore = os.path.join(path, '.dockerignore') dockerignore = os.path.join(path, '.dockerignore')
exclude = None exclude = None
if os.path.exists(dockerignore): if os.path.exists(dockerignore):
with open(dockerignore, 'r') as f: with open(dockerignore) as f:
exclude = list(filter( exclude = list(filter(
lambda x: x != '' and x[0] != '#', lambda x: x != '' and x[0] != '#',
[l.strip() for l in f.read().splitlines()] [l.strip() for l in f.read().splitlines()]
@ -313,7 +313,7 @@ class BuildApiMixin(object):
auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
log.debug( log.debug(
'Sending auth config ({0})'.format( 'Sending auth config ({})'.format(
', '.join(repr(k) for k in auth_data.keys()) ', '.join(repr(k) for k in auth_data.keys())
) )
) )
@ -344,9 +344,9 @@ def process_dockerfile(dockerfile, path):
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')): os.path.relpath(abs_dockerfile, path).startswith('..')):
# Dockerfile not in context - read data to insert into tar later # Dockerfile not in context - read data to insert into tar later
with open(abs_dockerfile, 'r') as df: with open(abs_dockerfile) as df:
return ( return (
'.dockerfile.{0:x}'.format(random.getrandbits(160)), f'.dockerfile.{random.getrandbits(160):x}',
df.read() df.read()
) )

View File

@ -107,7 +107,7 @@ class APIClient(
user_agent=DEFAULT_USER_AGENT, num_pools=None, user_agent=DEFAULT_USER_AGENT, num_pools=None,
credstore_env=None, use_ssh_client=False, credstore_env=None, use_ssh_client=False,
max_pool_size=DEFAULT_MAX_POOL_SIZE): max_pool_size=DEFAULT_MAX_POOL_SIZE):
super(APIClient, self).__init__() super().__init__()
if tls and not base_url: if tls and not base_url:
raise TLSParameterError( raise TLSParameterError(
@ -199,7 +199,7 @@ class APIClient(
self._version = version self._version = version
if not isinstance(self._version, str): if not isinstance(self._version, str):
raise DockerException( raise DockerException(
'Version parameter must be a string or None. Found {0}'.format( 'Version parameter must be a string or None. Found {}'.format(
type(version).__name__ type(version).__name__
) )
) )
@ -219,7 +219,7 @@ class APIClient(
) )
except Exception as e: except Exception as e:
raise DockerException( raise DockerException(
'Error while fetching server API version: {0}'.format(e) f'Error while fetching server API version: {e}'
) )
def _set_request_timeout(self, kwargs): def _set_request_timeout(self, kwargs):
@ -248,7 +248,7 @@ class APIClient(
for arg in args: for arg in args:
if not isinstance(arg, str): if not isinstance(arg, str):
raise ValueError( raise ValueError(
'Expected a string but found {0} ({1}) ' 'Expected a string but found {} ({}) '
'instead'.format(arg, type(arg)) 'instead'.format(arg, type(arg))
) )
@ -256,11 +256,11 @@ class APIClient(
args = map(quote_f, args) args = map(quote_f, args)
if kwargs.get('versioned_api', True): if kwargs.get('versioned_api', True):
return '{0}/v{1}{2}'.format( return '{}/v{}{}'.format(
self.base_url, self._version, pathfmt.format(*args) self.base_url, self._version, pathfmt.format(*args)
) )
else: else:
return '{0}{1}'.format(self.base_url, pathfmt.format(*args)) return f'{self.base_url}{pathfmt.format(*args)}'
def _raise_for_status(self, response): def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred.""" """Raises stored :class:`APIError`, if one occurred."""
@ -341,8 +341,7 @@ class APIClient(
if response.raw._fp.chunked: if response.raw._fp.chunked:
if decode: if decode:
for chunk in json_stream(self._stream_helper(response, False)): yield from json_stream(self._stream_helper(response, False))
yield chunk
else: else:
reader = response.raw reader = response.raw
while not reader.closed: while not reader.closed:
@ -398,8 +397,13 @@ class APIClient(
def _stream_raw_result(self, response, chunk_size=1, decode=True): def _stream_raw_result(self, response, chunk_size=1, decode=True):
''' Stream result for TTY-enabled container and raw binary data''' ''' Stream result for TTY-enabled container and raw binary data'''
self._raise_for_status(response) self._raise_for_status(response)
for out in response.iter_content(chunk_size, decode):
yield out # Disable timeout on the underlying socket to prevent
# Read timed out(s) for long running processes
socket = self._get_raw_response_socket(response)
self._disable_socket_timeout(socket)
yield from response.iter_content(chunk_size, decode)
def _read_from_socket(self, response, stream, tty=True, demux=False): def _read_from_socket(self, response, stream, tty=True, demux=False):
socket = self._get_raw_response_socket(response) socket = self._get_raw_response_socket(response)
@ -477,7 +481,7 @@ class APIClient(
def get_adapter(self, url): def get_adapter(self, url):
try: try:
return super(APIClient, self).get_adapter(url) return super().get_adapter(url)
except requests.exceptions.InvalidSchema as e: except requests.exceptions.InvalidSchema as e:
if self._custom_adapter: if self._custom_adapter:
return self._custom_adapter return self._custom_adapter

View File

@ -1,12 +1,9 @@
import base64 import base64
import six
from .. import utils from .. import utils
class ConfigApiMixin(object): class ConfigApiMixin:
# TODO: The templating field is only available starting from API v 1.37
@utils.minimum_version('1.30') @utils.minimum_version('1.30')
def create_config(self, name, data, labels=None, templating=None): def create_config(self, name, data, labels=None, templating=None):
""" """
@ -26,8 +23,7 @@ class ConfigApiMixin(object):
data = data.encode('utf-8') data = data.encode('utf-8')
data = base64.b64encode(data) data = base64.b64encode(data)
if six.PY3: data = data.decode('ascii')
data = data.decode('ascii')
body = { body = {
'Data': data, 'Data': data,
'Name': name, 'Name': name,

View File

@ -1,7 +1,5 @@
from datetime import datetime from datetime import datetime
import six
from .. import errors from .. import errors
from .. import utils from .. import utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE from ..constants import DEFAULT_DATA_CHUNK_SIZE
@ -12,7 +10,7 @@ from ..types import HostConfig
from ..types import NetworkingConfig from ..types import NetworkingConfig
class ContainerApiMixin(object): class ContainerApiMixin:
@utils.check_resource('container') @utils.check_resource('container')
def attach(self, container, stdout=True, stderr=True, def attach(self, container, stdout=True, stderr=True,
stream=False, logs=False, demux=False): stream=False, logs=False, demux=False):
@ -408,7 +406,7 @@ class ContainerApiMixin(object):
:py:class:`docker.errors.APIError` :py:class:`docker.errors.APIError`
If the server returns an error. If the server returns an error.
""" """
if isinstance(volumes, six.string_types): if isinstance(volumes, str):
volumes = [volumes, ] volumes = [volumes, ]
if isinstance(environment, dict): if isinstance(environment, dict):
@ -790,7 +788,7 @@ class ContainerApiMixin(object):
url = self._url("/containers/{0}/kill", container) url = self._url("/containers/{0}/kill", container)
params = {} params = {}
if signal is not None: if signal is not None:
if not isinstance(signal, six.string_types): if not isinstance(signal, str):
signal = int(signal) signal = int(signal)
params['signal'] = signal params['signal'] = signal
res = self._post(url, params=params) res = self._post(url, params=params)

View File

@ -4,7 +4,7 @@ from datetime import datetime
from .. import auth, types, utils from .. import auth, types, utils
class DaemonApiMixin(object): class DaemonApiMixin:
@utils.minimum_version('1.25') @utils.minimum_version('1.25')
def df(self): def df(self):
""" """

View File

@ -1,10 +1,8 @@
import six
from .. import errors from .. import errors
from .. import utils from .. import utils
class ExecApiMixin(object): class ExecApiMixin:
@utils.check_resource('container') @utils.check_resource('container')
def exec_create(self, container, cmd, stdout=True, stderr=True, def exec_create(self, container, cmd, stdout=True, stderr=True,
stdin=False, tty=False, privileged=False, user='', stdin=False, tty=False, privileged=False, user='',
@ -45,7 +43,7 @@ class ExecApiMixin(object):
'Setting environment for exec is not supported in API < 1.25' 'Setting environment for exec is not supported in API < 1.25'
) )
if isinstance(cmd, six.string_types): if isinstance(cmd, str):
cmd = utils.split_command(cmd) cmd = utils.split_command(cmd)
if isinstance(environment, dict): if isinstance(environment, dict):

View File

@ -1,15 +1,13 @@
import logging import logging
import os import os
import six
from .. import auth, errors, utils from .. import auth, errors, utils
from ..constants import DEFAULT_DATA_CHUNK_SIZE from ..constants import DEFAULT_DATA_CHUNK_SIZE
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class ImageApiMixin(object): class ImageApiMixin:
@utils.check_resource('image') @utils.check_resource('image')
def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE): def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
@ -130,7 +128,7 @@ class ImageApiMixin(object):
params = _import_image_params( params = _import_image_params(
repository, tag, image, repository, tag, image,
src=(src if isinstance(src, six.string_types) else None), src=(src if isinstance(src, str) else None),
changes=changes changes=changes
) )
headers = {'Content-Type': 'application/tar'} headers = {'Content-Type': 'application/tar'}
@ -139,7 +137,7 @@ class ImageApiMixin(object):
return self._result( return self._result(
self._post(u, data=None, params=params) self._post(u, data=None, params=params)
) )
elif isinstance(src, six.string_types): # from file path elif isinstance(src, str): # from file path
with open(src, 'rb') as f: with open(src, 'rb') as f:
return self._result( return self._result(
self._post( self._post(
@ -571,7 +569,7 @@ class ImageApiMixin(object):
def is_file(src): def is_file(src):
try: try:
return ( return (
isinstance(src, six.string_types) and isinstance(src, str) and
os.path.isfile(src) os.path.isfile(src)
) )
except TypeError: # a data string will make isfile() raise a TypeError except TypeError: # a data string will make isfile() raise a TypeError

View File

@ -4,7 +4,7 @@ from ..utils import version_lt
from .. import utils from .. import utils
class NetworkApiMixin(object): class NetworkApiMixin:
def networks(self, names=None, ids=None, filters=None): def networks(self, names=None, ids=None, filters=None):
""" """
List networks. Similar to the ``docker network ls`` command. List networks. Similar to the ``docker network ls`` command.

View File

@ -1,9 +1,7 @@
import six
from .. import auth, utils from .. import auth, utils
class PluginApiMixin(object): class PluginApiMixin:
@utils.minimum_version('1.25') @utils.minimum_version('1.25')
@utils.check_resource('name') @utils.check_resource('name')
def configure_plugin(self, name, options): def configure_plugin(self, name, options):
@ -21,7 +19,7 @@ class PluginApiMixin(object):
url = self._url('/plugins/{0}/set', name) url = self._url('/plugins/{0}/set', name)
data = options data = options
if isinstance(data, dict): if isinstance(data, dict):
data = ['{0}={1}'.format(k, v) for k, v in six.iteritems(data)] data = [f'{k}={v}' for k, v in data.items()]
res = self._post_json(url, data=data) res = self._post_json(url, data=data)
self._raise_for_status(res) self._raise_for_status(res)
return True return True

View File

@ -1,12 +1,10 @@
import base64 import base64
import six
from .. import errors from .. import errors
from .. import utils from .. import utils
class SecretApiMixin(object): class SecretApiMixin:
@utils.minimum_version('1.25') @utils.minimum_version('1.25')
def create_secret(self, name, data, labels=None, driver=None): def create_secret(self, name, data, labels=None, driver=None):
""" """
@ -25,8 +23,7 @@ class SecretApiMixin(object):
data = data.encode('utf-8') data = data.encode('utf-8')
data = base64.b64encode(data) data = base64.b64encode(data)
if six.PY3: data = data.decode('ascii')
data = data.decode('ascii')
body = { body = {
'Data': data, 'Data': data,
'Name': name, 'Name': name,

View File

@ -45,7 +45,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec,
if task_template is not None: if task_template is not None:
if 'ForceUpdate' in task_template and utils.version_lt( if 'ForceUpdate' in task_template and utils.version_lt(
version, '1.25'): version, '1.25'):
raise_version_error('force_update', '1.25') raise_version_error('force_update', '1.25')
if task_template.get('Placement'): if task_template.get('Placement'):
if utils.version_lt(version, '1.30'): if utils.version_lt(version, '1.30'):
@ -113,7 +113,7 @@ def _merge_task_template(current, override):
return merged return merged
class ServiceApiMixin(object): class ServiceApiMixin:
@utils.minimum_version('1.24') @utils.minimum_version('1.24')
def create_service( def create_service(
self, task_template, name=None, labels=None, mode=None, self, task_template, name=None, labels=None, mode=None,

View File

@ -1,5 +1,5 @@
import logging import logging
from six.moves import http_client import http.client as http_client
from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE
from .. import errors from .. import errors
from .. import types from .. import types
@ -8,7 +8,7 @@ from .. import utils
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class SwarmApiMixin(object): class SwarmApiMixin:
def create_swarm_spec(self, *args, **kwargs): def create_swarm_spec(self, *args, **kwargs):
""" """

View File

@ -2,7 +2,7 @@ from .. import errors
from .. import utils from .. import utils
class VolumeApiMixin(object): class VolumeApiMixin:
def volumes(self, filters=None): def volumes(self, filters=None):
""" """
List volumes currently registered by the docker daemon. Similar to the List volumes currently registered by the docker daemon. Similar to the

View File

@ -2,14 +2,12 @@ import base64
import json import json
import logging import logging
import six
from . import credentials from . import credentials
from . import errors from . import errors
from .utils import config from .utils import config
INDEX_NAME = 'docker.io' INDEX_NAME = 'docker.io'
INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME) INDEX_URL = f'https://index.{INDEX_NAME}/v1/'
TOKEN_USERNAME = '<token>' TOKEN_USERNAME = '<token>'
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -18,13 +16,13 @@ log = logging.getLogger(__name__)
def resolve_repository_name(repo_name): def resolve_repository_name(repo_name):
if '://' in repo_name: if '://' in repo_name:
raise errors.InvalidRepository( raise errors.InvalidRepository(
'Repository name cannot contain a scheme ({0})'.format(repo_name) f'Repository name cannot contain a scheme ({repo_name})'
) )
index_name, remote_name = split_repo_name(repo_name) index_name, remote_name = split_repo_name(repo_name)
if index_name[0] == '-' or index_name[-1] == '-': if index_name[0] == '-' or index_name[-1] == '-':
raise errors.InvalidRepository( raise errors.InvalidRepository(
'Invalid index name ({0}). Cannot begin or end with a' 'Invalid index name ({}). Cannot begin or end with a'
' hyphen.'.format(index_name) ' hyphen.'.format(index_name)
) )
return resolve_index_name(index_name), remote_name return resolve_index_name(index_name), remote_name
@ -98,10 +96,10 @@ class AuthConfig(dict):
""" """
conf = {} conf = {}
for registry, entry in six.iteritems(entries): for registry, entry in entries.items():
if not isinstance(entry, dict): if not isinstance(entry, dict):
log.debug( log.debug(
'Config entry for key {0} is not auth config'.format( 'Config entry for key {} is not auth config'.format(
registry registry
) )
) )
@ -111,14 +109,14 @@ class AuthConfig(dict):
# keys is not formatted properly. # keys is not formatted properly.
if raise_on_error: if raise_on_error:
raise errors.InvalidConfigFile( raise errors.InvalidConfigFile(
'Invalid configuration for registry {0}'.format( 'Invalid configuration for registry {}'.format(
registry registry
) )
) )
return {} return {}
if 'identitytoken' in entry: if 'identitytoken' in entry:
log.debug( log.debug(
'Found an IdentityToken entry for registry {0}'.format( 'Found an IdentityToken entry for registry {}'.format(
registry registry
) )
) )
@ -132,7 +130,7 @@ class AuthConfig(dict):
# a valid value in the auths config. # a valid value in the auths config.
# https://github.com/docker/compose/issues/3265 # https://github.com/docker/compose/issues/3265
log.debug( log.debug(
'Auth data for {0} is absent. Client might be using a ' 'Auth data for {} is absent. Client might be using a '
'credentials store instead.'.format(registry) 'credentials store instead.'.format(registry)
) )
conf[registry] = {} conf[registry] = {}
@ -140,7 +138,7 @@ class AuthConfig(dict):
username, password = decode_auth(entry['auth']) username, password = decode_auth(entry['auth'])
log.debug( log.debug(
'Found entry (registry={0}, username={1})' 'Found entry (registry={}, username={})'
.format(repr(registry), repr(username)) .format(repr(registry), repr(username))
) )
@ -170,7 +168,7 @@ class AuthConfig(dict):
try: try:
with open(config_file) as f: with open(config_file) as f:
config_dict = json.load(f) config_dict = json.load(f)
except (IOError, KeyError, ValueError) as e: except (OSError, KeyError, ValueError) as e:
# Likely missing new Docker config file or it's in an # Likely missing new Docker config file or it's in an
# unknown format, continue to attempt to read old location # unknown format, continue to attempt to read old location
# and format. # and format.
@ -230,7 +228,7 @@ class AuthConfig(dict):
store_name = self.get_credential_store(registry) store_name = self.get_credential_store(registry)
if store_name is not None: if store_name is not None:
log.debug( log.debug(
'Using credentials store "{0}"'.format(store_name) f'Using credentials store "{store_name}"'
) )
cfg = self._resolve_authconfig_credstore(registry, store_name) cfg = self._resolve_authconfig_credstore(registry, store_name)
if cfg is not None: if cfg is not None:
@ -239,15 +237,15 @@ class AuthConfig(dict):
# Default to the public index server # Default to the public index server
registry = resolve_index_name(registry) if registry else INDEX_NAME registry = resolve_index_name(registry) if registry else INDEX_NAME
log.debug("Looking for auth entry for {0}".format(repr(registry))) log.debug(f"Looking for auth entry for {repr(registry)}")
if registry in self.auths: if registry in self.auths:
log.debug("Found {0}".format(repr(registry))) log.debug(f"Found {repr(registry)}")
return self.auths[registry] return self.auths[registry]
for key, conf in six.iteritems(self.auths): for key, conf in self.auths.items():
if resolve_index_name(key) == registry: if resolve_index_name(key) == registry:
log.debug("Found {0}".format(repr(key))) log.debug(f"Found {repr(key)}")
return conf return conf
log.debug("No entry found") log.debug("No entry found")
@ -258,7 +256,7 @@ class AuthConfig(dict):
# The ecosystem is a little schizophrenic with index.docker.io VS # The ecosystem is a little schizophrenic with index.docker.io VS
# docker.io - in that case, it seems the full URL is necessary. # docker.io - in that case, it seems the full URL is necessary.
registry = INDEX_URL registry = INDEX_URL
log.debug("Looking for auth entry for {0}".format(repr(registry))) log.debug(f"Looking for auth entry for {repr(registry)}")
store = self._get_store_instance(credstore_name) store = self._get_store_instance(credstore_name)
try: try:
data = store.get(registry) data = store.get(registry)
@ -278,7 +276,7 @@ class AuthConfig(dict):
return None return None
except credentials.StoreError as e: except credentials.StoreError as e:
raise errors.DockerException( raise errors.DockerException(
'Credentials store error: {0}'.format(repr(e)) f'Credentials store error: {repr(e)}'
) )
def _get_store_instance(self, name): def _get_store_instance(self, name):
@ -329,7 +327,7 @@ def convert_to_hostname(url):
def decode_auth(auth): def decode_auth(auth):
if isinstance(auth, six.string_types): if isinstance(auth, str):
auth = auth.encode('ascii') auth = auth.encode('ascii')
s = base64.b64decode(auth) s = base64.b64decode(auth)
login, pwd = s.split(b':', 1) login, pwd = s.split(b':', 1)

View File

@ -13,7 +13,7 @@ from .models.volumes import VolumeCollection
from .utils import kwargs_from_env from .utils import kwargs_from_env
class DockerClient(object): class DockerClient:
""" """
A client for communicating with a Docker server. A client for communicating with a Docker server.
@ -212,7 +212,7 @@ class DockerClient(object):
close.__doc__ = APIClient.close.__doc__ close.__doc__ = APIClient.close.__doc__
def __getattr__(self, name): def __getattr__(self, name):
s = ["'DockerClient' object has no attribute '{}'".format(name)] s = [f"'DockerClient' object has no attribute '{name}'"]
# If a user calls a method on APIClient, they # If a user calls a method on APIClient, they
if hasattr(APIClient, name): if hasattr(APIClient, name):
s.append("In Docker SDK for Python 2.0, this method is now on the " s.append("In Docker SDK for Python 2.0, this method is now on the "

View File

@ -28,7 +28,7 @@ INSECURE_REGISTRY_DEPRECATION_WARNING = \
IS_WINDOWS_PLATFORM = (sys.platform == 'win32') IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
WINDOWS_LONGPATH_PREFIX = '\\\\?\\' WINDOWS_LONGPATH_PREFIX = '\\\\?\\'
DEFAULT_USER_AGENT = "docker-sdk-python/{0}".format(version) DEFAULT_USER_AGENT = f"docker-sdk-python/{version}"
DEFAULT_NUM_POOLS = 25 DEFAULT_NUM_POOLS = 25
# The OpenSSH server default value for MaxSessions is 10 which means we can # The OpenSSH server default value for MaxSessions is 10 which means we can

View File

@ -9,7 +9,7 @@ from docker.context.config import write_context_name_to_docker_config
from docker.context import Context from docker.context import Context
class ContextAPI(object): class ContextAPI:
"""Context API. """Context API.
Contains methods for context management: Contains methods for context management:
create, list, remove, get, inspect. create, list, remove, get, inspect.
@ -109,7 +109,7 @@ class ContextAPI(object):
if filename == METAFILE: if filename == METAFILE:
try: try:
data = json.load( data = json.load(
open(os.path.join(dirname, filename), "r")) open(os.path.join(dirname, filename)))
names.append(data["Name"]) names.append(data["Name"])
except Exception as e: except Exception as e:
raise errors.ContextException( raise errors.ContextException(
@ -138,7 +138,7 @@ class ContextAPI(object):
err = write_context_name_to_docker_config(name) err = write_context_name_to_docker_config(name)
if err: if err:
raise errors.ContextException( raise errors.ContextException(
'Failed to set current context: {}'.format(err)) f'Failed to set current context: {err}')
@classmethod @classmethod
def remove_context(cls, name): def remove_context(cls, name):

View File

@ -15,7 +15,7 @@ def get_current_context_name():
docker_cfg_path = find_config_file() docker_cfg_path = find_config_file()
if docker_cfg_path: if docker_cfg_path:
try: try:
with open(docker_cfg_path, "r") as f: with open(docker_cfg_path) as f:
name = json.load(f).get("currentContext", "default") name = json.load(f).get("currentContext", "default")
except Exception: except Exception:
return "default" return "default"
@ -29,7 +29,7 @@ def write_context_name_to_docker_config(name=None):
config = {} config = {}
if docker_cfg_path: if docker_cfg_path:
try: try:
with open(docker_cfg_path, "r") as f: with open(docker_cfg_path) as f:
config = json.load(f) config = json.load(f)
except Exception as e: except Exception as e:
return e return e

View File

@ -94,7 +94,7 @@ class Context:
try: try:
with open(meta_file) as f: with open(meta_file) as f:
metadata = json.load(f) metadata = json.load(f)
except (IOError, KeyError, ValueError) as e: except (OSError, KeyError, ValueError) as e:
# unknown format # unknown format
raise Exception("""Detected corrupted meta file for raise Exception("""Detected corrupted meta file for
context {} : {}""".format(name, e)) context {} : {}""".format(name, e))
@ -171,7 +171,7 @@ class Context:
rmtree(self.tls_path) rmtree(self.tls_path)
def __repr__(self): def __repr__(self):
return "<%s: '%s'>" % (self.__class__.__name__, self.name) return f"<{self.__class__.__name__}: '{self.name}'>"
def __str__(self): def __str__(self):
return json.dumps(self.__call__(), indent=2) return json.dumps(self.__call__(), indent=2)

View File

@ -2,15 +2,13 @@ import errno
import json import json
import subprocess import subprocess
import six
from . import constants from . import constants
from . import errors from . import errors
from .utils import create_environment_dict from .utils import create_environment_dict
from .utils import find_executable from .utils import find_executable
class Store(object): class Store:
def __init__(self, program, environment=None): def __init__(self, program, environment=None):
""" Create a store object that acts as an interface to """ Create a store object that acts as an interface to
perform the basic operations for storing, retrieving perform the basic operations for storing, retrieving
@ -30,7 +28,7 @@ class Store(object):
""" Retrieve credentials for `server`. If no credentials are found, """ Retrieve credentials for `server`. If no credentials are found,
a `StoreError` will be raised. a `StoreError` will be raised.
""" """
if not isinstance(server, six.binary_type): if not isinstance(server, bytes):
server = server.encode('utf-8') server = server.encode('utf-8')
data = self._execute('get', server) data = self._execute('get', server)
result = json.loads(data.decode('utf-8')) result = json.loads(data.decode('utf-8'))
@ -41,7 +39,7 @@ class Store(object):
# raise CredentialsNotFound # raise CredentialsNotFound
if result['Username'] == '' and result['Secret'] == '': if result['Username'] == '' and result['Secret'] == '':
raise errors.CredentialsNotFound( raise errors.CredentialsNotFound(
'No matching credentials in {}'.format(self.program) f'No matching credentials in {self.program}'
) )
return result return result
@ -61,7 +59,7 @@ class Store(object):
""" Erase credentials for `server`. Raises a `StoreError` if an error """ Erase credentials for `server`. Raises a `StoreError` if an error
occurs. occurs.
""" """
if not isinstance(server, six.binary_type): if not isinstance(server, bytes):
server = server.encode('utf-8') server = server.encode('utf-8')
self._execute('erase', server) self._execute('erase', server)
@ -75,20 +73,9 @@ class Store(object):
output = None output = None
env = create_environment_dict(self.environment) env = create_environment_dict(self.environment)
try: try:
if six.PY3: output = subprocess.check_output(
output = subprocess.check_output( [self.exe, subcmd], input=data_input, env=env,
[self.exe, subcmd], input=data_input, env=env, )
)
else:
process = subprocess.Popen(
[self.exe, subcmd], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, env=env,
)
output, _ = process.communicate(data_input)
if process.returncode != 0:
raise subprocess.CalledProcessError(
returncode=process.returncode, cmd='', output=output
)
except subprocess.CalledProcessError as e: except subprocess.CalledProcessError as e:
raise errors.process_store_error(e, self.program) raise errors.process_store_error(e, self.program)
except OSError as e: except OSError as e:

View File

@ -38,25 +38,25 @@ class APIError(requests.exceptions.HTTPError, DockerException):
def __init__(self, message, response=None, explanation=None): def __init__(self, message, response=None, explanation=None):
# requests 1.2 supports response as a keyword argument, but # requests 1.2 supports response as a keyword argument, but
# requests 1.1 doesn't # requests 1.1 doesn't
super(APIError, self).__init__(message) super().__init__(message)
self.response = response self.response = response
self.explanation = explanation self.explanation = explanation
def __str__(self): def __str__(self):
message = super(APIError, self).__str__() message = super().__str__()
if self.is_client_error(): if self.is_client_error():
message = '{0} Client Error for {1}: {2}'.format( message = '{} Client Error for {}: {}'.format(
self.response.status_code, self.response.url, self.response.status_code, self.response.url,
self.response.reason) self.response.reason)
elif self.is_server_error(): elif self.is_server_error():
message = '{0} Server Error for {1}: {2}'.format( message = '{} Server Error for {}: {}'.format(
self.response.status_code, self.response.url, self.response.status_code, self.response.url,
self.response.reason) self.response.reason)
if self.explanation: if self.explanation:
message = '{0} ("{1}")'.format(message, self.explanation) message = f'{message} ("{self.explanation}")'
return message return message
@ -133,11 +133,11 @@ class ContainerError(DockerException):
self.image = image self.image = image
self.stderr = stderr self.stderr = stderr
err = ": {}".format(stderr) if stderr is not None else "" err = f": {stderr}" if stderr is not None else ""
msg = ("Command '{}' in image '{}' returned non-zero exit " msg = ("Command '{}' in image '{}' returned non-zero exit "
"status {}{}").format(command, image, exit_status, err) "status {}{}").format(command, image, exit_status, err)
super(ContainerError, self).__init__(msg) super().__init__(msg)
class StreamParseError(RuntimeError): class StreamParseError(RuntimeError):
@ -147,7 +147,7 @@ class StreamParseError(RuntimeError):
class BuildError(DockerException): class BuildError(DockerException):
def __init__(self, reason, build_log): def __init__(self, reason, build_log):
super(BuildError, self).__init__(reason) super().__init__(reason)
self.msg = reason self.msg = reason
self.build_log = build_log self.build_log = build_log
@ -157,8 +157,8 @@ class ImageLoadError(DockerException):
def create_unexpected_kwargs_error(name, kwargs): def create_unexpected_kwargs_error(name, kwargs):
quoted_kwargs = ["'{}'".format(k) for k in sorted(kwargs)] quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)]
text = ["{}() ".format(name)] text = [f"{name}() "]
if len(quoted_kwargs) == 1: if len(quoted_kwargs) == 1:
text.append("got an unexpected keyword argument ") text.append("got an unexpected keyword argument ")
else: else:
@ -172,7 +172,7 @@ class MissingContextParameter(DockerException):
self.param = param self.param = param
def __str__(self): def __str__(self):
return ("missing parameter: {}".format(self.param)) return (f"missing parameter: {self.param}")
class ContextAlreadyExists(DockerException): class ContextAlreadyExists(DockerException):
@ -180,7 +180,7 @@ class ContextAlreadyExists(DockerException):
self.name = name self.name = name
def __str__(self): def __str__(self):
return ("context {} already exists".format(self.name)) return (f"context {self.name} already exists")
class ContextException(DockerException): class ContextException(DockerException):
@ -196,4 +196,4 @@ class ContextNotFound(DockerException):
self.name = name self.name = name
def __str__(self): def __str__(self):
return ("context '{}' not found".format(self.name)) return (f"context '{self.name}' not found")

View File

@ -7,7 +7,7 @@ class Config(Model):
id_attribute = 'ID' id_attribute = 'ID'
def __repr__(self): def __repr__(self):
return "<%s: '%s'>" % (self.__class__.__name__, self.name) return f"<{self.__class__.__name__}: '{self.name}'>"
@property @property
def name(self): def name(self):

View File

@ -761,6 +761,14 @@ class ContainerCollection(Collection):
{'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'}, {'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'},
'/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}} '/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}}
Or a list of strings which each one of its elements specifies a mount volume.
For example:
.. code-block:: python
['/home/user1/:/mnt/vol2','/var/www:/mnt/vol1']
volumes_from (:py:class:`list`): List of container names or IDs to volumes_from (:py:class:`list`): List of container names or IDs to
get volumes from. get volumes from.
working_dir (str): Path to the working directory. working_dir (str): Path to the working directory.

View File

@ -2,8 +2,6 @@ import itertools
import re import re
import warnings import warnings
import six
from ..api import APIClient from ..api import APIClient
from ..constants import DEFAULT_DATA_CHUNK_SIZE from ..constants import DEFAULT_DATA_CHUNK_SIZE
from ..errors import BuildError, ImageLoadError, InvalidArgument from ..errors import BuildError, ImageLoadError, InvalidArgument
@ -17,7 +15,7 @@ class Image(Model):
An image on the server. An image on the server.
""" """
def __repr__(self): def __repr__(self):
return "<%s: '%s'>" % (self.__class__.__name__, "', '".join(self.tags)) return "<{}: '{}'>".format(self.__class__.__name__, "', '".join(self.tags))
@property @property
def labels(self): def labels(self):
@ -84,19 +82,19 @@ class Image(Model):
Example: Example:
>>> image = cli.get_image("busybox:latest") >>> image = cli.images.get("busybox:latest")
>>> f = open('/tmp/busybox-latest.tar', 'wb') >>> f = open('/tmp/busybox-latest.tar', 'wb')
>>> for chunk in image: >>> for chunk in image.save():
>>> f.write(chunk) >>> f.write(chunk)
>>> f.close() >>> f.close()
""" """
img = self.id img = self.id
if named: if named:
img = self.tags[0] if self.tags else img img = self.tags[0] if self.tags else img
if isinstance(named, six.string_types): if isinstance(named, str):
if named not in self.tags: if named not in self.tags:
raise InvalidArgument( raise InvalidArgument(
"{} is not a valid tag for this image".format(named) f"{named} is not a valid tag for this image"
) )
img = named img = named
@ -127,7 +125,7 @@ class RegistryData(Model):
Image metadata stored on the registry, including available platforms. Image metadata stored on the registry, including available platforms.
""" """
def __init__(self, image_name, *args, **kwargs): def __init__(self, image_name, *args, **kwargs):
super(RegistryData, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.image_name = image_name self.image_name = image_name
@property @property
@ -180,7 +178,7 @@ class RegistryData(Model):
parts = platform.split('/') parts = platform.split('/')
if len(parts) > 3 or len(parts) < 1: if len(parts) > 3 or len(parts) < 1:
raise InvalidArgument( raise InvalidArgument(
'"{0}" is not a valid platform descriptor'.format(platform) f'"{platform}" is not a valid platform descriptor'
) )
platform = {'os': parts[0]} platform = {'os': parts[0]}
if len(parts) > 2: if len(parts) > 2:
@ -277,7 +275,7 @@ class ImageCollection(Collection):
If neither ``path`` nor ``fileobj`` is specified. If neither ``path`` nor ``fileobj`` is specified.
""" """
resp = self.client.api.build(**kwargs) resp = self.client.api.build(**kwargs)
if isinstance(resp, six.string_types): if isinstance(resp, str):
return self.get(resp) return self.get(resp)
last_event = None last_event = None
image_id = None image_id = None

View File

@ -7,7 +7,7 @@ class Plugin(Model):
A plugin on the server. A plugin on the server.
""" """
def __repr__(self): def __repr__(self):
return "<%s: '%s'>" % (self.__class__.__name__, self.name) return f"<{self.__class__.__name__}: '{self.name}'>"
@property @property
def name(self): def name(self):
@ -117,8 +117,7 @@ class Plugin(Model):
if remote is None: if remote is None:
remote = self.name remote = self.name
privileges = self.client.api.plugin_privileges(remote) privileges = self.client.api.plugin_privileges(remote)
for d in self.client.api.upgrade_plugin(self.name, remote, privileges): yield from self.client.api.upgrade_plugin(self.name, remote, privileges)
yield d
self.reload() self.reload()

View File

@ -1,5 +1,4 @@
class Model:
class Model(object):
""" """
A base class for representing a single object on the server. A base class for representing a single object on the server.
""" """
@ -18,13 +17,13 @@ class Model(object):
self.attrs = {} self.attrs = {}
def __repr__(self): def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, self.short_id) return f"<{self.__class__.__name__}: {self.short_id}>"
def __eq__(self, other): def __eq__(self, other):
return isinstance(other, self.__class__) and self.id == other.id return isinstance(other, self.__class__) and self.id == other.id
def __hash__(self): def __hash__(self):
return hash("%s:%s" % (self.__class__.__name__, self.id)) return hash(f"{self.__class__.__name__}:{self.id}")
@property @property
def id(self): def id(self):
@ -49,7 +48,7 @@ class Model(object):
self.attrs = new_model.attrs self.attrs = new_model.attrs
class Collection(object): class Collection:
""" """
A base class for representing all objects of a particular type on the A base class for representing all objects of a particular type on the
server. server.

View File

@ -7,7 +7,7 @@ class Secret(Model):
id_attribute = 'ID' id_attribute = 'ID'
def __repr__(self): def __repr__(self):
return "<%s: '%s'>" % (self.__class__.__name__, self.name) return f"<{self.__class__.__name__}: '{self.name}'>"
@property @property
def name(self): def name(self):

View File

@ -11,7 +11,7 @@ class Swarm(Model):
id_attribute = 'ID' id_attribute = 'ID'
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super(Swarm, self).__init__(*args, **kwargs) super().__init__(*args, **kwargs)
if self.client: if self.client:
try: try:
self.reload() self.reload()

View File

@ -5,7 +5,7 @@ from . import errors
from .transport import SSLHTTPAdapter from .transport import SSLHTTPAdapter
class TLSConfig(object): class TLSConfig:
""" """
TLS configuration. TLS configuration.

View File

@ -3,6 +3,6 @@ import requests.adapters
class BaseHTTPAdapter(requests.adapters.HTTPAdapter): class BaseHTTPAdapter(requests.adapters.HTTPAdapter):
def close(self): def close(self):
super(BaseHTTPAdapter, self).close() super().close()
if hasattr(self, 'pools'): if hasattr(self, 'pools'):
self.pools.clear() self.pools.clear()

View File

@ -1,14 +1,11 @@
import six import queue
import requests.adapters import requests.adapters
from docker.transport.basehttpadapter import BaseHTTPAdapter from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants from .. import constants
from .npipesocket import NpipeSocket from .npipesocket import NpipeSocket
if six.PY3: import http.client as httplib
import http.client as httplib
else:
import httplib
try: try:
import requests.packages.urllib3 as urllib3 import requests.packages.urllib3 as urllib3
@ -18,9 +15,9 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class NpipeHTTPConnection(httplib.HTTPConnection, object): class NpipeHTTPConnection(httplib.HTTPConnection):
def __init__(self, npipe_path, timeout=60): def __init__(self, npipe_path, timeout=60):
super(NpipeHTTPConnection, self).__init__( super().__init__(
'localhost', timeout=timeout 'localhost', timeout=timeout
) )
self.npipe_path = npipe_path self.npipe_path = npipe_path
@ -35,7 +32,7 @@ class NpipeHTTPConnection(httplib.HTTPConnection, object):
class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, npipe_path, timeout=60, maxsize=10): def __init__(self, npipe_path, timeout=60, maxsize=10):
super(NpipeHTTPConnectionPool, self).__init__( super().__init__(
'localhost', timeout=timeout, maxsize=maxsize 'localhost', timeout=timeout, maxsize=maxsize
) )
self.npipe_path = npipe_path self.npipe_path = npipe_path
@ -57,7 +54,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
except AttributeError: # self.pool is None except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
except six.moves.queue.Empty: except queue.Empty:
if self.block: if self.block:
raise urllib3.exceptions.EmptyPoolError( raise urllib3.exceptions.EmptyPoolError(
self, self,
@ -85,7 +82,7 @@ class NpipeHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer( self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close() pool_connections, dispose_func=lambda p: p.close()
) )
super(NpipeHTTPAdapter, self).__init__() super().__init__()
def get_connection(self, url, proxies=None): def get_connection(self, url, proxies=None):
with self.pools.lock: with self.pools.lock:

View File

@ -2,7 +2,6 @@ import functools
import time import time
import io import io
import six
import win32file import win32file
import win32pipe import win32pipe
@ -24,7 +23,7 @@ def check_closed(f):
return wrapped return wrapped
class NpipeSocket(object): class NpipeSocket:
""" Partial implementation of the socket API over windows named pipes. """ Partial implementation of the socket API over windows named pipes.
This implementation is only designed to be used as a client socket, This implementation is only designed to be used as a client socket,
and server-specific methods (bind, listen, accept...) are not and server-specific methods (bind, listen, accept...) are not
@ -128,9 +127,6 @@ class NpipeSocket(object):
@check_closed @check_closed
def recv_into(self, buf, nbytes=0): def recv_into(self, buf, nbytes=0):
if six.PY2:
return self._recv_into_py2(buf, nbytes)
readbuf = buf readbuf = buf
if not isinstance(buf, memoryview): if not isinstance(buf, memoryview):
readbuf = memoryview(buf) readbuf = memoryview(buf)
@ -195,7 +191,7 @@ class NpipeFileIOBase(io.RawIOBase):
self.sock = npipe_socket self.sock = npipe_socket
def close(self): def close(self):
super(NpipeFileIOBase, self).close() super().close()
self.sock = None self.sock = None
def fileno(self): def fileno(self):

View File

@ -1,6 +1,7 @@
import paramiko import paramiko
import queue
import urllib.parse
import requests.adapters import requests.adapters
import six
import logging import logging
import os import os
import signal import signal
@ -10,10 +11,7 @@ import subprocess
from docker.transport.basehttpadapter import BaseHTTPAdapter from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants from .. import constants
if six.PY3: import http.client as httplib
import http.client as httplib
else:
import httplib
try: try:
import requests.packages.urllib3 as urllib3 import requests.packages.urllib3 as urllib3
@ -25,7 +23,7 @@ RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class SSHSocket(socket.socket): class SSHSocket(socket.socket):
def __init__(self, host): def __init__(self, host):
super(SSHSocket, self).__init__( super().__init__(
socket.AF_INET, socket.SOCK_STREAM) socket.AF_INET, socket.SOCK_STREAM)
self.host = host self.host = host
self.port = None self.port = None
@ -90,8 +88,7 @@ class SSHSocket(socket.socket):
def makefile(self, mode): def makefile(self, mode):
if not self.proc: if not self.proc:
self.connect() self.connect()
if six.PY3: self.proc.stdout.channel = self
self.proc.stdout.channel = self
return self.proc.stdout return self.proc.stdout
@ -103,9 +100,9 @@ class SSHSocket(socket.socket):
self.proc.terminate() self.proc.terminate()
class SSHConnection(httplib.HTTPConnection, object): class SSHConnection(httplib.HTTPConnection):
def __init__(self, ssh_transport=None, timeout=60, host=None): def __init__(self, ssh_transport=None, timeout=60, host=None):
super(SSHConnection, self).__init__( super().__init__(
'localhost', timeout=timeout 'localhost', timeout=timeout
) )
self.ssh_transport = ssh_transport self.ssh_transport = ssh_transport
@ -129,7 +126,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
scheme = 'ssh' scheme = 'ssh'
def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None): def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):
super(SSHConnectionPool, self).__init__( super().__init__(
'localhost', timeout=timeout, maxsize=maxsize 'localhost', timeout=timeout, maxsize=maxsize
) )
self.ssh_transport = None self.ssh_transport = None
@ -152,7 +149,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
except AttributeError: # self.pool is None except AttributeError: # self.pool is None
raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
except six.moves.queue.Empty: except queue.Empty:
if self.block: if self.block:
raise urllib3.exceptions.EmptyPoolError( raise urllib3.exceptions.EmptyPoolError(
self, self,
@ -188,12 +185,12 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer( self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close() pool_connections, dispose_func=lambda p: p.close()
) )
super(SSHHTTPAdapter, self).__init__() super().__init__()
def _create_paramiko_client(self, base_url): def _create_paramiko_client(self, base_url):
logging.getLogger("paramiko").setLevel(logging.WARNING) logging.getLogger("paramiko").setLevel(logging.WARNING)
self.ssh_client = paramiko.SSHClient() self.ssh_client = paramiko.SSHClient()
base_url = six.moves.urllib_parse.urlparse(base_url) base_url = urllib.parse.urlparse(base_url)
self.ssh_params = { self.ssh_params = {
"hostname": base_url.hostname, "hostname": base_url.hostname,
"port": base_url.port, "port": base_url.port,
@ -205,7 +202,6 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
with open(ssh_config_file) as f: with open(ssh_config_file) as f:
conf.parse(f) conf.parse(f)
host_config = conf.lookup(base_url.hostname) host_config = conf.lookup(base_url.hostname)
self.ssh_conf = host_config
if 'proxycommand' in host_config: if 'proxycommand' in host_config:
self.ssh_params["sock"] = paramiko.ProxyCommand( self.ssh_params["sock"] = paramiko.ProxyCommand(
self.ssh_conf['proxycommand'] self.ssh_conf['proxycommand']
@ -213,9 +209,11 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
if 'hostname' in host_config: if 'hostname' in host_config:
self.ssh_params['hostname'] = host_config['hostname'] self.ssh_params['hostname'] = host_config['hostname']
if base_url.port is None and 'port' in host_config: if base_url.port is None and 'port' in host_config:
self.ssh_params['port'] = self.ssh_conf['port'] self.ssh_params['port'] = host_config['port']
if base_url.username is None and 'user' in host_config: if base_url.username is None and 'user' in host_config:
self.ssh_params['username'] = self.ssh_conf['user'] self.ssh_params['username'] = host_config['user']
if 'identityfile' in host_config:
self.ssh_params['key_filename'] = host_config['identityfile']
self.ssh_client.load_system_host_keys() self.ssh_client.load_system_host_keys()
self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())
@ -252,6 +250,6 @@ class SSHHTTPAdapter(BaseHTTPAdapter):
return pool return pool
def close(self): def close(self):
super(SSHHTTPAdapter, self).close() super().close()
if self.ssh_client: if self.ssh_client:
self.ssh_client.close() self.ssh_client.close()

View File

@ -36,7 +36,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
self.ssl_version = ssl_version self.ssl_version = ssl_version
self.assert_hostname = assert_hostname self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint self.assert_fingerprint = assert_fingerprint
super(SSLHTTPAdapter, self).__init__(**kwargs) super().__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False): def init_poolmanager(self, connections, maxsize, block=False):
kwargs = { kwargs = {
@ -59,7 +59,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter):
But we still need to take care of when there is a proxy poolmanager But we still need to take care of when there is a proxy poolmanager
""" """
conn = super(SSLHTTPAdapter, self).get_connection(*args, **kwargs) conn = super().get_connection(*args, **kwargs)
if conn.assert_hostname != self.assert_hostname: if conn.assert_hostname != self.assert_hostname:
conn.assert_hostname = self.assert_hostname conn.assert_hostname = self.assert_hostname
return conn return conn

View File

@ -1,7 +1,6 @@
import six
import requests.adapters import requests.adapters
import socket import socket
from six.moves import http_client as httplib import http.client as httplib
from docker.transport.basehttpadapter import BaseHTTPAdapter from docker.transport.basehttpadapter import BaseHTTPAdapter
from .. import constants from .. import constants
@ -15,27 +14,15 @@ except ImportError:
RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
class UnixHTTPResponse(httplib.HTTPResponse, object): class UnixHTTPConnection(httplib.HTTPConnection):
def __init__(self, sock, *args, **kwargs):
disable_buffering = kwargs.pop('disable_buffering', False)
if six.PY2:
# FIXME: We may need to disable buffering on Py3 as well,
# but there's no clear way to do it at the moment. See:
# https://github.com/docker/docker-py/issues/1799
kwargs['buffering'] = not disable_buffering
super(UnixHTTPResponse, self).__init__(sock, *args, **kwargs)
class UnixHTTPConnection(httplib.HTTPConnection, object):
def __init__(self, base_url, unix_socket, timeout=60): def __init__(self, base_url, unix_socket, timeout=60):
super(UnixHTTPConnection, self).__init__( super().__init__(
'localhost', timeout=timeout 'localhost', timeout=timeout
) )
self.base_url = base_url self.base_url = base_url
self.unix_socket = unix_socket self.unix_socket = unix_socket
self.timeout = timeout self.timeout = timeout
self.disable_buffering = False
def connect(self): def connect(self):
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@ -44,20 +31,15 @@ class UnixHTTPConnection(httplib.HTTPConnection, object):
self.sock = sock self.sock = sock
def putheader(self, header, *values): def putheader(self, header, *values):
super(UnixHTTPConnection, self).putheader(header, *values) super().putheader(header, *values)
if header == 'Connection' and 'Upgrade' in values:
self.disable_buffering = True
def response_class(self, sock, *args, **kwargs): def response_class(self, sock, *args, **kwargs):
if self.disable_buffering: return httplib.HTTPResponse(sock, *args, **kwargs)
kwargs['disable_buffering'] = True
return UnixHTTPResponse(sock, *args, **kwargs)
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
def __init__(self, base_url, socket_path, timeout=60, maxsize=10): def __init__(self, base_url, socket_path, timeout=60, maxsize=10):
super(UnixHTTPConnectionPool, self).__init__( super().__init__(
'localhost', timeout=timeout, maxsize=maxsize 'localhost', timeout=timeout, maxsize=maxsize
) )
self.base_url = base_url self.base_url = base_url
@ -89,7 +71,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
self.pools = RecentlyUsedContainer( self.pools = RecentlyUsedContainer(
pool_connections, dispose_func=lambda p: p.close() pool_connections, dispose_func=lambda p: p.close()
) )
super(UnixHTTPAdapter, self).__init__() super().__init__()
def get_connection(self, url, proxies=None): def get_connection(self, url, proxies=None):
with self.pools.lock: with self.pools.lock:

View File

@ -1,7 +1,4 @@
import six
class DictType(dict): class DictType(dict):
def __init__(self, init): def __init__(self, init):
for k, v in six.iteritems(init): for k, v in init.items():
self[k] = v self[k] = v

View File

@ -1,5 +1,3 @@
import six
from .. import errors from .. import errors
from ..utils.utils import ( from ..utils.utils import (
convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds, convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds,
@ -10,7 +8,7 @@ from .base import DictType
from .healthcheck import Healthcheck from .healthcheck import Healthcheck
class LogConfigTypesEnum(object): class LogConfigTypesEnum:
_values = ( _values = (
'json-file', 'json-file',
'syslog', 'syslog',
@ -61,7 +59,7 @@ class LogConfig(DictType):
if config and not isinstance(config, dict): if config and not isinstance(config, dict):
raise ValueError("LogConfig.config must be a dictionary") raise ValueError("LogConfig.config must be a dictionary")
super(LogConfig, self).__init__({ super().__init__({
'Type': log_driver_type, 'Type': log_driver_type,
'Config': config 'Config': config
}) })
@ -117,13 +115,13 @@ class Ulimit(DictType):
name = kwargs.get('name', kwargs.get('Name')) name = kwargs.get('name', kwargs.get('Name'))
soft = kwargs.get('soft', kwargs.get('Soft')) soft = kwargs.get('soft', kwargs.get('Soft'))
hard = kwargs.get('hard', kwargs.get('Hard')) hard = kwargs.get('hard', kwargs.get('Hard'))
if not isinstance(name, six.string_types): if not isinstance(name, str):
raise ValueError("Ulimit.name must be a string") raise ValueError("Ulimit.name must be a string")
if soft and not isinstance(soft, int): if soft and not isinstance(soft, int):
raise ValueError("Ulimit.soft must be an integer") raise ValueError("Ulimit.soft must be an integer")
if hard and not isinstance(hard, int): if hard and not isinstance(hard, int):
raise ValueError("Ulimit.hard must be an integer") raise ValueError("Ulimit.hard must be an integer")
super(Ulimit, self).__init__({ super().__init__({
'Name': name, 'Name': name,
'Soft': soft, 'Soft': soft,
'Hard': hard 'Hard': hard
@ -184,7 +182,7 @@ class DeviceRequest(DictType):
if driver is None: if driver is None:
driver = '' driver = ''
elif not isinstance(driver, six.string_types): elif not isinstance(driver, str):
raise ValueError('DeviceRequest.driver must be a string') raise ValueError('DeviceRequest.driver must be a string')
if count is None: if count is None:
count = 0 count = 0
@ -203,7 +201,7 @@ class DeviceRequest(DictType):
elif not isinstance(options, dict): elif not isinstance(options, dict):
raise ValueError('DeviceRequest.options must be a dict') raise ValueError('DeviceRequest.options must be a dict')
super(DeviceRequest, self).__init__({ super().__init__({
'Driver': driver, 'Driver': driver,
'Count': count, 'Count': count,
'DeviceIDs': device_ids, 'DeviceIDs': device_ids,
@ -297,7 +295,7 @@ class HostConfig(dict):
self['MemorySwappiness'] = mem_swappiness self['MemorySwappiness'] = mem_swappiness
if shm_size is not None: if shm_size is not None:
if isinstance(shm_size, six.string_types): if isinstance(shm_size, str):
shm_size = parse_bytes(shm_size) shm_size = parse_bytes(shm_size)
self['ShmSize'] = shm_size self['ShmSize'] = shm_size
@ -358,7 +356,7 @@ class HostConfig(dict):
self['Devices'] = parse_devices(devices) self['Devices'] = parse_devices(devices)
if group_add: if group_add:
self['GroupAdd'] = [six.text_type(grp) for grp in group_add] self['GroupAdd'] = [str(grp) for grp in group_add]
if dns is not None: if dns is not None:
self['Dns'] = dns self['Dns'] = dns
@ -378,11 +376,11 @@ class HostConfig(dict):
if not isinstance(sysctls, dict): if not isinstance(sysctls, dict):
raise host_config_type_error('sysctls', sysctls, 'dict') raise host_config_type_error('sysctls', sysctls, 'dict')
self['Sysctls'] = {} self['Sysctls'] = {}
for k, v in six.iteritems(sysctls): for k, v in sysctls.items():
self['Sysctls'][k] = six.text_type(v) self['Sysctls'][k] = str(v)
if volumes_from is not None: if volumes_from is not None:
if isinstance(volumes_from, six.string_types): if isinstance(volumes_from, str):
volumes_from = volumes_from.split(',') volumes_from = volumes_from.split(',')
self['VolumesFrom'] = volumes_from self['VolumesFrom'] = volumes_from
@ -404,7 +402,7 @@ class HostConfig(dict):
if isinstance(lxc_conf, dict): if isinstance(lxc_conf, dict):
formatted = [] formatted = []
for k, v in six.iteritems(lxc_conf): for k, v in lxc_conf.items():
formatted.append({'Key': k, 'Value': str(v)}) formatted.append({'Key': k, 'Value': str(v)})
lxc_conf = formatted lxc_conf = formatted
@ -559,7 +557,7 @@ class HostConfig(dict):
self["PidsLimit"] = pids_limit self["PidsLimit"] = pids_limit
if isolation: if isolation:
if not isinstance(isolation, six.string_types): if not isinstance(isolation, str):
raise host_config_type_error('isolation', isolation, 'string') raise host_config_type_error('isolation', isolation, 'string')
if version_lt(version, '1.24'): if version_lt(version, '1.24'):
raise host_config_version_error('isolation', '1.24') raise host_config_version_error('isolation', '1.24')
@ -609,7 +607,7 @@ class HostConfig(dict):
self['CpuPercent'] = cpu_percent self['CpuPercent'] = cpu_percent
if nano_cpus: if nano_cpus:
if not isinstance(nano_cpus, six.integer_types): if not isinstance(nano_cpus, int):
raise host_config_type_error('nano_cpus', nano_cpus, 'int') raise host_config_type_error('nano_cpus', nano_cpus, 'int')
if version_lt(version, '1.25'): if version_lt(version, '1.25'):
raise host_config_version_error('nano_cpus', '1.25') raise host_config_version_error('nano_cpus', '1.25')
@ -699,17 +697,17 @@ class ContainerConfig(dict):
'version 1.29' 'version 1.29'
) )
if isinstance(command, six.string_types): if isinstance(command, str):
command = split_command(command) command = split_command(command)
if isinstance(entrypoint, six.string_types): if isinstance(entrypoint, str):
entrypoint = split_command(entrypoint) entrypoint = split_command(entrypoint)
if isinstance(environment, dict): if isinstance(environment, dict):
environment = format_environment(environment) environment = format_environment(environment)
if isinstance(labels, list): if isinstance(labels, list):
labels = dict((lbl, six.text_type('')) for lbl in labels) labels = {lbl: '' for lbl in labels}
if isinstance(ports, list): if isinstance(ports, list):
exposed_ports = {} exposed_ports = {}
@ -720,10 +718,10 @@ class ContainerConfig(dict):
if len(port_definition) == 2: if len(port_definition) == 2:
proto = port_definition[1] proto = port_definition[1]
port = port_definition[0] port = port_definition[0]
exposed_ports['{0}/{1}'.format(port, proto)] = {} exposed_ports[f'{port}/{proto}'] = {}
ports = exposed_ports ports = exposed_ports
if isinstance(volumes, six.string_types): if isinstance(volumes, str):
volumes = [volumes, ] volumes = [volumes, ]
if isinstance(volumes, list): if isinstance(volumes, list):
@ -752,7 +750,7 @@ class ContainerConfig(dict):
'Hostname': hostname, 'Hostname': hostname,
'Domainname': domainname, 'Domainname': domainname,
'ExposedPorts': ports, 'ExposedPorts': ports,
'User': six.text_type(user) if user is not None else None, 'User': str(user) if user is not None else None,
'Tty': tty, 'Tty': tty,
'OpenStdin': stdin_open, 'OpenStdin': stdin_open,
'StdinOnce': stdin_once, 'StdinOnce': stdin_once,

View File

@ -8,7 +8,7 @@ except ImportError:
from ..errors import DockerException from ..errors import DockerException
class CancellableStream(object): class CancellableStream:
""" """
Stream wrapper for real-time events, logs, etc. from the server. Stream wrapper for real-time events, logs, etc. from the server.
@ -32,7 +32,7 @@ class CancellableStream(object):
return next(self._stream) return next(self._stream)
except urllib3.exceptions.ProtocolError: except urllib3.exceptions.ProtocolError:
raise StopIteration raise StopIteration
except socket.error: except OSError:
raise StopIteration raise StopIteration
next = __next__ next = __next__

View File

@ -1,7 +1,5 @@
from .base import DictType from .base import DictType
import six
class Healthcheck(DictType): class Healthcheck(DictType):
""" """
@ -31,7 +29,7 @@ class Healthcheck(DictType):
""" """
def __init__(self, **kwargs): def __init__(self, **kwargs):
test = kwargs.get('test', kwargs.get('Test')) test = kwargs.get('test', kwargs.get('Test'))
if isinstance(test, six.string_types): if isinstance(test, str):
test = ["CMD-SHELL", test] test = ["CMD-SHELL", test]
interval = kwargs.get('interval', kwargs.get('Interval')) interval = kwargs.get('interval', kwargs.get('Interval'))
@ -39,7 +37,7 @@ class Healthcheck(DictType):
retries = kwargs.get('retries', kwargs.get('Retries')) retries = kwargs.get('retries', kwargs.get('Retries'))
start_period = kwargs.get('start_period', kwargs.get('StartPeriod')) start_period = kwargs.get('start_period', kwargs.get('StartPeriod'))
super(Healthcheck, self).__init__({ super().__init__({
'Test': test, 'Test': test,
'Interval': interval, 'Interval': interval,
'Timeout': timeout, 'Timeout': timeout,
@ -53,7 +51,7 @@ class Healthcheck(DictType):
@test.setter @test.setter
def test(self, value): def test(self, value):
if isinstance(value, six.string_types): if isinstance(value, str):
value = ["CMD-SHELL", value] value = ["CMD-SHELL", value]
self['Test'] = value self['Test'] = value

View File

@ -1,5 +1,3 @@
import six
from .. import errors from .. import errors
from ..constants import IS_WINDOWS_PLATFORM from ..constants import IS_WINDOWS_PLATFORM
from ..utils import ( from ..utils import (
@ -121,7 +119,7 @@ class ContainerSpec(dict):
privileges=None, isolation=None, init=None): privileges=None, isolation=None, init=None):
self['Image'] = image self['Image'] = image
if isinstance(command, six.string_types): if isinstance(command, str):
command = split_command(command) command = split_command(command)
self['Command'] = command self['Command'] = command
self['Args'] = args self['Args'] = args
@ -151,7 +149,7 @@ class ContainerSpec(dict):
if mounts is not None: if mounts is not None:
parsed_mounts = [] parsed_mounts = []
for mount in mounts: for mount in mounts:
if isinstance(mount, six.string_types): if isinstance(mount, str):
parsed_mounts.append(Mount.parse_mount_string(mount)) parsed_mounts.append(Mount.parse_mount_string(mount))
else: else:
# If mount already parsed # If mount already parsed
@ -224,7 +222,7 @@ class Mount(dict):
self['Source'] = source self['Source'] = source
if type not in ('bind', 'volume', 'tmpfs', 'npipe'): if type not in ('bind', 'volume', 'tmpfs', 'npipe'):
raise errors.InvalidArgument( raise errors.InvalidArgument(
'Unsupported mount type: "{}"'.format(type) f'Unsupported mount type: "{type}"'
) )
self['Type'] = type self['Type'] = type
self['ReadOnly'] = read_only self['ReadOnly'] = read_only
@ -260,7 +258,7 @@ class Mount(dict):
elif type == 'tmpfs': elif type == 'tmpfs':
tmpfs_opts = {} tmpfs_opts = {}
if tmpfs_mode: if tmpfs_mode:
if not isinstance(tmpfs_mode, six.integer_types): if not isinstance(tmpfs_mode, int):
raise errors.InvalidArgument( raise errors.InvalidArgument(
'tmpfs_mode must be an integer' 'tmpfs_mode must be an integer'
) )
@ -280,7 +278,7 @@ class Mount(dict):
parts = string.split(':') parts = string.split(':')
if len(parts) > 3: if len(parts) > 3:
raise errors.InvalidArgument( raise errors.InvalidArgument(
'Invalid mount format "{0}"'.format(string) f'Invalid mount format "{string}"'
) )
if len(parts) == 1: if len(parts) == 1:
return cls(target=parts[0], source=None) return cls(target=parts[0], source=None)
@ -347,7 +345,7 @@ def _convert_generic_resources_dict(generic_resources):
' (found {})'.format(type(generic_resources)) ' (found {})'.format(type(generic_resources))
) )
resources = [] resources = []
for kind, value in six.iteritems(generic_resources): for kind, value in generic_resources.items():
resource_type = None resource_type = None
if isinstance(value, int): if isinstance(value, int):
resource_type = 'DiscreteResourceSpec' resource_type = 'DiscreteResourceSpec'
@ -443,7 +441,7 @@ class RollbackConfig(UpdateConfig):
pass pass
class RestartConditionTypesEnum(object): class RestartConditionTypesEnum:
_values = ( _values = (
'none', 'none',
'on-failure', 'on-failure',
@ -474,7 +472,7 @@ class RestartPolicy(dict):
max_attempts=0, window=0): max_attempts=0, window=0):
if condition not in self.condition_types._values: if condition not in self.condition_types._values:
raise TypeError( raise TypeError(
'Invalid RestartPolicy condition {0}'.format(condition) f'Invalid RestartPolicy condition {condition}'
) )
self['Condition'] = condition self['Condition'] = condition
@ -533,7 +531,7 @@ def convert_service_ports(ports):
) )
result = [] result = []
for k, v in six.iteritems(ports): for k, v in ports.items():
port_spec = { port_spec = {
'Protocol': 'tcp', 'Protocol': 'tcp',
'PublishedPort': k 'PublishedPort': k

View File

@ -4,8 +4,6 @@ import re
import tarfile import tarfile
import tempfile import tempfile
import six
from .fnmatch import fnmatch from .fnmatch import fnmatch
from ..constants import IS_WINDOWS_PLATFORM from ..constants import IS_WINDOWS_PLATFORM
@ -69,7 +67,7 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj)
if files is None: if files is None:
files = build_file_list(root) files = build_file_list(root)
extra_names = set(e[0] for e in extra_files) extra_names = {e[0] for e in extra_files}
for path in files: for path in files:
if path in extra_names: if path in extra_names:
# Extra files override context files with the same name # Extra files override context files with the same name
@ -95,9 +93,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
try: try:
with open(full_path, 'rb') as f: with open(full_path, 'rb') as f:
t.addfile(i, f) t.addfile(i, f)
except IOError: except OSError:
raise IOError( raise OSError(
'Can not read file in context: {}'.format(full_path) f'Can not read file in context: {full_path}'
) )
else: else:
# Directories, FIFOs, symlinks... don't need to be read. # Directories, FIFOs, symlinks... don't need to be read.
@ -119,12 +117,8 @@ def mkbuildcontext(dockerfile):
t = tarfile.open(mode='w', fileobj=f) t = tarfile.open(mode='w', fileobj=f)
if isinstance(dockerfile, io.StringIO): if isinstance(dockerfile, io.StringIO):
dfinfo = tarfile.TarInfo('Dockerfile') dfinfo = tarfile.TarInfo('Dockerfile')
if six.PY3: raise TypeError('Please use io.BytesIO to create in-memory '
raise TypeError('Please use io.BytesIO to create in-memory ' 'Dockerfiles with Python 3')
'Dockerfiles with Python 3')
else:
dfinfo.size = len(dockerfile.getvalue())
dockerfile.seek(0)
elif isinstance(dockerfile, io.BytesIO): elif isinstance(dockerfile, io.BytesIO):
dfinfo = tarfile.TarInfo('Dockerfile') dfinfo = tarfile.TarInfo('Dockerfile')
dfinfo.size = len(dockerfile.getvalue()) dfinfo.size = len(dockerfile.getvalue())
@ -154,7 +148,7 @@ def walk(root, patterns, default=True):
# Heavily based on # Heavily based on
# https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go # https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go
class PatternMatcher(object): class PatternMatcher:
def __init__(self, patterns): def __init__(self, patterns):
self.patterns = list(filter( self.patterns = list(filter(
lambda p: p.dirs, [Pattern(p) for p in patterns] lambda p: p.dirs, [Pattern(p) for p in patterns]
@ -212,13 +206,12 @@ class PatternMatcher(object):
break break
if skip: if skip:
continue continue
for sub in rec_walk(cur): yield from rec_walk(cur)
yield sub
return rec_walk(root) return rec_walk(root)
class Pattern(object): class Pattern:
def __init__(self, pattern_str): def __init__(self, pattern_str):
self.exclusion = False self.exclusion = False
if pattern_str.startswith('!'): if pattern_str.startswith('!'):

View File

@ -18,11 +18,11 @@ def find_config_file(config_path=None):
os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4 os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4
])) ]))
log.debug("Trying paths: {0}".format(repr(paths))) log.debug(f"Trying paths: {repr(paths)}")
for path in paths: for path in paths:
if os.path.exists(path): if os.path.exists(path):
log.debug("Found file at path: {0}".format(path)) log.debug(f"Found file at path: {path}")
return path return path
log.debug("No config file found") log.debug("No config file found")
@ -57,7 +57,7 @@ def load_general_config(config_path=None):
try: try:
with open(config_file) as f: with open(config_file) as f:
return json.load(f) return json.load(f)
except (IOError, ValueError) as e: except (OSError, ValueError) as e:
# In the case of a legacy `.dockercfg` file, we won't # In the case of a legacy `.dockercfg` file, we won't
# be able to load any JSON data. # be able to load any JSON data.
log.debug(e) log.debug(e)

View File

@ -27,7 +27,7 @@ def minimum_version(version):
def wrapper(self, *args, **kwargs): def wrapper(self, *args, **kwargs):
if utils.version_lt(self._version, version): if utils.version_lt(self._version, version):
raise errors.InvalidVersion( raise errors.InvalidVersion(
'{0} is not available for version < {1}'.format( '{} is not available for version < {}'.format(
f.__name__, version f.__name__, version
) )
) )

View File

@ -108,7 +108,7 @@ def translate(pat):
stuff = '^' + stuff[1:] stuff = '^' + stuff[1:]
elif stuff[0] == '^': elif stuff[0] == '^':
stuff = '\\' + stuff stuff = '\\' + stuff
res = '%s[%s]' % (res, stuff) res = f'{res}[{stuff}]'
else: else:
res = res + re.escape(c) res = res + re.escape(c)

View File

@ -1,11 +1,6 @@
from __future__ import absolute_import
from __future__ import unicode_literals
import json import json
import json.decoder import json.decoder
import six
from ..errors import StreamParseError from ..errors import StreamParseError
@ -20,7 +15,7 @@ def stream_as_text(stream):
instead of byte streams. instead of byte streams.
""" """
for data in stream: for data in stream:
if not isinstance(data, six.text_type): if not isinstance(data, str):
data = data.decode('utf-8', 'replace') data = data.decode('utf-8', 'replace')
yield data yield data
@ -46,8 +41,8 @@ def json_stream(stream):
return split_buffer(stream, json_splitter, json_decoder.decode) return split_buffer(stream, json_splitter, json_decoder.decode)
def line_splitter(buffer, separator=u'\n'): def line_splitter(buffer, separator='\n'):
index = buffer.find(six.text_type(separator)) index = buffer.find(str(separator))
if index == -1: if index == -1:
return None return None
return buffer[:index + 1], buffer[index + 1:] return buffer[:index + 1], buffer[index + 1:]
@ -61,7 +56,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
of the input. of the input.
""" """
splitter = splitter or line_splitter splitter = splitter or line_splitter
buffered = six.text_type('') buffered = ''
for data in stream_as_text(stream): for data in stream_as_text(stream):
buffered += data buffered += data

View File

@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False):
if not end: if not end:
return [start + proto] return [start + proto]
if randomly_available_port: if randomly_available_port:
return ['{}-{}'.format(start, end) + proto] return [f'{start}-{end}' + proto]
return [str(port) + proto for port in range(int(start), int(end) + 1)] return [str(port) + proto for port in range(int(start), int(end) + 1)]

View File

@ -4,8 +4,6 @@ import select
import socket as pysocket import socket as pysocket
import struct import struct
import six
try: try:
from ..transport import NpipeSocket from ..transport import NpipeSocket
except ImportError: except ImportError:
@ -27,16 +25,16 @@ def read(socket, n=4096):
recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK) recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK)
if six.PY3 and not isinstance(socket, NpipeSocket): if not isinstance(socket, NpipeSocket):
select.select([socket], [], []) select.select([socket], [], [])
try: try:
if hasattr(socket, 'recv'): if hasattr(socket, 'recv'):
return socket.recv(n) return socket.recv(n)
if six.PY3 and isinstance(socket, getattr(pysocket, 'SocketIO')): if isinstance(socket, getattr(pysocket, 'SocketIO')):
return socket.read(n) return socket.read(n)
return os.read(socket.fileno(), n) return os.read(socket.fileno(), n)
except EnvironmentError as e: except OSError as e:
if e.errno not in recoverable_errors: if e.errno not in recoverable_errors:
raise raise
@ -46,7 +44,7 @@ def read_exactly(socket, n):
Reads exactly n bytes from socket Reads exactly n bytes from socket
Raises SocketError if there isn't enough data Raises SocketError if there isn't enough data
""" """
data = six.binary_type() data = bytes()
while len(data) < n: while len(data) < n:
next_data = read(socket, n - len(data)) next_data = read(socket, n - len(data))
if not next_data: if not next_data:
@ -134,7 +132,7 @@ def consume_socket_output(frames, demux=False):
if demux is False: if demux is False:
# If the streams are multiplexed, the generator returns strings, that # If the streams are multiplexed, the generator returns strings, that
# we just need to concatenate. # we just need to concatenate.
return six.binary_type().join(frames) return bytes().join(frames)
# If the streams are demultiplexed, the generator yields tuples # If the streams are demultiplexed, the generator yields tuples
# (stdout, stderr) # (stdout, stderr)
@ -166,4 +164,4 @@ def demux_adaptor(stream_id, data):
elif stream_id == STDERR: elif stream_id == STDERR:
return (None, data) return (None, data)
else: else:
raise ValueError('{0} is not a valid stream'.format(stream_id)) raise ValueError(f'{stream_id} is not a valid stream')

View File

@ -136,13 +136,13 @@ def convert_volume_binds(binds):
mode = 'rw' mode = 'rw'
result.append( result.append(
str('{0}:{1}:{2}').format(k, bind, mode) f'{k}:{bind}:{mode}'
) )
else: else:
if isinstance(v, bytes): if isinstance(v, bytes):
v = v.decode('utf-8') v = v.decode('utf-8')
result.append( result.append(
str('{0}:{1}:rw').format(k, v) f'{k}:{v}:rw'
) )
return result return result
@ -233,14 +233,14 @@ def parse_host(addr, is_win32=False, tls=False):
if proto not in ('tcp', 'unix', 'npipe', 'ssh'): if proto not in ('tcp', 'unix', 'npipe', 'ssh'):
raise errors.DockerException( raise errors.DockerException(
"Invalid bind address protocol: {}".format(addr) f"Invalid bind address protocol: {addr}"
) )
if proto == 'tcp' and not parsed_url.netloc: if proto == 'tcp' and not parsed_url.netloc:
# "tcp://" is exceptionally disallowed by convention; # "tcp://" is exceptionally disallowed by convention;
# omitting a hostname for other protocols is fine # omitting a hostname for other protocols is fine
raise errors.DockerException( raise errors.DockerException(
'Invalid bind address format: {}'.format(addr) f'Invalid bind address format: {addr}'
) )
if any([ if any([
@ -248,7 +248,7 @@ def parse_host(addr, is_win32=False, tls=False):
parsed_url.password parsed_url.password
]): ]):
raise errors.DockerException( raise errors.DockerException(
'Invalid bind address format: {}'.format(addr) f'Invalid bind address format: {addr}'
) )
if parsed_url.path and proto == 'ssh': if parsed_url.path and proto == 'ssh':
@ -285,8 +285,8 @@ def parse_host(addr, is_win32=False, tls=False):
proto = 'http+unix' proto = 'http+unix'
if proto in ('http+unix', 'npipe'): if proto in ('http+unix', 'npipe'):
return "{}://{}".format(proto, path).rstrip('/') return f"{proto}://{path}".rstrip('/')
return '{0}://{1}:{2}{3}'.format(proto, host, port, path).rstrip('/') return f'{proto}://{host}:{port}{path}'.rstrip('/')
def parse_devices(devices): def parse_devices(devices):
@ -297,7 +297,7 @@ def parse_devices(devices):
continue continue
if not isinstance(device, str): if not isinstance(device, str):
raise errors.DockerException( raise errors.DockerException(
'Invalid device type {0}'.format(type(device)) f'Invalid device type {type(device)}'
) )
device_mapping = device.split(':') device_mapping = device.split(':')
if device_mapping: if device_mapping:
@ -408,7 +408,7 @@ def parse_bytes(s):
digits = float(digits_part) digits = float(digits_part)
except ValueError: except ValueError:
raise errors.DockerException( raise errors.DockerException(
'Failed converting the string value for memory ({0}) to' 'Failed converting the string value for memory ({}) to'
' an integer.'.format(digits_part) ' an integer.'.format(digits_part)
) )
@ -416,7 +416,7 @@ def parse_bytes(s):
s = int(digits * units[suffix]) s = int(digits * units[suffix])
else: else:
raise errors.DockerException( raise errors.DockerException(
'The specified value for memory ({0}) should specify the' 'The specified value for memory ({}) should specify the'
' units. The postfix should be one of the `b` `k` `m` `g`' ' units. The postfix should be one of the `b` `k` `m` `g`'
' characters'.format(s) ' characters'.format(s)
) )
@ -428,7 +428,7 @@ def normalize_links(links):
if isinstance(links, dict): if isinstance(links, dict):
links = iter(links.items()) links = iter(links.items())
return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)] return [f'{k}:{v}' if v else k for k, v in sorted(links)]
def parse_env_file(env_file): def parse_env_file(env_file):
@ -438,7 +438,7 @@ def parse_env_file(env_file):
""" """
environment = {} environment = {}
with open(env_file, 'r') as f: with open(env_file) as f:
for line in f: for line in f:
if line[0] == '#': if line[0] == '#':
@ -454,7 +454,7 @@ def parse_env_file(env_file):
environment[k] = v environment[k] = v
else: else:
raise errors.DockerException( raise errors.DockerException(
'Invalid line in environment file {0}:\n{1}'.format( 'Invalid line in environment file {}:\n{}'.format(
env_file, line)) env_file, line))
return environment return environment
@ -471,7 +471,7 @@ def format_environment(environment):
if isinstance(value, bytes): if isinstance(value, bytes):
value = value.decode('utf-8') value = value.decode('utf-8')
return u'{key}={value}'.format(key=key, value=value) return f'{key}={value}'
return [format_env(*var) for var in iter(environment.items())] return [format_env(*var) for var in iter(environment.items())]
@ -479,11 +479,11 @@ def format_extra_hosts(extra_hosts, task=False):
# Use format dictated by Swarm API if container is part of a task # Use format dictated by Swarm API if container is part of a task
if task: if task:
return [ return [
'{} {}'.format(v, k) for k, v in sorted(iter(extra_hosts.items())) f'{v} {k}' for k, v in sorted(iter(extra_hosts.items()))
] ]
return [ return [
'{}:{}'.format(k, v) for k, v in sorted(iter(extra_hosts.items())) f'{k}:{v}' for k, v in sorted(iter(extra_hosts.items()))
] ]

View File

@ -1,2 +1,2 @@
version = "4.5.0-dev" version = "5.1.0-dev"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) version_info = tuple(int(d) for d in version.split("-")[0].split("."))

View File

@ -1,6 +1,46 @@
Change log Change log
========== ==========
5.0.2
-----
[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/75?closed=1)
### Bugfixes
- Fix `disable_buffering` regression
5.0.1
-----
[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/74?closed=1)
### Bugfixes
- Bring back support for ssh identity file
- Cleanup remaining python-2 dependencies
- Fix image save example in docs
### Miscellaneous
- Bump urllib3 to 1.26.5
- Bump requests to 2.26.0
5.0.0
-----
[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/70?closed=1)
### Breaking changes
- Remove support for Python 2.7
- Make Python 3.6 the minimum version supported
### Features
- Add `limit` parameter to image search endpoint
### Bugfixes
- Fix `KeyError` exception on secret create
- Verify TLS keys loaded from docker contexts
- Update PORT_SPEC regex to allow square brackets for IPv6 addresses
- Fix containers and images documentation examples
4.4.4 4.4.4
----- -----

View File

@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# #
# docker-sdk-python documentation build configuration file, created by # docker-sdk-python documentation build configuration file, created by
# sphinx-quickstart on Wed Sep 14 15:48:58 2016. # sphinx-quickstart on Wed Sep 14 15:48:58 2016.
@ -60,21 +59,21 @@ source_suffix = ['.rst', '.md']
master_doc = 'index' master_doc = 'index'
# General information about the project. # General information about the project.
project = u'Docker SDK for Python' project = 'Docker SDK for Python'
year = datetime.datetime.now().year year = datetime.datetime.now().year
copyright = u'%d Docker Inc' % year copyright = '%d Docker Inc' % year
author = u'Docker Inc' author = 'Docker Inc'
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the # |version| and |release|, also used in various other places throughout the
# built documents. # built documents.
# #
with open('../docker/version.py', 'r') as vfile: with open('../docker/version.py') as vfile:
exec(vfile.read()) exec(vfile.read())
# The full version, including alpha/beta/rc tags. # The full version, including alpha/beta/rc tags.
release = version release = version
# The short X.Y version. # The short X.Y version.
version = '{}.{}'.format(version_info[0], version_info[1]) version = f'{version_info[0]}.{version_info[1]}'
# The language for content autogenerated by Sphinx. Refer to documentation # The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages. # for a list of supported languages.
@ -283,8 +282,8 @@ latex_elements = {
# (source start file, target name, title, # (source start file, target name, title,
# author, documentclass [howto, manual, or own class]). # author, documentclass [howto, manual, or own class]).
latex_documents = [ latex_documents = [
(master_doc, 'docker-sdk-python.tex', u'docker-sdk-python Documentation', (master_doc, 'docker-sdk-python.tex', 'docker-sdk-python Documentation',
u'Docker Inc.', 'manual'), 'Docker Inc.', 'manual'),
] ]
# The name of an image file (relative to this directory) to place at the top of # The name of an image file (relative to this directory) to place at the top of
@ -325,7 +324,7 @@ latex_documents = [
# One entry per manual page. List of tuples # One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section). # (source start file, name, description, authors, manual section).
man_pages = [ man_pages = [
(master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation', (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation',
[author], 1) [author], 1)
] ]
@ -340,7 +339,7 @@ man_pages = [
# (source start file, target name, title, author, # (source start file, target name, title, author,
# dir menu entry, description, category) # dir menu entry, description, category)
texinfo_documents = [ texinfo_documents = [
(master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation', (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation',
author, 'docker-sdk-python', 'One line description of project.', author, 'docker-sdk-python', 'One line description of project.',
'Miscellaneous'), 'Miscellaneous'),
] ]

View File

@ -11,7 +11,7 @@ paramiko==2.4.2
pycparser==2.17 pycparser==2.17
pyOpenSSL==18.0.0 pyOpenSSL==18.0.0
pyparsing==2.2.0 pyparsing==2.2.0
pywin32==227; sys_platform == 'win32' pywin32==301; sys_platform == 'win32'
requests==2.20.0 requests==2.26.0
urllib3==1.24.3 urllib3==1.26.5
websocket-client==0.56.0 websocket-client==0.56.0

View File

@ -52,8 +52,8 @@ class Version(namedtuple('_Version', 'major minor patch stage edition')):
return (int(self.major), int(self.minor), int(self.patch)) + stage return (int(self.major), int(self.minor), int(self.patch)) + stage
def __str__(self): def __str__(self):
stage = '-{}'.format(self.stage) if self.stage else '' stage = f'-{self.stage}' if self.stage else ''
edition = '-{}'.format(self.edition) if self.edition else '' edition = f'-{self.edition}' if self.edition else ''
return '.'.join(map(str, self[:3])) + edition + stage return '.'.join(map(str, self[:3])) + edition + stage

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
from __future__ import print_function
import codecs import codecs
import os import os

View File

@ -11,7 +11,6 @@ import time
import docker import docker
import paramiko import paramiko
import pytest import pytest
import six
def make_tree(dirs, files): def make_tree(dirs, files):
@ -54,7 +53,7 @@ def requires_api_version(version):
return pytest.mark.skipif( return pytest.mark.skipif(
docker.utils.version_lt(test_version, version), docker.utils.version_lt(test_version, version),
reason="API version is too low (< {0})".format(version) reason=f"API version is too low (< {version})"
) )
@ -86,7 +85,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40):
def random_name(): def random_name():
return u'dockerpytest_{0:x}'.format(random.getrandbits(64)) return f'dockerpytest_{random.getrandbits(64):x}'
def force_leave_swarm(client): def force_leave_swarm(client):
@ -105,11 +104,11 @@ def force_leave_swarm(client):
def swarm_listen_addr(): def swarm_listen_addr():
return '0.0.0.0:{0}'.format(random.randrange(10000, 25000)) return f'0.0.0.0:{random.randrange(10000, 25000)}'
def assert_cat_socket_detached_with_keys(sock, inputs): def assert_cat_socket_detached_with_keys(sock, inputs):
if six.PY3 and hasattr(sock, '_sock'): if hasattr(sock, '_sock'):
sock = sock._sock sock = sock._sock
for i in inputs: for i in inputs:
@ -128,7 +127,7 @@ def assert_cat_socket_detached_with_keys(sock, inputs):
# of the daemon no longer cause this to raise an error. # of the daemon no longer cause this to raise an error.
try: try:
sock.sendall(b'make sure the socket is closed\n') sock.sendall(b'make sure the socket is closed\n')
except socket.error: except OSError:
return return
sock.sendall(b"make sure the socket is closed\n") sock.sendall(b"make sure the socket is closed\n")

View File

@ -7,7 +7,6 @@ from docker import errors
from docker.utils.proxy import ProxyConfig from docker.utils.proxy import ProxyConfig
import pytest import pytest
import six
from .base import BaseAPIIntegrationTest, TEST_IMG from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental from ..helpers import random_name, requires_api_version, requires_experimental
@ -71,9 +70,8 @@ class BuildTest(BaseAPIIntegrationTest):
assert len(logs) > 0 assert len(logs) > 0
def test_build_from_stringio(self): def test_build_from_stringio(self):
if six.PY3: return
return script = io.StringIO('\n'.join([
script = io.StringIO(six.text_type('\n').join([
'FROM busybox', 'FROM busybox',
'RUN mkdir -p /tmp/test', 'RUN mkdir -p /tmp/test',
'EXPOSE 8080', 'EXPOSE 8080',
@ -83,8 +81,7 @@ class BuildTest(BaseAPIIntegrationTest):
stream = self.client.build(fileobj=script) stream = self.client.build(fileobj=script)
logs = '' logs = ''
for chunk in stream: for chunk in stream:
if six.PY3: chunk = chunk.decode('utf-8')
chunk = chunk.decode('utf-8')
logs += chunk logs += chunk
assert logs != '' assert logs != ''
@ -135,8 +132,7 @@ class BuildTest(BaseAPIIntegrationTest):
self.client.wait(c) self.client.wait(c)
logs = self.client.logs(c) logs = self.client.logs(c)
if six.PY3: logs = logs.decode('utf-8')
logs = logs.decode('utf-8')
assert sorted(list(filter(None, logs.split('\n')))) == sorted([ assert sorted(list(filter(None, logs.split('\n')))) == sorted([
'/test/#file.txt', '/test/#file.txt',
@ -340,8 +336,7 @@ class BuildTest(BaseAPIIntegrationTest):
assert self.client.inspect_image(img_name) assert self.client.inspect_image(img_name)
ctnr = self.run_container(img_name, 'cat /hosts-file') ctnr = self.run_container(img_name, 'cat /hosts-file')
logs = self.client.logs(ctnr) logs = self.client.logs(ctnr)
if six.PY3: logs = logs.decode('utf-8')
logs = logs.decode('utf-8')
assert '127.0.0.1\textrahost.local.test' in logs assert '127.0.0.1\textrahost.local.test' in logs
assert '127.0.0.1\thello.world.test' in logs assert '127.0.0.1\thello.world.test' in logs
@ -376,7 +371,7 @@ class BuildTest(BaseAPIIntegrationTest):
snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
script = io.BytesIO(b'\n'.join([ script = io.BytesIO(b'\n'.join([
b'FROM busybox', b'FROM busybox',
'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8')
])) ]))
stream = self.client.build( stream = self.client.build(
@ -440,7 +435,7 @@ class BuildTest(BaseAPIIntegrationTest):
@requires_api_version('1.32') @requires_api_version('1.32')
@requires_experimental(until=None) @requires_experimental(until=None)
def test_build_invalid_platform(self): def test_build_invalid_platform(self):
script = io.BytesIO('FROM busybox\n'.encode('ascii')) script = io.BytesIO(b'FROM busybox\n')
with pytest.raises(errors.APIError) as excinfo: with pytest.raises(errors.APIError) as excinfo:
stream = self.client.build(fileobj=script, platform='foobar') stream = self.client.build(fileobj=script, platform='foobar')

View File

@ -72,6 +72,6 @@ class UnixconnTest(unittest.TestCase):
client.close() client.close()
del client del client
assert len(w) == 0, "No warnings produced: {0}".format( assert len(w) == 0, "No warnings produced: {}".format(
w[0].message w[0].message
) )

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import docker import docker
import pytest import pytest
@ -31,7 +29,7 @@ class ConfigAPITest(BaseAPIIntegrationTest):
def test_create_config_unicode_data(self): def test_create_config_unicode_data(self):
config_id = self.client.create_config( config_id = self.client.create_config(
'favorite_character', u'いざよいさくや' 'favorite_character', 'いざよいさくや'
) )
self.tmp_configs.append(config_id) self.tmp_configs.append(config_id)
assert 'ID' in config_id assert 'ID' in config_id

View File

@ -34,7 +34,7 @@ class ListContainersTest(BaseAPIIntegrationTest):
assert len(retrieved) == 1 assert len(retrieved) == 1
retrieved = retrieved[0] retrieved = retrieved[0]
assert 'Command' in retrieved assert 'Command' in retrieved
assert retrieved['Command'] == str('true') assert retrieved['Command'] == 'true'
assert 'Image' in retrieved assert 'Image' in retrieved
assert re.search(r'alpine:.*', retrieved['Image']) assert re.search(r'alpine:.*', retrieved['Image'])
assert 'Status' in retrieved assert 'Status' in retrieved
@ -104,10 +104,10 @@ class CreateContainerTest(BaseAPIIntegrationTest):
assert self.client.wait(container3_id)['StatusCode'] == 0 assert self.client.wait(container3_id)['StatusCode'] == 0
logs = self.client.logs(container3_id).decode('utf-8') logs = self.client.logs(container3_id).decode('utf-8')
assert '{0}_NAME='.format(link_env_prefix1) in logs assert f'{link_env_prefix1}_NAME=' in logs
assert '{0}_ENV_FOO=1'.format(link_env_prefix1) in logs assert f'{link_env_prefix1}_ENV_FOO=1' in logs
assert '{0}_NAME='.format(link_env_prefix2) in logs assert f'{link_env_prefix2}_NAME=' in logs
assert '{0}_ENV_FOO=1'.format(link_env_prefix2) in logs assert f'{link_env_prefix2}_ENV_FOO=1' in logs
def test_create_with_restart_policy(self): def test_create_with_restart_policy(self):
container = self.client.create_container( container = self.client.create_container(
@ -487,7 +487,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
) )
class VolumeBindTest(BaseAPIIntegrationTest): class VolumeBindTest(BaseAPIIntegrationTest):
def setUp(self): def setUp(self):
super(VolumeBindTest, self).setUp() super().setUp()
self.mount_dest = '/mnt' self.mount_dest = '/mnt'
@ -618,7 +618,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
def test_get_file_archive_from_container(self): def test_get_file_archive_from_container(self):
data = 'The Maid and the Pocket Watch of Blood' data = 'The Maid and the Pocket Watch of Blood'
ctnr = self.client.create_container( ctnr = self.client.create_container(
TEST_IMG, 'sh -c "echo {0} > /vol1/data.txt"'.format(data), TEST_IMG, f'sh -c "echo {data} > /vol1/data.txt"',
volumes=['/vol1'] volumes=['/vol1']
) )
self.tmp_containers.append(ctnr) self.tmp_containers.append(ctnr)
@ -636,7 +636,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
def test_get_file_stat_from_container(self): def test_get_file_stat_from_container(self):
data = 'The Maid and the Pocket Watch of Blood' data = 'The Maid and the Pocket Watch of Blood'
ctnr = self.client.create_container( ctnr = self.client.create_container(
TEST_IMG, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data), TEST_IMG, f'sh -c "echo -n {data} > /vol1/data.txt"',
volumes=['/vol1'] volumes=['/vol1']
) )
self.tmp_containers.append(ctnr) self.tmp_containers.append(ctnr)
@ -655,7 +655,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
test_file.seek(0) test_file.seek(0)
ctnr = self.client.create_container( ctnr = self.client.create_container(
TEST_IMG, TEST_IMG,
'cat {0}'.format( 'cat {}'.format(
os.path.join('/vol1/', os.path.basename(test_file.name)) os.path.join('/vol1/', os.path.basename(test_file.name))
), ),
volumes=['/vol1'] volumes=['/vol1']
@ -701,7 +701,7 @@ class RenameContainerTest(BaseAPIIntegrationTest):
if version == '1.5.0': if version == '1.5.0':
assert name == inspect['Name'] assert name == inspect['Name']
else: else:
assert '/{0}'.format(name) == inspect['Name'] assert f'/{name}' == inspect['Name']
class StartContainerTest(BaseAPIIntegrationTest): class StartContainerTest(BaseAPIIntegrationTest):
@ -807,7 +807,7 @@ class LogsTest(BaseAPIIntegrationTest):
def test_logs(self): def test_logs(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)' snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container( container = self.client.create_container(
TEST_IMG, 'echo {0}'.format(snippet) TEST_IMG, f'echo {snippet}'
) )
id = container['Id'] id = container['Id']
self.tmp_containers.append(id) self.tmp_containers.append(id)
@ -821,7 +821,7 @@ class LogsTest(BaseAPIIntegrationTest):
snippet = '''Line1 snippet = '''Line1
Line2''' Line2'''
container = self.client.create_container( container = self.client.create_container(
TEST_IMG, 'echo "{0}"'.format(snippet) TEST_IMG, f'echo "{snippet}"'
) )
id = container['Id'] id = container['Id']
self.tmp_containers.append(id) self.tmp_containers.append(id)
@ -834,7 +834,7 @@ Line2'''
def test_logs_streaming_and_follow(self): def test_logs_streaming_and_follow(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)' snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container( container = self.client.create_container(
TEST_IMG, 'echo {0}'.format(snippet) TEST_IMG, f'echo {snippet}'
) )
id = container['Id'] id = container['Id']
self.tmp_containers.append(id) self.tmp_containers.append(id)
@ -854,7 +854,7 @@ Line2'''
def test_logs_streaming_and_follow_and_cancel(self): def test_logs_streaming_and_follow_and_cancel(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)' snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container( container = self.client.create_container(
TEST_IMG, 'sh -c "echo \\"{0}\\" && sleep 3"'.format(snippet) TEST_IMG, f'sh -c "echo \\"{snippet}\\" && sleep 3"'
) )
id = container['Id'] id = container['Id']
self.tmp_containers.append(id) self.tmp_containers.append(id)
@ -872,7 +872,7 @@ Line2'''
def test_logs_with_dict_instead_of_id(self): def test_logs_with_dict_instead_of_id(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)' snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container( container = self.client.create_container(
TEST_IMG, 'echo {0}'.format(snippet) TEST_IMG, f'echo {snippet}'
) )
id = container['Id'] id = container['Id']
self.tmp_containers.append(id) self.tmp_containers.append(id)
@ -885,7 +885,7 @@ Line2'''
def test_logs_with_tail_0(self): def test_logs_with_tail_0(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)' snippet = 'Flowering Nights (Sakuya Iyazoi)'
container = self.client.create_container( container = self.client.create_container(
TEST_IMG, 'echo "{0}"'.format(snippet) TEST_IMG, f'echo "{snippet}"'
) )
id = container['Id'] id = container['Id']
self.tmp_containers.append(id) self.tmp_containers.append(id)
@ -899,7 +899,7 @@ Line2'''
def test_logs_with_until(self): def test_logs_with_until(self):
snippet = 'Shanghai Teahouse (Hong Meiling)' snippet = 'Shanghai Teahouse (Hong Meiling)'
container = self.client.create_container( container = self.client.create_container(
TEST_IMG, 'echo "{0}"'.format(snippet) TEST_IMG, f'echo "{snippet}"'
) )
self.tmp_containers.append(container) self.tmp_containers.append(container)
@ -1095,7 +1095,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container) self.client.start(container)
res = self.client.top(container) res = self.client.top(container)
if not IS_WINDOWS_PLATFORM: if not IS_WINDOWS_PLATFORM:
assert res['Titles'] == [u'PID', u'USER', u'TIME', u'COMMAND'] assert res['Titles'] == ['PID', 'USER', 'TIME', 'COMMAND']
assert len(res['Processes']) == 1 assert len(res['Processes']) == 1
assert res['Processes'][0][-1] == 'sleep 60' assert res['Processes'][0][-1] == 'sleep 60'
self.client.kill(container) self.client.kill(container)
@ -1113,7 +1113,7 @@ class ContainerTopTest(BaseAPIIntegrationTest):
self.client.start(container) self.client.start(container)
res = self.client.top(container, '-eopid,user') res = self.client.top(container, '-eopid,user')
assert res['Titles'] == [u'PID', u'USER'] assert res['Titles'] == ['PID', 'USER']
assert len(res['Processes']) == 1 assert len(res['Processes']) == 1
assert res['Processes'][0][10] == 'sleep 60' assert res['Processes'][0][10] == 'sleep 60'
@ -1203,7 +1203,7 @@ class AttachContainerTest(BaseAPIIntegrationTest):
def test_run_container_reading_socket(self): def test_run_container_reading_socket(self):
line = 'hi there and stuff and things, words!' line = 'hi there and stuff and things, words!'
# `echo` appends CRLF, `printf` doesn't # `echo` appends CRLF, `printf` doesn't
command = "printf '{0}'".format(line) command = f"printf '{line}'"
container = self.client.create_container(TEST_IMG, command, container = self.client.create_container(TEST_IMG, command,
detach=True, tty=False) detach=True, tty=False)
self.tmp_containers.append(container) self.tmp_containers.append(container)
@ -1487,7 +1487,7 @@ class LinkTest(BaseAPIIntegrationTest):
# Remove link # Remove link
linked_name = self.client.inspect_container(container2_id)['Name'][1:] linked_name = self.client.inspect_container(container2_id)['Name'][1:]
link_name = '%s/%s' % (linked_name, link_alias) link_name = f'{linked_name}/{link_alias}'
self.client.remove_container(link_name, link=True) self.client.remove_container(link_name, link=True)
# Link is gone # Link is gone

View File

@ -239,7 +239,7 @@ class ExecDemuxTest(BaseAPIIntegrationTest):
) )
def setUp(self): def setUp(self):
super(ExecDemuxTest, self).setUp() super().setUp()
self.container = self.client.create_container( self.container = self.client.create_container(
TEST_IMG, 'cat', detach=True, stdin_open=True TEST_IMG, 'cat', detach=True, stdin_open=True
) )

View File

@ -265,7 +265,7 @@ class ImportImageTest(BaseAPIIntegrationTest):
output = self.client.load_image(data) output = self.client.load_image(data)
assert any([ assert any([
line for line in output line for line in output
if 'Loaded image: {}'.format(test_img) in line.get('stream', '') if f'Loaded image: {test_img}' in line.get('stream', '')
]) ])
@contextlib.contextmanager @contextlib.contextmanager
@ -284,7 +284,7 @@ class ImportImageTest(BaseAPIIntegrationTest):
thread.setDaemon(True) thread.setDaemon(True)
thread.start() thread.start()
yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1]) yield f'http://{socket.gethostname()}:{server.server_address[1]}'
server.shutdown() server.shutdown()
@ -350,7 +350,7 @@ class SaveLoadImagesTest(BaseAPIIntegrationTest):
result = self.client.load_image(f.read()) result = self.client.load_image(f.read())
success = False success = False
result_line = 'Loaded image: {}\n'.format(TEST_IMG) result_line = f'Loaded image: {TEST_IMG}\n'
for data in result: for data in result:
print(data) print(data)
if 'stream' in data: if 'stream' in data:

View File

@ -9,7 +9,7 @@ from .base import BaseAPIIntegrationTest, TEST_IMG
class TestNetworks(BaseAPIIntegrationTest): class TestNetworks(BaseAPIIntegrationTest):
def tearDown(self): def tearDown(self):
self.client.leave_swarm(force=True) self.client.leave_swarm(force=True)
super(TestNetworks, self).tearDown() super().tearDown()
def create_network(self, *args, **kwargs): def create_network(self, *args, **kwargs):
net_name = random_name() net_name = random_name()

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import docker import docker
import pytest import pytest
@ -31,7 +29,7 @@ class SecretAPITest(BaseAPIIntegrationTest):
def test_create_secret_unicode_data(self): def test_create_secret_unicode_data(self):
secret_id = self.client.create_secret( secret_id = self.client.create_secret(
'favorite_character', u'いざよいさくや' 'favorite_character', 'いざよいさくや'
) )
self.tmp_secrets.append(secret_id) self.tmp_secrets.append(secret_id)
assert 'ID' in secret_id assert 'ID' in secret_id

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import random import random
import time import time
@ -30,10 +28,10 @@ class ServiceTest(BaseAPIIntegrationTest):
self.client.remove_service(service['ID']) self.client.remove_service(service['ID'])
except docker.errors.APIError: except docker.errors.APIError:
pass pass
super(ServiceTest, self).tearDown() super().tearDown()
def get_service_name(self): def get_service_name(self):
return 'dockerpytest_{0:x}'.format(random.getrandbits(64)) return f'dockerpytest_{random.getrandbits(64):x}'
def get_service_container(self, service_name, attempts=20, interval=0.5, def get_service_container(self, service_name, attempts=20, interval=0.5,
include_stopped=False): include_stopped=False):
@ -54,7 +52,7 @@ class ServiceTest(BaseAPIIntegrationTest):
def create_simple_service(self, name=None, labels=None): def create_simple_service(self, name=None, labels=None):
if name: if name:
name = 'dockerpytest_{0}'.format(name) name = f'dockerpytest_{name}'
else: else:
name = self.get_service_name() name = self.get_service_name()
@ -403,20 +401,20 @@ class ServiceTest(BaseAPIIntegrationTest):
node_id = self.client.nodes()[0]['ID'] node_id = self.client.nodes()[0]['ID']
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
task_tmpl = docker.types.TaskTemplate( task_tmpl = docker.types.TaskTemplate(
container_spec, placement=['node.id=={}'.format(node_id)] container_spec, placement=[f'node.id=={node_id}']
) )
name = self.get_service_name() name = self.get_service_name()
svc_id = self.client.create_service(task_tmpl, name=name) svc_id = self.client.create_service(task_tmpl, name=name)
svc_info = self.client.inspect_service(svc_id) svc_info = self.client.inspect_service(svc_id)
assert 'Placement' in svc_info['Spec']['TaskTemplate'] assert 'Placement' in svc_info['Spec']['TaskTemplate']
assert (svc_info['Spec']['TaskTemplate']['Placement'] == assert (svc_info['Spec']['TaskTemplate']['Placement'] ==
{'Constraints': ['node.id=={}'.format(node_id)]}) {'Constraints': [f'node.id=={node_id}']})
def test_create_service_with_placement_object(self): def test_create_service_with_placement_object(self):
node_id = self.client.nodes()[0]['ID'] node_id = self.client.nodes()[0]['ID']
container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) container_spec = docker.types.ContainerSpec(TEST_IMG, ['true'])
placemt = docker.types.Placement( placemt = docker.types.Placement(
constraints=['node.id=={}'.format(node_id)] constraints=[f'node.id=={node_id}']
) )
task_tmpl = docker.types.TaskTemplate( task_tmpl = docker.types.TaskTemplate(
container_spec, placement=placemt container_spec, placement=placemt
@ -508,7 +506,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990 assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp' assert port['Protocol'] == 'udp'
else: else:
self.fail('Invalid port specification: {0}'.format(port)) self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3 assert len(ports) == 3
@ -670,14 +668,14 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name) container = self.get_service_container(name)
assert container is not None assert container is not None
exec_id = self.client.exec_create( exec_id = self.client.exec_create(
container, 'cat /run/secrets/{0}'.format(secret_name) container, f'cat /run/secrets/{secret_name}'
) )
assert self.client.exec_start(exec_id) == secret_data assert self.client.exec_start(exec_id) == secret_data
@requires_api_version('1.25') @requires_api_version('1.25')
def test_create_service_with_unicode_secret(self): def test_create_service_with_unicode_secret(self):
secret_name = 'favorite_touhou' secret_name = 'favorite_touhou'
secret_data = u'東方花映塚' secret_data = '東方花映塚'
secret_id = self.client.create_secret(secret_name, secret_data) secret_id = self.client.create_secret(secret_name, secret_data)
self.tmp_secrets.append(secret_id) self.tmp_secrets.append(secret_id)
secret_ref = docker.types.SecretReference(secret_id, secret_name) secret_ref = docker.types.SecretReference(secret_id, secret_name)
@ -695,7 +693,7 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name) container = self.get_service_container(name)
assert container is not None assert container is not None
exec_id = self.client.exec_create( exec_id = self.client.exec_create(
container, 'cat /run/secrets/{0}'.format(secret_name) container, f'cat /run/secrets/{secret_name}'
) )
container_secret = self.client.exec_start(exec_id) container_secret = self.client.exec_start(exec_id)
container_secret = container_secret.decode('utf-8') container_secret = container_secret.decode('utf-8')
@ -722,14 +720,14 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name) container = self.get_service_container(name)
assert container is not None assert container is not None
exec_id = self.client.exec_create( exec_id = self.client.exec_create(
container, 'cat /{0}'.format(config_name) container, f'cat /{config_name}'
) )
assert self.client.exec_start(exec_id) == config_data assert self.client.exec_start(exec_id) == config_data
@requires_api_version('1.30') @requires_api_version('1.30')
def test_create_service_with_unicode_config(self): def test_create_service_with_unicode_config(self):
config_name = 'favorite_touhou' config_name = 'favorite_touhou'
config_data = u'東方花映塚' config_data = '東方花映塚'
config_id = self.client.create_config(config_name, config_data) config_id = self.client.create_config(config_name, config_data)
self.tmp_configs.append(config_id) self.tmp_configs.append(config_id)
config_ref = docker.types.ConfigReference(config_id, config_name) config_ref = docker.types.ConfigReference(config_id, config_name)
@ -747,7 +745,7 @@ class ServiceTest(BaseAPIIntegrationTest):
container = self.get_service_container(name) container = self.get_service_container(name)
assert container is not None assert container is not None
exec_id = self.client.exec_create( exec_id = self.client.exec_create(
container, 'cat /{0}'.format(config_name) container, f'cat /{config_name}'
) )
container_config = self.client.exec_start(exec_id) container_config = self.client.exec_start(exec_id)
container_config = container_config.decode('utf-8') container_config = container_config.decode('utf-8')
@ -1136,7 +1134,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990 assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp' assert port['Protocol'] == 'udp'
else: else:
self.fail('Invalid port specification: {0}'.format(port)) self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3 assert len(ports) == 3
@ -1163,7 +1161,7 @@ class ServiceTest(BaseAPIIntegrationTest):
assert port['TargetPort'] == 1990 assert port['TargetPort'] == 1990
assert port['Protocol'] == 'udp' assert port['Protocol'] == 'udp'
else: else:
self.fail('Invalid port specification: {0}'.format(port)) self.fail(f'Invalid port specification: {port}')
assert len(ports) == 3 assert len(ports) == 3

View File

@ -8,7 +8,7 @@ from .base import BaseAPIIntegrationTest
class SwarmTest(BaseAPIIntegrationTest): class SwarmTest(BaseAPIIntegrationTest):
def setUp(self): def setUp(self):
super(SwarmTest, self).setUp() super().setUp()
force_leave_swarm(self.client) force_leave_swarm(self.client)
self._unlock_key = None self._unlock_key = None
@ -19,7 +19,7 @@ class SwarmTest(BaseAPIIntegrationTest):
except docker.errors.APIError: except docker.errors.APIError:
pass pass
force_leave_swarm(self.client) force_leave_swarm(self.client)
super(SwarmTest, self).tearDown() super().tearDown()
@requires_api_version('1.24') @requires_api_version('1.24')
def test_init_swarm_simple(self): def test_init_swarm_simple(self):

View File

@ -75,11 +75,11 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
""" """
def setUp(self): def setUp(self):
super(BaseAPIIntegrationTest, self).setUp() super().setUp()
self.client = self.get_client_instance() self.client = self.get_client_instance()
def tearDown(self): def tearDown(self):
super(BaseAPIIntegrationTest, self).tearDown() super().tearDown()
self.client.close() self.client.close()
@staticmethod @staticmethod

View File

@ -1,5 +1,3 @@
from __future__ import print_function
import sys import sys
import warnings import warnings
@ -17,11 +15,11 @@ def setup_test_session():
try: try:
c.inspect_image(TEST_IMG) c.inspect_image(TEST_IMG)
except docker.errors.NotFound: except docker.errors.NotFound:
print("\npulling {0}".format(TEST_IMG), file=sys.stderr) print(f"\npulling {TEST_IMG}", file=sys.stderr)
for data in c.pull(TEST_IMG, stream=True, decode=True): for data in c.pull(TEST_IMG, stream=True, decode=True):
status = data.get("status") status = data.get("status")
progress = data.get("progress") progress = data.get("progress")
detail = "{0} - {1}".format(status, progress) detail = f"{status} - {progress}"
print(detail, file=sys.stderr) print(detail, file=sys.stderr)
# Double make sure we now have busybox # Double make sure we now have busybox

View File

@ -3,7 +3,6 @@ import random
import sys import sys
import pytest import pytest
import six
from distutils.spawn import find_executable from distutils.spawn import find_executable
from docker.credentials import ( from docker.credentials import (
@ -12,7 +11,7 @@ from docker.credentials import (
) )
class TestStore(object): class TestStore:
def teardown_method(self): def teardown_method(self):
for server in self.tmp_keys: for server in self.tmp_keys:
try: try:
@ -33,7 +32,7 @@ class TestStore(object):
self.store = Store(DEFAULT_OSX_STORE) self.store = Store(DEFAULT_OSX_STORE)
def get_random_servername(self): def get_random_servername(self):
res = 'pycreds_test_{:x}'.format(random.getrandbits(32)) res = f'pycreds_test_{random.getrandbits(32):x}'
self.tmp_keys.append(res) self.tmp_keys.append(res)
return res return res
@ -61,7 +60,7 @@ class TestStore(object):
def test_unicode_strings(self): def test_unicode_strings(self):
key = self.get_random_servername() key = self.get_random_servername()
key = six.u(key) key = key
self.store.store(server=key, username='user', secret='pass') self.store.store(server=key, username='user', secret='pass')
data = self.store.get(key) data = self.store.get(key)
assert data assert data

View File

@ -5,7 +5,7 @@ from docker.credentials.utils import create_environment_dict
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
@mock.patch.dict(os.environ) @mock.patch.dict(os.environ)

View File

@ -13,8 +13,8 @@ class ImageCollectionTest(BaseIntegrationTest):
def test_build(self): def test_build(self):
client = docker.from_env(version=TEST_API_VERSION) client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build(fileobj=io.BytesIO( image, _ = client.images.build(fileobj=io.BytesIO(
"FROM alpine\n" b"FROM alpine\n"
"CMD echo hello world".encode('ascii') b"CMD echo hello world"
)) ))
self.tmp_imgs.append(image.id) self.tmp_imgs.append(image.id)
assert client.containers.run(image) == b"hello world\n" assert client.containers.run(image) == b"hello world\n"
@ -24,8 +24,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION) client = docker.from_env(version=TEST_API_VERSION)
with pytest.raises(docker.errors.BuildError) as cm: with pytest.raises(docker.errors.BuildError) as cm:
client.images.build(fileobj=io.BytesIO( client.images.build(fileobj=io.BytesIO(
"FROM alpine\n" b"FROM alpine\n"
"RUN exit 1".encode('ascii') b"RUN exit 1"
)) ))
assert ( assert (
"The command '/bin/sh -c exit 1' returned a non-zero code: 1" "The command '/bin/sh -c exit 1' returned a non-zero code: 1"
@ -36,8 +36,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION) client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build( image, _ = client.images.build(
tag='some-tag', fileobj=io.BytesIO( tag='some-tag', fileobj=io.BytesIO(
"FROM alpine\n" b"FROM alpine\n"
"CMD echo hello world".encode('ascii') b"CMD echo hello world"
) )
) )
self.tmp_imgs.append(image.id) self.tmp_imgs.append(image.id)
@ -47,8 +47,8 @@ class ImageCollectionTest(BaseIntegrationTest):
client = docker.from_env(version=TEST_API_VERSION) client = docker.from_env(version=TEST_API_VERSION)
image, _ = client.images.build( image, _ = client.images.build(
tag='dup-txt-tag', fileobj=io.BytesIO( tag='dup-txt-tag', fileobj=io.BytesIO(
"FROM alpine\n" b"FROM alpine\n"
"CMD echo Successfully built abcd1234".encode('ascii') b"CMD echo Successfully built abcd1234"
) )
) )
self.tmp_imgs.append(image.id) self.tmp_imgs.append(image.id)
@ -119,7 +119,7 @@ class ImageCollectionTest(BaseIntegrationTest):
self.tmp_imgs.append(additional_tag) self.tmp_imgs.append(additional_tag)
image.reload() image.reload()
with tempfile.TemporaryFile() as f: with tempfile.TemporaryFile() as f:
stream = image.save(named='{}:latest'.format(additional_tag)) stream = image.save(named=f'{additional_tag}:latest')
for chunk in stream: for chunk in stream:
f.write(chunk) f.write(chunk)
@ -129,7 +129,7 @@ class ImageCollectionTest(BaseIntegrationTest):
assert len(result) == 1 assert len(result) == 1
assert result[0].id == image.id assert result[0].id == image.id
assert '{}:latest'.format(additional_tag) in result[0].tags assert f'{additional_tag}:latest' in result[0].tags
def test_save_name_error(self): def test_save_name_error(self):
client = docker.from_env(version=TEST_API_VERSION) client = docker.from_env(version=TEST_API_VERSION)
@ -143,7 +143,7 @@ class ImageTest(BaseIntegrationTest):
def test_tag_and_remove(self): def test_tag_and_remove(self):
repo = 'dockersdk.tests.images.test_tag' repo = 'dockersdk.tests.images.test_tag'
tag = 'some-tag' tag = 'some-tag'
identifier = '{}:{}'.format(repo, tag) identifier = f'{repo}:{tag}'
client = docker.from_env(version=TEST_API_VERSION) client = docker.from_env(version=TEST_API_VERSION)
image = client.images.pull('alpine:latest') image = client.images.pull('alpine:latest')

View File

@ -2,7 +2,6 @@ import io
import random import random
import docker import docker
import six
from .base import BaseAPIIntegrationTest, TEST_IMG from .base import BaseAPIIntegrationTest, TEST_IMG
import pytest import pytest
@ -39,8 +38,7 @@ class TestRegressions(BaseAPIIntegrationTest):
self.client.start(ctnr) self.client.start(ctnr)
self.client.wait(ctnr) self.client.wait(ctnr)
logs = self.client.logs(ctnr) logs = self.client.logs(ctnr)
if six.PY3: logs = logs.decode('utf-8')
logs = logs.decode('utf-8')
assert logs == '1000\n' assert logs == '1000\n'
def test_792_explicit_port_protocol(self): def test_792_explicit_port_protocol(self):
@ -56,10 +54,10 @@ class TestRegressions(BaseAPIIntegrationTest):
self.client.start(ctnr) self.client.start(ctnr)
assert self.client.port( assert self.client.port(
ctnr, 2000 ctnr, 2000
)[0]['HostPort'] == six.text_type(tcp_port) )[0]['HostPort'] == str(tcp_port)
assert self.client.port( assert self.client.port(
ctnr, '2000/tcp' ctnr, '2000/tcp'
)[0]['HostPort'] == six.text_type(tcp_port) )[0]['HostPort'] == str(tcp_port)
assert self.client.port( assert self.client.port(
ctnr, '2000/udp' ctnr, '2000/udp'
)[0]['HostPort'] == six.text_type(udp_port) )[0]['HostPort'] == str(udp_port)

View File

@ -7,7 +7,6 @@ from docker import errors
from docker.utils.proxy import ProxyConfig from docker.utils.proxy import ProxyConfig
import pytest import pytest
import six
from .base import BaseAPIIntegrationTest, TEST_IMG from .base import BaseAPIIntegrationTest, TEST_IMG
from ..helpers import random_name, requires_api_version, requires_experimental from ..helpers import random_name, requires_api_version, requires_experimental
@ -71,9 +70,8 @@ class BuildTest(BaseAPIIntegrationTest):
assert len(logs) > 0 assert len(logs) > 0
def test_build_from_stringio(self): def test_build_from_stringio(self):
if six.PY3: return
return script = io.StringIO('\n'.join([
script = io.StringIO(six.text_type('\n').join([
'FROM busybox', 'FROM busybox',
'RUN mkdir -p /tmp/test', 'RUN mkdir -p /tmp/test',
'EXPOSE 8080', 'EXPOSE 8080',
@ -83,8 +81,7 @@ class BuildTest(BaseAPIIntegrationTest):
stream = self.client.build(fileobj=script) stream = self.client.build(fileobj=script)
logs = '' logs = ''
for chunk in stream: for chunk in stream:
if six.PY3: chunk = chunk.decode('utf-8')
chunk = chunk.decode('utf-8')
logs += chunk logs += chunk
assert logs != '' assert logs != ''
@ -135,8 +132,7 @@ class BuildTest(BaseAPIIntegrationTest):
self.client.wait(c) self.client.wait(c)
logs = self.client.logs(c) logs = self.client.logs(c)
if six.PY3: logs = logs.decode('utf-8')
logs = logs.decode('utf-8')
assert sorted(list(filter(None, logs.split('\n')))) == sorted([ assert sorted(list(filter(None, logs.split('\n')))) == sorted([
'/test/#file.txt', '/test/#file.txt',
@ -340,8 +336,7 @@ class BuildTest(BaseAPIIntegrationTest):
assert self.client.inspect_image(img_name) assert self.client.inspect_image(img_name)
ctnr = self.run_container(img_name, 'cat /hosts-file') ctnr = self.run_container(img_name, 'cat /hosts-file')
logs = self.client.logs(ctnr) logs = self.client.logs(ctnr)
if six.PY3: logs = logs.decode('utf-8')
logs = logs.decode('utf-8')
assert '127.0.0.1\textrahost.local.test' in logs assert '127.0.0.1\textrahost.local.test' in logs
assert '127.0.0.1\thello.world.test' in logs assert '127.0.0.1\thello.world.test' in logs
@ -376,7 +371,7 @@ class BuildTest(BaseAPIIntegrationTest):
snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)'
script = io.BytesIO(b'\n'.join([ script = io.BytesIO(b'\n'.join([
b'FROM busybox', b'FROM busybox',
'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8')
])) ]))
stream = self.client.build( stream = self.client.build(
@ -440,7 +435,7 @@ class BuildTest(BaseAPIIntegrationTest):
@requires_api_version('1.32') @requires_api_version('1.32')
@requires_experimental(until=None) @requires_experimental(until=None)
def test_build_invalid_platform(self): def test_build_invalid_platform(self):
script = io.BytesIO('FROM busybox\n'.encode('ascii')) script = io.BytesIO(b'FROM busybox\n')
with pytest.raises(errors.APIError) as excinfo: with pytest.raises(errors.APIError) as excinfo:
stream = self.client.build(fileobj=script, platform='foobar') stream = self.client.build(fileobj=script, platform='foobar')

View File

@ -79,7 +79,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
cls.client.pull(TEST_IMG) cls.client.pull(TEST_IMG)
def tearDown(self): def tearDown(self):
super(BaseAPIIntegrationTest, self).tearDown() super().tearDown()
self.client.close() self.client.close()
@staticmethod @staticmethod

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import datetime import datetime
import json import json
import signal import signal
@ -7,7 +5,6 @@ import signal
import docker import docker
from docker.api import APIClient from docker.api import APIClient
import pytest import pytest
import six
from . import fake_api from . import fake_api
from ..helpers import requires_api_version from ..helpers import requires_api_version
@ -19,7 +16,7 @@ from .api_test import (
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
def fake_inspect_container_tty(self, container): def fake_inspect_container_tty(self, container):
@ -771,7 +768,7 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_device_requests(self): def test_create_container_with_device_requests(self):
client = APIClient(version='1.40') client = APIClient(version='1.40')
fake_api.fake_responses.setdefault( fake_api.fake_responses.setdefault(
'{0}/v1.40/containers/create'.format(fake_api.prefix), f'{fake_api.prefix}/v1.40/containers/create',
fake_api.post_fake_create_container, fake_api.post_fake_create_container,
) )
client.create_container( client.create_container(
@ -831,8 +828,8 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_labels_dict(self): def test_create_container_with_labels_dict(self):
labels_dict = { labels_dict = {
six.text_type('foo'): six.text_type('1'), 'foo': '1',
six.text_type('bar'): six.text_type('2'), 'bar': '2',
} }
self.client.create_container( self.client.create_container(
@ -848,12 +845,12 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_labels_list(self): def test_create_container_with_labels_list(self):
labels_list = [ labels_list = [
six.text_type('foo'), 'foo',
six.text_type('bar'), 'bar',
] ]
labels_dict = { labels_dict = {
six.text_type('foo'): six.text_type(), 'foo': '',
six.text_type('bar'): six.text_type(), 'bar': '',
} }
self.client.create_container( self.client.create_container(
@ -1013,11 +1010,11 @@ class CreateContainerTest(BaseAPIClientTest):
def test_create_container_with_unicode_envvars(self): def test_create_container_with_unicode_envvars(self):
envvars_dict = { envvars_dict = {
'foo': u'', 'foo': '',
} }
expected = [ expected = [
u'foo=☃' 'foo=☃'
] ]
self.client.create_container( self.client.create_container(
@ -1138,7 +1135,7 @@ class ContainerTest(BaseAPIClientTest):
stream=False stream=False
) )
assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n'
def test_logs_with_dict_instead_of_id(self): def test_logs_with_dict_instead_of_id(self):
with mock.patch('docker.api.client.APIClient.inspect_container', with mock.patch('docker.api.client.APIClient.inspect_container',
@ -1154,7 +1151,7 @@ class ContainerTest(BaseAPIClientTest):
stream=False stream=False
) )
assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n'
def test_log_streaming(self): def test_log_streaming(self):
with mock.patch('docker.api.client.APIClient.inspect_container', with mock.patch('docker.api.client.APIClient.inspect_container',

View File

@ -11,7 +11,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1']) self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1'])
args = fake_request.call_args args = fake_request.call_args
assert 'POST' == args[0][0], url_prefix + 'containers/{0}/exec'.format( assert 'POST' == args[0][0], url_prefix + 'containers/{}/exec'.format(
fake_api.FAKE_CONTAINER_ID fake_api.FAKE_CONTAINER_ID
) )
@ -32,7 +32,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID) self.client.exec_start(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{0}/start'.format( assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID fake_api.FAKE_EXEC_ID
) )
@ -51,7 +51,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True) self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True)
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{0}/start'.format( assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID fake_api.FAKE_EXEC_ID
) )
@ -68,7 +68,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_inspect(fake_api.FAKE_EXEC_ID) self.client.exec_inspect(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{0}/json'.format( assert args[0][1] == url_prefix + 'exec/{}/json'.format(
fake_api.FAKE_EXEC_ID fake_api.FAKE_EXEC_ID
) )
@ -77,7 +77,7 @@ class ExecTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID), url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize',
params={'h': 20, 'w': 60}, params={'h': 20, 'w': 60},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )

View File

@ -11,7 +11,7 @@ from .api_test import (
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
class ImageTest(BaseAPIClientTest): class ImageTest(BaseAPIClientTest):

View File

@ -1,14 +1,12 @@
import json import json
import six
from .api_test import BaseAPIClientTest, url_prefix, response from .api_test import BaseAPIClientTest, url_prefix, response
from docker.types import IPAMConfig, IPAMPool from docker.types import IPAMConfig, IPAMPool
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
class NetworkTest(BaseAPIClientTest): class NetworkTest(BaseAPIClientTest):
@ -103,16 +101,16 @@ class NetworkTest(BaseAPIClientTest):
self.client.remove_network(network_id) self.client.remove_network(network_id)
args = delete.call_args args = delete.call_args
assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id) assert args[0][0] == url_prefix + f'networks/{network_id}'
def test_inspect_network(self): def test_inspect_network(self):
network_id = 'abc12345' network_id = 'abc12345'
network_name = 'foo' network_name = 'foo'
network_data = { network_data = {
six.u('name'): network_name, 'name': network_name,
six.u('id'): network_id, 'id': network_id,
six.u('driver'): 'bridge', 'driver': 'bridge',
six.u('containers'): {}, 'containers': {},
} }
network_response = response(status_code=200, content=network_data) network_response = response(status_code=200, content=network_data)
@ -123,7 +121,7 @@ class NetworkTest(BaseAPIClientTest):
assert result == network_data assert result == network_data
args = get.call_args args = get.call_args
assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id) assert args[0][0] == url_prefix + f'networks/{network_id}'
def test_connect_container_to_network(self): def test_connect_container_to_network(self):
network_id = 'abc12345' network_id = 'abc12345'
@ -141,7 +139,7 @@ class NetworkTest(BaseAPIClientTest):
) )
assert post.call_args[0][0] == ( assert post.call_args[0][0] == (
url_prefix + 'networks/{0}/connect'.format(network_id) url_prefix + f'networks/{network_id}/connect'
) )
assert json.loads(post.call_args[1]['data']) == { assert json.loads(post.call_args[1]['data']) == {
@ -164,7 +162,7 @@ class NetworkTest(BaseAPIClientTest):
container={'Id': container_id}, net_id=network_id) container={'Id': container_id}, net_id=network_id)
assert post.call_args[0][0] == ( assert post.call_args[0][0] == (
url_prefix + 'networks/{0}/disconnect'.format(network_id) url_prefix + f'networks/{network_id}/disconnect'
) )
assert json.loads(post.call_args[1]['data']) == { assert json.loads(post.call_args[1]['data']) == {
'Container': container_id 'Container': container_id

View File

@ -10,11 +10,12 @@ import tempfile
import threading import threading
import time import time
import unittest import unittest
import socketserver
import http.server
import docker import docker
import pytest import pytest
import requests import requests
import six
from docker.api import APIClient from docker.api import APIClient
from docker.constants import DEFAULT_DOCKER_API_VERSION from docker.constants import DEFAULT_DOCKER_API_VERSION
from requests.packages import urllib3 from requests.packages import urllib3
@ -24,7 +25,7 @@ from . import fake_api
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS
@ -34,7 +35,7 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0,
request=None, raw=None): request=None, raw=None):
res = requests.Response() res = requests.Response()
res.status_code = status_code res.status_code = status_code
if not isinstance(content, six.binary_type): if not isinstance(content, bytes):
content = json.dumps(content).encode('ascii') content = json.dumps(content).encode('ascii')
res._content = content res._content = content
res.headers = requests.structures.CaseInsensitiveDict(headers or {}) res.headers = requests.structures.CaseInsensitiveDict(headers or {})
@ -60,7 +61,7 @@ def fake_resp(method, url, *args, **kwargs):
elif (url, method) in fake_api.fake_responses: elif (url, method) in fake_api.fake_responses:
key = (url, method) key = (url, method)
if not key: if not key:
raise Exception('{0} {1}'.format(method, url)) raise Exception(f'{method} {url}')
status_code, content = fake_api.fake_responses[key]() status_code, content = fake_api.fake_responses[key]()
return response(status_code=status_code, content=content) return response(status_code=status_code, content=content)
@ -85,11 +86,11 @@ def fake_delete(self, url, *args, **kwargs):
def fake_read_from_socket(self, response, stream, tty=False, demux=False): def fake_read_from_socket(self, response, stream, tty=False, demux=False):
return six.binary_type() return bytes()
url_base = '{0}/'.format(fake_api.prefix) url_base = f'{fake_api.prefix}/'
url_prefix = '{0}v{1}/'.format( url_prefix = '{}v{}/'.format(
url_base, url_base,
docker.constants.DEFAULT_DOCKER_API_VERSION) docker.constants.DEFAULT_DOCKER_API_VERSION)
@ -133,20 +134,20 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_valid_resource(self): def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename') url = self.client._url('/hello/{0}/world', 'somename')
assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world') assert url == '{}{}'.format(url_prefix, 'hello/somename/world')
url = self.client._url( url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername' '/hello/{0}/world/{1}', 'somename', 'someothername'
) )
assert url == '{0}{1}'.format( assert url == '{}{}'.format(
url_prefix, 'hello/somename/world/someothername' url_prefix, 'hello/somename/world/someothername'
) )
url = self.client._url('/hello/{0}/world', 'some?name') url = self.client._url('/hello/{0}/world', 'some?name')
assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world') assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world')
url = self.client._url("/images/{0}/push", "localhost:5000/image") url = self.client._url("/images/{0}/push", "localhost:5000/image")
assert url == '{0}{1}'.format( assert url == '{}{}'.format(
url_prefix, 'images/localhost:5000/image/push' url_prefix, 'images/localhost:5000/image/push'
) )
@ -156,13 +157,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_no_resource(self): def test_url_no_resource(self):
url = self.client._url('/simple') url = self.client._url('/simple')
assert url == '{0}{1}'.format(url_prefix, 'simple') assert url == '{}{}'.format(url_prefix, 'simple')
def test_url_unversioned_api(self): def test_url_unversioned_api(self):
url = self.client._url( url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False '/hello/{0}/world', 'somename', versioned_api=False
) )
assert url == '{0}{1}'.format(url_base, 'hello/somename/world') assert url == '{}{}'.format(url_base, 'hello/somename/world')
def test_version(self): def test_version(self):
self.client.version() self.client.version()
@ -184,13 +185,13 @@ class DockerApiTest(BaseAPIClientTest):
def test_retrieve_server_version(self): def test_retrieve_server_version(self):
client = APIClient(version="auto") client = APIClient(version="auto")
assert isinstance(client._version, six.string_types) assert isinstance(client._version, str)
assert not (client._version == "auto") assert not (client._version == "auto")
client.close() client.close()
def test_auto_retrieve_server_version(self): def test_auto_retrieve_server_version(self):
version = self.client._retrieve_server_version() version = self.client._retrieve_server_version()
assert isinstance(version, six.string_types) assert isinstance(version, str)
def test_info(self): def test_info(self):
self.client.info() self.client.info()
@ -337,8 +338,7 @@ class DockerApiTest(BaseAPIClientTest):
def test_stream_helper_decoding(self): def test_stream_helper_decoding(self):
status_code, content = fake_api.fake_responses[url_prefix + 'events']() status_code, content = fake_api.fake_responses[url_prefix + 'events']()
content_str = json.dumps(content) content_str = json.dumps(content)
if six.PY3: content_str = content_str.encode('utf-8')
content_str = content_str.encode('utf-8')
body = io.BytesIO(content_str) body = io.BytesIO(content_str)
# mock a stream interface # mock a stream interface
@ -405,7 +405,7 @@ class UnixSocketStreamTest(unittest.TestCase):
while not self.stop_server: while not self.stop_server:
try: try:
connection, client_address = self.server_socket.accept() connection, client_address = self.server_socket.accept()
except socket.error: except OSError:
# Probably no connection to accept yet # Probably no connection to accept yet
time.sleep(0.01) time.sleep(0.01)
continue continue
@ -489,7 +489,7 @@ class TCPSocketStreamTest(unittest.TestCase):
@classmethod @classmethod
def setup_class(cls): def setup_class(cls):
cls.server = six.moves.socketserver.ThreadingTCPServer( cls.server = socketserver.ThreadingTCPServer(
('', 0), cls.get_handler_class()) ('', 0), cls.get_handler_class())
cls.thread = threading.Thread(target=cls.server.serve_forever) cls.thread = threading.Thread(target=cls.server.serve_forever)
cls.thread.setDaemon(True) cls.thread.setDaemon(True)
@ -508,7 +508,7 @@ class TCPSocketStreamTest(unittest.TestCase):
stdout_data = cls.stdout_data stdout_data = cls.stdout_data
stderr_data = cls.stderr_data stderr_data = cls.stderr_data
class Handler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler, object): class Handler(http.server.BaseHTTPRequestHandler):
def do_POST(self): def do_POST(self):
resp_data = self.get_resp_data() resp_data = self.get_resp_data()
self.send_response(101) self.send_response(101)
@ -534,7 +534,7 @@ class TCPSocketStreamTest(unittest.TestCase):
data += stderr_data data += stderr_data
return data return data
else: else:
raise Exception('Unknown path {0}'.format(path)) raise Exception(f'Unknown path {path}')
@staticmethod @staticmethod
def frame_header(stream, data): def frame_header(stream, data):
@ -632,7 +632,7 @@ class UserAgentTest(unittest.TestCase):
class DisableSocketTest(unittest.TestCase): class DisableSocketTest(unittest.TestCase):
class DummySocket(object): class DummySocket:
def __init__(self, timeout=60): def __init__(self, timeout=60):
self.timeout = timeout self.timeout = timeout

View File

@ -104,7 +104,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert args[0][0] == 'GET' assert args[0][0] == 'GET'
assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name) assert args[0][1] == f'{url_prefix}volumes/{name}'
def test_remove_volume(self): def test_remove_volume(self):
name = 'perfectcherryblossom' name = 'perfectcherryblossom'
@ -112,4 +112,4 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert args[0][0] == 'DELETE' assert args[0][0] == 'DELETE'
assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name) assert args[0][1] == f'{url_prefix}volumes/{name}'

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import base64 import base64
import json import json
import os import os
@ -15,7 +13,7 @@ import pytest
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
class RegressionTest(unittest.TestCase): class RegressionTest(unittest.TestCase):
@ -239,7 +237,7 @@ class LoadConfigTest(unittest.TestCase):
cfg_path = os.path.join(folder, '.dockercfg') cfg_path = os.path.join(folder, '.dockercfg')
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
with open(cfg_path, 'w') as f: with open(cfg_path, 'w') as f:
f.write('auth = {0}\n'.format(auth_)) f.write(f'auth = {auth_}\n')
f.write('email = sakuya@scarlet.net') f.write('email = sakuya@scarlet.net')
cfg = auth.load_config(cfg_path) cfg = auth.load_config(cfg_path)
@ -297,13 +295,13 @@ class LoadConfigTest(unittest.TestCase):
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder, dockercfg_path = os.path.join(folder,
'.{0}.dockercfg'.format( '.{}.dockercfg'.format(
random.randrange(100000))) random.randrange(100000)))
registry = 'https://your.private.registry.io' registry = 'https://your.private.registry.io'
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = { config = {
registry: { registry: {
'auth': '{0}'.format(auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -329,7 +327,7 @@ class LoadConfigTest(unittest.TestCase):
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = { config = {
registry: { registry: {
'auth': '{0}'.format(auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -357,7 +355,7 @@ class LoadConfigTest(unittest.TestCase):
config = { config = {
'auths': { 'auths': {
registry: { registry: {
'auth': '{0}'.format(auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -386,7 +384,7 @@ class LoadConfigTest(unittest.TestCase):
config = { config = {
'auths': { 'auths': {
registry: { registry: {
'auth': '{0}'.format(auth_), 'auth': f'{auth_}',
'email': 'sakuya@scarlet.net' 'email': 'sakuya@scarlet.net'
} }
} }
@ -794,9 +792,9 @@ class InMemoryStore(credentials.Store):
} }
def list(self): def list(self):
return dict( return {
[(k, v['Username']) for k, v in self.__store.items()] k: v['Username'] for k, v in self.__store.items()
) }
def erase(self, server): def erase(self, server):
del self.__store[server] del self.__store[server]

View File

@ -15,7 +15,7 @@ from . import fake_api
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs') TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs')
POOL_SIZE = 20 POOL_SIZE = 20

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import unittest import unittest
import pytest import pytest
@ -15,7 +13,7 @@ from docker.types.services import convert_service_ports
try: try:
from unittest import mock from unittest import mock
except: # noqa: E722 except: # noqa: E722
import mock from unittest import mock
def create_host_config(*args, **kwargs): def create_host_config(*args, **kwargs):

View File

@ -126,7 +126,7 @@ class ContainerErrorTest(unittest.TestCase):
err = ContainerError(container, exit_status, command, image, stderr) err = ContainerError(container, exit_status, command, image, stderr)
msg = ("Command '{}' in image '{}' returned non-zero exit status {}" msg = ("Command '{}' in image '{}' returned non-zero exit status {}"
).format(command, image, exit_status, stderr) ).format(command, image, exit_status)
assert str(err) == msg assert str(err) == msg
def test_container_with_stderr(self): def test_container_with_stderr(self):

View File

@ -2,7 +2,7 @@ from docker import constants
from . import fake_stat from . import fake_stat
CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION) CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}'
FAKE_CONTAINER_ID = '3cc2351ab11b' FAKE_CONTAINER_ID = '3cc2351ab11b'
FAKE_IMAGE_ID = 'e9aa60c60128' FAKE_IMAGE_ID = 'e9aa60c60128'
@ -526,96 +526,96 @@ if constants.IS_WINDOWS_PLATFORM:
prefix = 'http+docker://localnpipe' prefix = 'http+docker://localnpipe'
fake_responses = { fake_responses = {
'{0}/version'.format(prefix): f'{prefix}/version':
get_fake_version, get_fake_version,
'{1}/{0}/version'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/version':
get_fake_version, get_fake_version,
'{1}/{0}/info'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/info':
get_fake_info, get_fake_info,
'{1}/{0}/auth'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/auth':
post_fake_auth, post_fake_auth,
'{1}/{0}/_ping'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/_ping':
get_fake_ping, get_fake_ping,
'{1}/{0}/images/search'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/search':
get_fake_search, get_fake_search,
'{1}/{0}/images/json'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/json':
get_fake_images, get_fake_images,
'{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/test_image/history':
get_fake_image_history, get_fake_image_history,
'{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_import_image, post_fake_import_image,
'{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/json':
get_fake_containers, get_fake_containers,
'{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start':
post_fake_start_container, post_fake_start_container,
'{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize':
post_fake_resize_container, post_fake_resize_container,
'{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json':
get_fake_inspect_container, get_fake_inspect_container,
'{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename':
post_fake_rename_container, post_fake_rename_container,
'{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag':
post_fake_tag_image, post_fake_tag_image,
'{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait':
get_fake_wait, get_fake_wait,
'{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs':
get_fake_logs, get_fake_logs,
'{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes':
get_fake_diff, get_fake_diff,
'{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export':
get_fake_export, get_fake_export,
'{1}/{0}/containers/3cc2351ab11b/update'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update':
post_fake_update_container, post_fake_update_container,
'{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec':
post_fake_exec_create, post_fake_exec_create,
'{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start':
post_fake_exec_start, post_fake_exec_start,
'{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json':
get_fake_exec_inspect, get_fake_exec_inspect,
'{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize':
post_fake_exec_resize, post_fake_exec_resize,
'{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats':
get_fake_stats, get_fake_stats,
'{1}/{0}/containers/3cc2351ab11b/top'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top':
get_fake_top, get_fake_top,
'{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop':
post_fake_stop_container, post_fake_stop_container,
'{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill':
post_fake_kill_container, post_fake_kill_container,
'{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause':
post_fake_pause_container, post_fake_pause_container,
'{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause':
post_fake_unpause_container, post_fake_unpause_container,
'{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart':
post_fake_restart_container, post_fake_restart_container,
'{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b':
delete_fake_remove_container, delete_fake_remove_container,
'{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/create':
post_fake_image_create, post_fake_image_create,
'{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128':
delete_fake_remove_image, delete_fake_remove_image,
'{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get':
get_fake_get_image, get_fake_get_image,
'{1}/{0}/images/load'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/load':
post_fake_load_image, post_fake_load_image,
'{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/test_image/json':
get_fake_inspect_image, get_fake_inspect_image,
'{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/test_image/insert':
get_fake_insert_image, get_fake_insert_image,
'{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/images/test_image/push':
post_fake_push, post_fake_push,
'{1}/{0}/commit'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/commit':
post_fake_commit, post_fake_commit,
'{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/containers/create':
post_fake_create_container, post_fake_create_container,
'{1}/{0}/build'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/build':
post_fake_build_container, post_fake_build_container,
'{1}/{0}/events'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/events':
get_fake_events, get_fake_events,
('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'): (f'{prefix}/{CURRENT_VERSION}/volumes', 'GET'):
get_fake_volume_list, get_fake_volume_list,
('{1}/{0}/volumes/create'.format(CURRENT_VERSION, prefix), 'POST'): (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
get_fake_volume, get_fake_volume,
('{1}/{0}/volumes/{2}'.format( ('{1}/{0}/volumes/{2}'.format(
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
@ -629,11 +629,11 @@ fake_responses = {
CURRENT_VERSION, prefix, FAKE_NODE_ID CURRENT_VERSION, prefix, FAKE_NODE_ID
), 'POST'): ), 'POST'):
post_fake_update_node, post_fake_update_node,
('{1}/{0}/swarm/join'.format(CURRENT_VERSION, prefix), 'POST'): (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
post_fake_join_swarm, post_fake_join_swarm,
('{1}/{0}/networks'.format(CURRENT_VERSION, prefix), 'GET'): (f'{prefix}/{CURRENT_VERSION}/networks', 'GET'):
get_fake_network_list, get_fake_network_list,
('{1}/{0}/networks/create'.format(CURRENT_VERSION, prefix), 'POST'): (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
post_fake_network, post_fake_network,
('{1}/{0}/networks/{2}'.format( ('{1}/{0}/networks/{2}'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID CURRENT_VERSION, prefix, FAKE_NETWORK_ID
@ -651,6 +651,6 @@ fake_responses = {
CURRENT_VERSION, prefix, FAKE_NETWORK_ID CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'): ), 'POST'):
post_fake_network_disconnect, post_fake_network_disconnect,
'{1}/{0}/secrets/create'.format(CURRENT_VERSION, prefix): f'{prefix}/{CURRENT_VERSION}/secrets/create':
post_fake_secret, post_fake_secret,
} }

View File

@ -7,7 +7,7 @@ from . import fake_api
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
class CopyReturnMagicMock(mock.MagicMock): class CopyReturnMagicMock(mock.MagicMock):
@ -15,7 +15,7 @@ class CopyReturnMagicMock(mock.MagicMock):
A MagicMock which deep copies every return value. A MagicMock which deep copies every return value.
""" """
def _mock_call(self, *args, **kwargs): def _mock_call(self, *args, **kwargs):
ret = super(CopyReturnMagicMock, self)._mock_call(*args, **kwargs) ret = super()._mock_call(*args, **kwargs)
if isinstance(ret, (dict, list)): if isinstance(ret, (dict, list)):
ret = copy.deepcopy(ret) ret = copy.deepcopy(ret)
return ret return ret

View File

@ -16,7 +16,7 @@ class ModelTest(unittest.TestCase):
def test_hash(self): def test_hash(self):
client = make_fake_client() client = make_fake_client()
container1 = client.containers.get(FAKE_CONTAINER_ID) container1 = client.containers.get(FAKE_CONTAINER_ID)
my_set = set([container1]) my_set = {container1}
assert len(my_set) == 1 assert len(my_set) == 1
container2 = client.containers.get(FAKE_CONTAINER_ID) container2 = client.containers.get(FAKE_CONTAINER_ID)

View File

@ -8,4 +8,4 @@ class CreateServiceTest(unittest.TestCase):
def test_secrets_repr(self): def test_secrets_repr(self):
client = make_fake_client() client = make_fake_client()
secret = client.secrets.create(name="super_secret", data="secret") secret = client.secrets.create(name="super_secret", data="secret")
assert secret.__repr__() == "<Secret: '{}'>".format(FAKE_SECRET_NAME) assert secret.__repr__() == f"<Secret: '{FAKE_SECRET_NAME}'>"

View File

@ -40,10 +40,10 @@ class CreateServiceKwargsTest(unittest.TestCase):
'update_config': {'update': 'config'}, 'update_config': {'update': 'config'},
'endpoint_spec': {'blah': 'blah'}, 'endpoint_spec': {'blah': 'blah'},
} }
assert set(task_template.keys()) == set([ assert set(task_template.keys()) == {
'ContainerSpec', 'Resources', 'RestartPolicy', 'Placement', 'ContainerSpec', 'Resources', 'RestartPolicy', 'Placement',
'LogDriver', 'Networks' 'LogDriver', 'Networks'
]) }
assert task_template['Placement'] == { assert task_template['Placement'] == {
'Constraints': ['foo=bar'], 'Constraints': ['foo=bar'],
'Preferences': ['bar=baz'], 'Preferences': ['bar=baz'],
@ -55,7 +55,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
'Options': {'foo': 'bar'} 'Options': {'foo': 'bar'}
} }
assert task_template['Networks'] == [{'Target': 'somenet'}] assert task_template['Networks'] == [{'Target': 'somenet'}]
assert set(task_template['ContainerSpec'].keys()) == set([ assert set(task_template['ContainerSpec'].keys()) == {
'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User', 'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User',
'Labels', 'Mounts', 'StopGracePeriod' 'Labels', 'Mounts', 'StopGracePeriod'
]) }

View File

@ -32,30 +32,30 @@ class SSLAdapterTest(unittest.TestCase):
class MatchHostnameTest(unittest.TestCase): class MatchHostnameTest(unittest.TestCase):
cert = { cert = {
'issuer': ( 'issuer': (
(('countryName', u'US'),), (('countryName', 'US'),),
(('stateOrProvinceName', u'California'),), (('stateOrProvinceName', 'California'),),
(('localityName', u'San Francisco'),), (('localityName', 'San Francisco'),),
(('organizationName', u'Docker Inc'),), (('organizationName', 'Docker Inc'),),
(('organizationalUnitName', u'Docker-Python'),), (('organizationalUnitName', 'Docker-Python'),),
(('commonName', u'localhost'),), (('commonName', 'localhost'),),
(('emailAddress', u'info@docker.com'),) (('emailAddress', 'info@docker.com'),)
), ),
'notAfter': 'Mar 25 23:08:23 2030 GMT', 'notAfter': 'Mar 25 23:08:23 2030 GMT',
'notBefore': u'Mar 25 23:08:23 2016 GMT', 'notBefore': 'Mar 25 23:08:23 2016 GMT',
'serialNumber': u'BD5F894C839C548F', 'serialNumber': 'BD5F894C839C548F',
'subject': ( 'subject': (
(('countryName', u'US'),), (('countryName', 'US'),),
(('stateOrProvinceName', u'California'),), (('stateOrProvinceName', 'California'),),
(('localityName', u'San Francisco'),), (('localityName', 'San Francisco'),),
(('organizationName', u'Docker Inc'),), (('organizationName', 'Docker Inc'),),
(('organizationalUnitName', u'Docker-Python'),), (('organizationalUnitName', 'Docker-Python'),),
(('commonName', u'localhost'),), (('commonName', 'localhost'),),
(('emailAddress', u'info@docker.com'),) (('emailAddress', 'info@docker.com'),)
), ),
'subjectAltName': ( 'subjectAltName': (
('DNS', u'localhost'), ('DNS', 'localhost'),
('DNS', u'*.gensokyo.jp'), ('DNS', '*.gensokyo.jp'),
('IP Address', u'127.0.0.1'), ('IP Address', '127.0.0.1'),
), ),
'version': 3 'version': 3
} }

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import json import json
from . import fake_api from . import fake_api

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import os import os
import os.path import os.path
import shutil import shutil
@ -82,7 +80,7 @@ class ExcludePathsTest(unittest.TestCase):
assert sorted(paths) == sorted(set(paths)) assert sorted(paths) == sorted(set(paths))
def test_wildcard_exclude(self): def test_wildcard_exclude(self):
assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore']) assert self.exclude(['*']) == {'Dockerfile', '.dockerignore'}
def test_exclude_dockerfile_dockerignore(self): def test_exclude_dockerfile_dockerignore(self):
""" """
@ -99,18 +97,18 @@ class ExcludePathsTest(unittest.TestCase):
If we're using a custom Dockerfile, make sure that's not If we're using a custom Dockerfile, make sure that's not
excluded. excluded.
""" """
assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set( assert self.exclude(['*'], dockerfile='Dockerfile.alt') == {
['Dockerfile.alt', '.dockerignore'] 'Dockerfile.alt', '.dockerignore'
) }
assert self.exclude( assert self.exclude(
['*'], dockerfile='foo/Dockerfile3' ['*'], dockerfile='foo/Dockerfile3'
) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) ) == convert_paths({'foo/Dockerfile3', '.dockerignore'})
# https://github.com/docker/docker-py/issues/1956 # https://github.com/docker/docker-py/issues/1956
assert self.exclude( assert self.exclude(
['*'], dockerfile='./foo/Dockerfile3' ['*'], dockerfile='./foo/Dockerfile3'
) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) ) == convert_paths({'foo/Dockerfile3', '.dockerignore'})
def test_exclude_dockerfile_child(self): def test_exclude_dockerfile_child(self):
includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3') includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3')
@ -119,56 +117,56 @@ class ExcludePathsTest(unittest.TestCase):
def test_single_filename(self): def test_single_filename(self):
assert self.exclude(['a.py']) == convert_paths( assert self.exclude(['a.py']) == convert_paths(
self.all_paths - set(['a.py']) self.all_paths - {'a.py'}
) )
def test_single_filename_leading_dot_slash(self): def test_single_filename_leading_dot_slash(self):
assert self.exclude(['./a.py']) == convert_paths( assert self.exclude(['./a.py']) == convert_paths(
self.all_paths - set(['a.py']) self.all_paths - {'a.py'}
) )
# As odd as it sounds, a filename pattern with a trailing slash on the # As odd as it sounds, a filename pattern with a trailing slash on the
# end *will* result in that file being excluded. # end *will* result in that file being excluded.
def test_single_filename_trailing_slash(self): def test_single_filename_trailing_slash(self):
assert self.exclude(['a.py/']) == convert_paths( assert self.exclude(['a.py/']) == convert_paths(
self.all_paths - set(['a.py']) self.all_paths - {'a.py'}
) )
def test_wildcard_filename_start(self): def test_wildcard_filename_start(self):
assert self.exclude(['*.py']) == convert_paths( assert self.exclude(['*.py']) == convert_paths(
self.all_paths - set(['a.py', 'b.py', 'cde.py']) self.all_paths - {'a.py', 'b.py', 'cde.py'}
) )
def test_wildcard_with_exception(self): def test_wildcard_with_exception(self):
assert self.exclude(['*.py', '!b.py']) == convert_paths( assert self.exclude(['*.py', '!b.py']) == convert_paths(
self.all_paths - set(['a.py', 'cde.py']) self.all_paths - {'a.py', 'cde.py'}
) )
def test_wildcard_with_wildcard_exception(self): def test_wildcard_with_wildcard_exception(self):
assert self.exclude(['*.*', '!*.go']) == convert_paths( assert self.exclude(['*.*', '!*.go']) == convert_paths(
self.all_paths - set([ self.all_paths - {
'a.py', 'b.py', 'cde.py', 'Dockerfile.alt', 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt',
]) }
) )
def test_wildcard_filename_end(self): def test_wildcard_filename_end(self):
assert self.exclude(['a.*']) == convert_paths( assert self.exclude(['a.*']) == convert_paths(
self.all_paths - set(['a.py', 'a.go']) self.all_paths - {'a.py', 'a.go'}
) )
def test_question_mark(self): def test_question_mark(self):
assert self.exclude(['?.py']) == convert_paths( assert self.exclude(['?.py']) == convert_paths(
self.all_paths - set(['a.py', 'b.py']) self.all_paths - {'a.py', 'b.py'}
) )
def test_single_subdir_single_filename(self): def test_single_subdir_single_filename(self):
assert self.exclude(['foo/a.py']) == convert_paths( assert self.exclude(['foo/a.py']) == convert_paths(
self.all_paths - set(['foo/a.py']) self.all_paths - {'foo/a.py'}
) )
def test_single_subdir_single_filename_leading_slash(self): def test_single_subdir_single_filename_leading_slash(self):
assert self.exclude(['/foo/a.py']) == convert_paths( assert self.exclude(['/foo/a.py']) == convert_paths(
self.all_paths - set(['foo/a.py']) self.all_paths - {'foo/a.py'}
) )
def test_exclude_include_absolute_path(self): def test_exclude_include_absolute_path(self):
@ -176,57 +174,57 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths( assert exclude_paths(
base, base,
['/*', '!/*.py'] ['/*', '!/*.py']
) == set(['a.py', 'b.py']) ) == {'a.py', 'b.py'}
def test_single_subdir_with_path_traversal(self): def test_single_subdir_with_path_traversal(self):
assert self.exclude(['foo/whoops/../a.py']) == convert_paths( assert self.exclude(['foo/whoops/../a.py']) == convert_paths(
self.all_paths - set(['foo/a.py']) self.all_paths - {'foo/a.py'}
) )
def test_single_subdir_wildcard_filename(self): def test_single_subdir_wildcard_filename(self):
assert self.exclude(['foo/*.py']) == convert_paths( assert self.exclude(['foo/*.py']) == convert_paths(
self.all_paths - set(['foo/a.py', 'foo/b.py']) self.all_paths - {'foo/a.py', 'foo/b.py'}
) )
def test_wildcard_subdir_single_filename(self): def test_wildcard_subdir_single_filename(self):
assert self.exclude(['*/a.py']) == convert_paths( assert self.exclude(['*/a.py']) == convert_paths(
self.all_paths - set(['foo/a.py', 'bar/a.py']) self.all_paths - {'foo/a.py', 'bar/a.py'}
) )
def test_wildcard_subdir_wildcard_filename(self): def test_wildcard_subdir_wildcard_filename(self):
assert self.exclude(['*/*.py']) == convert_paths( assert self.exclude(['*/*.py']) == convert_paths(
self.all_paths - set(['foo/a.py', 'foo/b.py', 'bar/a.py']) self.all_paths - {'foo/a.py', 'foo/b.py', 'bar/a.py'}
) )
def test_directory(self): def test_directory(self):
assert self.exclude(['foo']) == convert_paths( assert self.exclude(['foo']) == convert_paths(
self.all_paths - set([ self.all_paths - {
'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py', 'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py',
'foo/Dockerfile3' 'foo/Dockerfile3'
]) }
) )
def test_directory_with_trailing_slash(self): def test_directory_with_trailing_slash(self):
assert self.exclude(['foo']) == convert_paths( assert self.exclude(['foo']) == convert_paths(
self.all_paths - set([ self.all_paths - {
'foo', 'foo/a.py', 'foo/b.py', 'foo', 'foo/a.py', 'foo/b.py',
'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3'
]) }
) )
def test_directory_with_single_exception(self): def test_directory_with_single_exception(self):
assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths( assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths(
self.all_paths - set([ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/bar', 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar',
'foo/Dockerfile3' 'foo/Dockerfile3'
]) }
) )
def test_directory_with_subdir_exception(self): def test_directory_with_subdir_exception(self):
assert self.exclude(['foo', '!foo/bar']) == convert_paths( assert self.exclude(['foo', '!foo/bar']) == convert_paths(
self.all_paths - set([ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3' 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
]) }
) )
@pytest.mark.skipif( @pytest.mark.skipif(
@ -234,21 +232,21 @@ class ExcludePathsTest(unittest.TestCase):
) )
def test_directory_with_subdir_exception_win32_pathsep(self): def test_directory_with_subdir_exception_win32_pathsep(self):
assert self.exclude(['foo', '!foo\\bar']) == convert_paths( assert self.exclude(['foo', '!foo\\bar']) == convert_paths(
self.all_paths - set([ self.all_paths - {
'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3' 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3'
]) }
) )
def test_directory_with_wildcard_exception(self): def test_directory_with_wildcard_exception(self):
assert self.exclude(['foo', '!foo/*.py']) == convert_paths( assert self.exclude(['foo', '!foo/*.py']) == convert_paths(
self.all_paths - set([ self.all_paths - {
'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3' 'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3'
]) }
) )
def test_subdirectory(self): def test_subdirectory(self):
assert self.exclude(['foo/bar']) == convert_paths( assert self.exclude(['foo/bar']) == convert_paths(
self.all_paths - set(['foo/bar', 'foo/bar/a.py']) self.all_paths - {'foo/bar', 'foo/bar/a.py'}
) )
@pytest.mark.skipif( @pytest.mark.skipif(
@ -256,33 +254,33 @@ class ExcludePathsTest(unittest.TestCase):
) )
def test_subdirectory_win32_pathsep(self): def test_subdirectory_win32_pathsep(self):
assert self.exclude(['foo\\bar']) == convert_paths( assert self.exclude(['foo\\bar']) == convert_paths(
self.all_paths - set(['foo/bar', 'foo/bar/a.py']) self.all_paths - {'foo/bar', 'foo/bar/a.py'}
) )
def test_double_wildcard(self): def test_double_wildcard(self):
assert self.exclude(['**/a.py']) == convert_paths( assert self.exclude(['**/a.py']) == convert_paths(
self.all_paths - set( self.all_paths - {
['a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py'] 'a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py'
) }
) )
assert self.exclude(['foo/**/bar']) == convert_paths( assert self.exclude(['foo/**/bar']) == convert_paths(
self.all_paths - set(['foo/bar', 'foo/bar/a.py']) self.all_paths - {'foo/bar', 'foo/bar/a.py'}
) )
def test_single_and_double_wildcard(self): def test_single_and_double_wildcard(self):
assert self.exclude(['**/target/*/*']) == convert_paths( assert self.exclude(['**/target/*/*']) == convert_paths(
self.all_paths - set( self.all_paths - {
['target/subdir/file.txt', 'target/subdir/file.txt',
'subdir/target/subdir/file.txt', 'subdir/target/subdir/file.txt',
'subdir/subdir2/target/subdir/file.txt'] 'subdir/subdir2/target/subdir/file.txt'
) }
) )
def test_trailing_double_wildcard(self): def test_trailing_double_wildcard(self):
assert self.exclude(['subdir/**']) == convert_paths( assert self.exclude(['subdir/**']) == convert_paths(
self.all_paths - set( self.all_paths - {
['subdir/file.txt', 'subdir/file.txt',
'subdir/target/file.txt', 'subdir/target/file.txt',
'subdir/target/subdir/file.txt', 'subdir/target/subdir/file.txt',
'subdir/subdir2/file.txt', 'subdir/subdir2/file.txt',
@ -292,16 +290,16 @@ class ExcludePathsTest(unittest.TestCase):
'subdir/target/subdir', 'subdir/target/subdir',
'subdir/subdir2', 'subdir/subdir2',
'subdir/subdir2/target', 'subdir/subdir2/target',
'subdir/subdir2/target/subdir'] 'subdir/subdir2/target/subdir'
) }
) )
def test_double_wildcard_with_exception(self): def test_double_wildcard_with_exception(self):
assert self.exclude(['**', '!bar', '!foo/bar']) == convert_paths( assert self.exclude(['**', '!bar', '!foo/bar']) == convert_paths(
set([ {
'foo/bar', 'foo/bar/a.py', 'bar', 'bar/a.py', 'Dockerfile', 'foo/bar', 'foo/bar/a.py', 'bar', 'bar/a.py', 'Dockerfile',
'.dockerignore', '.dockerignore',
]) }
) )
def test_include_wildcard(self): def test_include_wildcard(self):
@ -324,7 +322,7 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths( assert exclude_paths(
base, base,
['*.md', '!README*.md', 'README-secret.md'] ['*.md', '!README*.md', 'README-secret.md']
) == set(['README.md', 'README-bis.md']) ) == {'README.md', 'README-bis.md'}
def test_parent_directory(self): def test_parent_directory(self):
base = make_tree( base = make_tree(
@ -340,7 +338,7 @@ class ExcludePathsTest(unittest.TestCase):
assert exclude_paths( assert exclude_paths(
base, base,
['../a.py', '/../b.py'] ['../a.py', '/../b.py']
) == set(['c.py']) ) == {'c.py'}
class TarTest(unittest.TestCase): class TarTest(unittest.TestCase):
@ -374,14 +372,14 @@ class TarTest(unittest.TestCase):
'.dockerignore', '.dockerignore',
] ]
expected_names = set([ expected_names = {
'Dockerfile', 'Dockerfile',
'.dockerignore', '.dockerignore',
'a.go', 'a.go',
'b.py', 'b.py',
'bar', 'bar',
'bar/a.py', 'bar/a.py',
]) }
base = make_tree(dirs, files) base = make_tree(dirs, files)
self.addCleanup(shutil.rmtree, base) self.addCleanup(shutil.rmtree, base)
@ -413,7 +411,7 @@ class TarTest(unittest.TestCase):
with pytest.raises(IOError) as ei: with pytest.raises(IOError) as ei:
tar(base) tar(base)
assert 'Can not read file in context: {}'.format(full_path) in ( assert f'Can not read file in context: {full_path}' in (
ei.exconly() ei.exconly()
) )

View File

@ -11,7 +11,7 @@ from docker.utils import config
try: try:
from unittest import mock from unittest import mock
except ImportError: except ImportError:
import mock from unittest import mock
class FindConfigFileTest(unittest.TestCase): class FindConfigFileTest(unittest.TestCase):

View File

@ -1,11 +1,7 @@
# encoding: utf-8
from __future__ import absolute_import
from __future__ import unicode_literals
from docker.utils.json_stream import json_splitter, stream_as_text, json_stream from docker.utils.json_stream import json_splitter, stream_as_text, json_stream
class TestJsonSplitter(object): class TestJsonSplitter:
def test_json_splitter_no_object(self): def test_json_splitter_no_object(self):
data = '{"foo": "bar' data = '{"foo": "bar'
@ -20,7 +16,7 @@ class TestJsonSplitter(object):
assert json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}') assert json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}')
class TestStreamAsText(object): class TestStreamAsText:
def test_stream_with_non_utf_unicode_character(self): def test_stream_with_non_utf_unicode_character(self):
stream = [b'\xed\xf3\xf3'] stream = [b'\xed\xf3\xf3']
@ -28,12 +24,12 @@ class TestStreamAsText(object):
assert output == '���' assert output == '���'
def test_stream_with_utf_character(self): def test_stream_with_utf_character(self):
stream = ['ěĝ'.encode('utf-8')] stream = ['ěĝ'.encode()]
output, = stream_as_text(stream) output, = stream_as_text(stream)
assert output == 'ěĝ' assert output == 'ěĝ'
class TestJsonStream(object): class TestJsonStream:
def test_with_falsy_entries(self): def test_with_falsy_entries(self):
stream = [ stream = [

View File

@ -1,7 +1,4 @@
# -*- coding: utf-8 -*-
import unittest import unittest
import six
from docker.utils.proxy import ProxyConfig from docker.utils.proxy import ProxyConfig
@ -65,7 +62,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is None. # Proxy config is non null, env is None.
self.assertSetEqual( self.assertSetEqual(
set(CONFIG.inject_proxy_environment(None)), set(CONFIG.inject_proxy_environment(None)),
set(['{}={}'.format(k, v) for k, v in six.iteritems(ENV)])) {f'{k}={v}' for k, v in ENV.items()})
# Proxy config is null, env is None. # Proxy config is null, env is None.
self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None) self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None)
@ -74,7 +71,7 @@ class ProxyConfigTest(unittest.TestCase):
# Proxy config is non null, env is non null # Proxy config is non null, env is non null
actual = CONFIG.inject_proxy_environment(env) actual = CONFIG.inject_proxy_environment(env)
expected = ['{}={}'.format(k, v) for k, v in six.iteritems(ENV)] + env expected = [f'{k}={v}' for k, v in ENV.items()] + env
# It's important that the first 8 variables are the ones from the proxy # It's important that the first 8 variables are the ones from the proxy
# config, and the last 2 are the ones from the input environment # config, and the last 2 are the ones from the input environment
self.assertSetEqual(set(actual[:8]), set(expected[:8])) self.assertSetEqual(set(actual[:8]), set(expected[:8]))

View File

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
import base64 import base64
import json import json
import os import os
@ -9,7 +7,6 @@ import tempfile
import unittest import unittest
import pytest import pytest
import six
from docker.api.client import APIClient from docker.api.client import APIClient
from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION
from docker.errors import DockerException from docker.errors import DockerException
@ -195,22 +192,22 @@ class ConverVolumeBindsTest(unittest.TestCase):
assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw'] assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw']
def test_convert_volume_binds_unicode_bytes_input(self): def test_convert_volume_binds_unicode_bytes_input(self):
expected = [u'/mnt/지연:/unicode/박:rw'] expected = ['/mnt/지연:/unicode/박:rw']
data = { data = {
u'/mnt/지연'.encode('utf-8'): { '/mnt/지연'.encode(): {
'bind': u'/unicode/박'.encode('utf-8'), 'bind': '/unicode/박'.encode(),
'mode': 'rw' 'mode': 'rw'
} }
} }
assert convert_volume_binds(data) == expected assert convert_volume_binds(data) == expected
def test_convert_volume_binds_unicode_unicode_input(self): def test_convert_volume_binds_unicode_unicode_input(self):
expected = [u'/mnt/지연:/unicode/박:rw'] expected = ['/mnt/지연:/unicode/박:rw']
data = { data = {
u'/mnt/지연': { '/mnt/지연': {
'bind': u'/unicode/박', 'bind': '/unicode/박',
'mode': 'rw' 'mode': 'rw'
} }
} }
@ -359,14 +356,14 @@ class ParseRepositoryTagTest(unittest.TestCase):
) )
def test_index_image_sha(self): def test_index_image_sha(self):
assert parse_repository_tag("root@sha256:{0}".format(self.sha)) == ( assert parse_repository_tag(f"root@sha256:{self.sha}") == (
"root", "sha256:{0}".format(self.sha) "root", f"sha256:{self.sha}"
) )
def test_private_reg_image_sha(self): def test_private_reg_image_sha(self):
assert parse_repository_tag( assert parse_repository_tag(
"url:5000/repo@sha256:{0}".format(self.sha) f"url:5000/repo@sha256:{self.sha}"
) == ("url:5000/repo", "sha256:{0}".format(self.sha)) ) == ("url:5000/repo", f"sha256:{self.sha}")
class ParseDeviceTest(unittest.TestCase): class ParseDeviceTest(unittest.TestCase):
@ -463,20 +460,13 @@ class UtilsTest(unittest.TestCase):
def test_decode_json_header(self): def test_decode_json_header(self):
obj = {'a': 'b', 'c': 1} obj = {'a': 'b', 'c': 1}
data = None data = None
if six.PY3: data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8'))
data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8'))
else:
data = base64.urlsafe_b64encode(json.dumps(obj))
decoded_data = decode_json_header(data) decoded_data = decode_json_header(data)
assert obj == decoded_data assert obj == decoded_data
class SplitCommandTest(unittest.TestCase): class SplitCommandTest(unittest.TestCase):
def test_split_command_with_unicode(self): def test_split_command_with_unicode(self):
assert split_command(u'echo μμ') == ['echo', 'μμ']
@pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3")
def test_split_command_with_bytes(self):
assert split_command('echo μμ') == ['echo', 'μμ'] assert split_command('echo μμ') == ['echo', 'μμ']
@ -626,7 +616,7 @@ class FormatEnvironmentTest(unittest.TestCase):
env_dict = { env_dict = {
'ARTIST_NAME': b'\xec\x86\xa1\xec\xa7\x80\xec\x9d\x80' 'ARTIST_NAME': b'\xec\x86\xa1\xec\xa7\x80\xec\x9d\x80'
} }
assert format_environment(env_dict) == [u'ARTIST_NAME=송지은'] assert format_environment(env_dict) == ['ARTIST_NAME=송지은']
def test_format_env_no_value(self): def test_format_env_no_value(self):
env_dict = { env_dict = {