Replace string formatting with f-strings

Signed-off-by: Aarni Koskela <akx@iki.fi>
Aarni Koskela 2023-05-11 16:12:41 +03:00
parent 78439ebbe1
commit 002c035e45
40 changed files with 214 additions and 294 deletions

View File

@ -314,9 +314,8 @@ class BuildApiMixin:
auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
log.debug( log.debug(
'Sending auth config ({})'.format( "Sending auth config (%s)",
', '.join(repr(k) for k in auth_data.keys()) ', '.join(repr(k) for k in auth_data),
)
) )
if auth_data: if auth_data:
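
Note on the log.debug() change above: instead of an f-string, the call switches to logging's lazy %-style arguments. A minimal standalone sketch of the resulting call shape (the logger name is illustrative, not the library's):

    import logging

    log = logging.getLogger("docker.example")  # illustrative name only
    auth_data = {"https://index.docker.io/v1/": {}}

    # The join argument is still evaluated eagerly, but the %-interpolation
    # itself is deferred until a handler actually emits the record.
    log.debug(
        "Sending auth config (%s)",
        ", ".join(repr(k) for k in auth_data),
    )
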
@ -336,12 +335,9 @@ def process_dockerfile(dockerfile, path):
abs_dockerfile = os.path.join(path, dockerfile) abs_dockerfile = os.path.join(path, dockerfile)
if constants.IS_WINDOWS_PLATFORM and path.startswith( if constants.IS_WINDOWS_PLATFORM and path.startswith(
constants.WINDOWS_LONGPATH_PREFIX): constants.WINDOWS_LONGPATH_PREFIX):
abs_dockerfile = '{}{}'.format( normpath = os.path.normpath(
constants.WINDOWS_LONGPATH_PREFIX, abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):])
os.path.normpath( abs_dockerfile = f'{constants.WINDOWS_LONGPATH_PREFIX}{normpath}'
abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]
)
)
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')): os.path.relpath(abs_dockerfile, path).startswith('..')):
# Dockerfile not in context - read data to insert into tar later # Dockerfile not in context - read data to insert into tar later

View File

@ -199,14 +199,13 @@ class APIClient(
self._version = version self._version = version
if not isinstance(self._version, str): if not isinstance(self._version, str):
raise DockerException( raise DockerException(
'Version parameter must be a string or None. Found {}'.format( 'Version parameter must be a string or None. '
type(version).__name__ f'Found {type(version).__name__}'
)
) )
if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION): if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
raise InvalidVersion( raise InvalidVersion(
'API versions below {} are no longer supported by this ' f'API versions below {MINIMUM_DOCKER_API_VERSION} are '
'library.'.format(MINIMUM_DOCKER_API_VERSION) f'no longer supported by this library.'
) )
def _retrieve_server_version(self): def _retrieve_server_version(self):
@ -248,19 +247,17 @@ class APIClient(
for arg in args: for arg in args:
if not isinstance(arg, str): if not isinstance(arg, str):
raise ValueError( raise ValueError(
'Expected a string but found {} ({}) ' f'Expected a string but found {arg} ({type(arg)}) instead'
'instead'.format(arg, type(arg))
) )
quote_f = partial(urllib.parse.quote, safe="/:") quote_f = partial(urllib.parse.quote, safe="/:")
args = map(quote_f, args) args = map(quote_f, args)
formatted_path = pathfmt.format(*args)
if kwargs.get('versioned_api', True): if kwargs.get('versioned_api', True):
return '{}/v{}{}'.format( return f'{self.base_url}/v{self._version}{formatted_path}'
self.base_url, self._version, pathfmt.format(*args)
)
else: else:
return f'{self.base_url}{pathfmt.format(*args)}' return f'{self.base_url}{formatted_path}'
def _raise_for_status(self, response): def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred.""" """Raises stored :class:`APIError`, if one occurred."""

View File

@ -861,8 +861,8 @@ class ContainerApiMixin:
params['since'] = since params['since'] = since
else: else:
raise errors.InvalidArgument( raise errors.InvalidArgument(
'since value should be datetime or positive int/float, ' 'since value should be datetime or positive int/float,'
'not {}'.format(type(since)) f' not {type(since)}'
) )
if until is not None: if until is not None:
@ -878,8 +878,8 @@ class ContainerApiMixin:
params['until'] = until params['until'] = until
else: else:
raise errors.InvalidArgument( raise errors.InvalidArgument(
'until value should be datetime or positive int/float, ' f'until value should be datetime or positive int/float, '
'not {}'.format(type(until)) f'not {type(until)}'
) )
url = self._url("/containers/{0}/logs", container) url = self._url("/containers/{0}/logs", container)
@ -951,7 +951,7 @@ class ContainerApiMixin:
return port_settings.get(private_port) return port_settings.get(private_port)
for protocol in ['tcp', 'udp', 'sctp']: for protocol in ['tcp', 'udp', 'sctp']:
h_ports = port_settings.get(private_port + '/' + protocol) h_ports = port_settings.get(f"{private_port}/{protocol}")
if h_ports: if h_ports:
break break

View File

@ -7,9 +7,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec,
def raise_version_error(param, min_version): def raise_version_error(param, min_version):
raise errors.InvalidVersion( raise errors.InvalidVersion(
'{} is not supported in API version < {}'.format( f'{param} is not supported in API version < {min_version}'
param, min_version
)
) )
if update_config is not None: if update_config is not None:

View File

@ -22,15 +22,15 @@ def resolve_repository_name(repo_name):
index_name, remote_name = split_repo_name(repo_name) index_name, remote_name = split_repo_name(repo_name)
if index_name[0] == '-' or index_name[-1] == '-': if index_name[0] == '-' or index_name[-1] == '-':
raise errors.InvalidRepository( raise errors.InvalidRepository(
'Invalid index name ({}). Cannot begin or end with a' f'Invalid index name ({index_name}). '
' hyphen.'.format(index_name) 'Cannot begin or end with a hyphen.'
) )
return resolve_index_name(index_name), remote_name return resolve_index_name(index_name), remote_name
def resolve_index_name(index_name): def resolve_index_name(index_name):
index_name = convert_to_hostname(index_name) index_name = convert_to_hostname(index_name)
if index_name == 'index.' + INDEX_NAME: if index_name == f"index.{INDEX_NAME}":
index_name = INDEX_NAME index_name = INDEX_NAME
return index_name return index_name
@ -99,9 +99,7 @@ class AuthConfig(dict):
for registry, entry in entries.items(): for registry, entry in entries.items():
if not isinstance(entry, dict): if not isinstance(entry, dict):
log.debug( log.debug(
'Config entry for key {} is not auth config'.format( f'Config entry for key {registry} is not auth config'
registry
)
) )
# We sometimes fall back to parsing the whole config as if it # We sometimes fall back to parsing the whole config as if it
# was the auth config by itself, for legacy purposes. In that # was the auth config by itself, for legacy purposes. In that
@ -109,17 +107,11 @@ class AuthConfig(dict):
# keys is not formatted properly. # keys is not formatted properly.
if raise_on_error: if raise_on_error:
raise errors.InvalidConfigFile( raise errors.InvalidConfigFile(
'Invalid configuration for registry {}'.format( f'Invalid configuration for registry {registry}'
registry
)
) )
return {} return {}
if 'identitytoken' in entry: if 'identitytoken' in entry:
log.debug( log.debug(f'Found an IdentityToken entry for registry {registry}')
'Found an IdentityToken entry for registry {}'.format(
registry
)
)
conf[registry] = { conf[registry] = {
'IdentityToken': entry['identitytoken'] 'IdentityToken': entry['identitytoken']
} }
@ -130,16 +122,15 @@ class AuthConfig(dict):
# a valid value in the auths config. # a valid value in the auths config.
# https://github.com/docker/compose/issues/3265 # https://github.com/docker/compose/issues/3265
log.debug( log.debug(
'Auth data for {} is absent. Client might be using a ' f'Auth data for {registry} is absent. '
'credentials store instead.'.format(registry) f'Client might be using a credentials store instead.'
) )
conf[registry] = {} conf[registry] = {}
continue continue
username, password = decode_auth(entry['auth']) username, password = decode_auth(entry['auth'])
log.debug( log.debug(
'Found entry (registry={}, username={})' f'Found entry (registry={registry!r}, username={username!r})'
.format(repr(registry), repr(username))
) )
conf[registry] = { conf[registry] = {
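
The {registry!r} and {username!r} placeholders above use the !r conversion, the f-string equivalent of the explicit repr() calls passed to .format() before. A small standalone check:

    registry = "https://index.docker.io/v1/"
    username = "sakuya"

    # !r applies repr(), so the quoting matches the old repr(...) arguments.
    msg = f'Found entry (registry={registry!r}, username={username!r})'
    assert msg == ('Found entry (registry=' + repr(registry) +
                   ', username=' + repr(username) + ')')
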

View File

@ -113,8 +113,8 @@ class ContextAPI:
names.append(data["Name"]) names.append(data["Name"])
except Exception as e: except Exception as e:
raise errors.ContextException( raise errors.ContextException(
"Failed to load metafile {}: {}".format( f"Failed to load metafile {filename}: {e}",
filename, e)) )
contexts = [cls.DEFAULT_CONTEXT] contexts = [cls.DEFAULT_CONTEXT]
for name in names: for name in names:

View File

@ -42,8 +42,9 @@ class Context:
for k, v in endpoints.items(): for k, v in endpoints.items():
if not isinstance(v, dict): if not isinstance(v, dict):
# unknown format # unknown format
raise ContextException("""Unknown endpoint format for raise ContextException(
context {}: {}""".format(name, v)) f"Unknown endpoint format for context {name}: {v}",
)
self.endpoints[k] = v self.endpoints[k] = v
if k != "docker": if k != "docker":
@ -96,8 +97,9 @@ class Context:
metadata = json.load(f) metadata = json.load(f)
except (OSError, KeyError, ValueError) as e: except (OSError, KeyError, ValueError) as e:
# unknown format # unknown format
raise Exception("""Detected corrupted meta file for raise Exception(
context {} : {}""".format(name, e)) f"Detected corrupted meta file for context {name} : {e}"
)
# for docker endpoints, set defaults for # for docker endpoints, set defaults for
# Host and SkipTLSVerify fields # Host and SkipTLSVerify fields

View File

@ -13,13 +13,5 @@ class InitializationError(StoreError):
def process_store_error(cpe, program): def process_store_error(cpe, program):
message = cpe.output.decode('utf-8') message = cpe.output.decode('utf-8')
if 'credentials not found in native keychain' in message: if 'credentials not found in native keychain' in message:
return CredentialsNotFound( return CredentialsNotFound(f'No matching credentials in {program}')
'No matching credentials in {}'.format( return StoreError(f'Credentials store {program} exited with "{message}".')
program
)
)
return StoreError(
'Credentials store {} exited with "{}".'.format(
program, cpe.output.decode('utf-8').strip()
)
)

View File

@ -20,9 +20,7 @@ class Store:
self.environment = environment self.environment = environment
if self.exe is None: if self.exe is None:
warnings.warn( warnings.warn(
'{} not installed or not available in PATH'.format( f'{self.program} not installed or not available in PATH'
self.program
)
) )
def get(self, server): def get(self, server):
@ -73,10 +71,8 @@ class Store:
def _execute(self, subcmd, data_input): def _execute(self, subcmd, data_input):
if self.exe is None: if self.exe is None:
raise errors.StoreError( raise errors.StoreError(
'{} not installed or not available in PATH'.format( f'{self.program} not installed or not available in PATH'
self.program )
)
)
output = None output = None
env = create_environment_dict(self.environment) env = create_environment_dict(self.environment)
try: try:
@ -88,14 +84,10 @@ class Store:
except OSError as e: except OSError as e:
if e.errno == errno.ENOENT: if e.errno == errno.ENOENT:
raise errors.StoreError( raise errors.StoreError(
'{} not installed or not available in PATH'.format( f'{self.program} not installed or not available in PATH'
self.program
)
) )
else: else:
raise errors.StoreError( raise errors.StoreError(
'Unexpected OS error "{}", errno={}'.format( f'Unexpected OS error "{e.strerror}", errno={e.errno}'
e.strerror, e.errno
)
) )
return output return output

View File

@ -54,14 +54,16 @@ class APIError(requests.exceptions.HTTPError, DockerException):
message = super().__str__() message = super().__str__()
if self.is_client_error(): if self.is_client_error():
message = '{} Client Error for {}: {}'.format( message = (
self.response.status_code, self.response.url, f'{self.response.status_code} Client Error for '
self.response.reason) f'{self.response.url}: {self.response.reason}'
)
elif self.is_server_error(): elif self.is_server_error():
message = '{} Server Error for {}: {}'.format( message = (
self.response.status_code, self.response.url, f'{self.response.status_code} Server Error for '
self.response.reason) f'{self.response.url}: {self.response.reason}'
)
if self.explanation: if self.explanation:
message = f'{message} ("{self.explanation}")' message = f'{message} ("{self.explanation}")'
@ -142,10 +144,10 @@ class ContainerError(DockerException):
self.stderr = stderr self.stderr = stderr
err = f": {stderr}" if stderr is not None else "" err = f": {stderr}" if stderr is not None else ""
msg = ("Command '{}' in image '{}' returned non-zero exit " super().__init__(
"status {}{}").format(command, image, exit_status, err) f"Command '{command}' in image '{image}' "
f"returned non-zero exit status {exit_status}{err}"
super().__init__(msg) )
class StreamParseError(RuntimeError): class StreamParseError(RuntimeError):

View File

@ -15,10 +15,8 @@ class Image(Model):
An image on the server. An image on the server.
""" """
def __repr__(self): def __repr__(self):
return "<{}: '{}'>".format( tag_str = "', '".join(self.tags)
self.__class__.__name__, return f"<{self.__class__.__name__}: '{tag_str}'>"
"', '".join(self.tags),
)
@property @property
def labels(self): def labels(self):
@ -471,9 +469,8 @@ class ImageCollection(Collection):
# to be pulled. # to be pulled.
pass pass
if not all_tags: if not all_tags:
return self.get('{0}{2}{1}'.format( sep = '@' if tag.startswith('sha256:') else ':'
repository, tag, '@' if tag.startswith('sha256:') else ':' return self.get(f'{repository}{sep}{tag}')
))
return self.list(repository) return self.list(repository)
def push(self, repository, tag=None, **kwargs): def push(self, repository, tag=None, **kwargs):
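
The pull() change above hoists the digest-vs-tag separator into a local sep variable rather than embedding the conditional in the f-string; besides readability, reusing the outer quote character inside an f-string is a syntax error before Python 3.12. A standalone sketch (the digest value is made up):

    repository = "alpine"
    for tag, expected in [
        ("3.19", "alpine:3.19"),                        # plain tag -> ":"
        ("sha256:deadbeef", "alpine@sha256:deadbeef"),  # digest -> "@"
    ]:
        sep = '@' if tag.startswith('sha256:') else ':'
        assert f'{repository}{sep}{tag}' == expected
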

View File

@ -64,9 +64,10 @@ class Collection:
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
raise TypeError( raise TypeError(
"'{}' object is not callable. You might be trying to use the old " f"'{self.__class__.__name__}' object is not callable. "
"(pre-2.0) API - use docker.APIClient if so." "You might be trying to use the old (pre-2.0) API - "
.format(self.__class__.__name__)) "use docker.APIClient if so."
)
def list(self): def list(self):
raise NotImplementedError raise NotImplementedError
@ -88,5 +89,4 @@ class Collection:
elif isinstance(attrs, dict): elif isinstance(attrs, dict):
return self.model(attrs=attrs, client=self.client, collection=self) return self.model(attrs=attrs, client=self.client, collection=self)
else: else:
raise Exception("Can't create %s from %s" % raise Exception(f"Can't create {self.model.__name__} from {attrs}")
(self.model.__name__, attrs))

View File

@ -55,7 +55,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): max_pool_size=constants.DEFAULT_MAX_POOL_SIZE):
socket_path = socket_url.replace('http+unix://', '') socket_path = socket_url.replace('http+unix://', '')
if not socket_path.startswith('/'): if not socket_path.startswith('/'):
socket_path = '/' + socket_path socket_path = f"/{socket_path}"
self.socket_path = socket_path self.socket_path = socket_path
self.timeout = timeout self.timeout = timeout
self.max_pool_size = max_pool_size self.max_pool_size = max_pool_size

View File

@ -652,25 +652,25 @@ class HostConfig(dict):
def host_config_type_error(param, param_value, expected): def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}' return TypeError(
return TypeError(error_msg.format(param, expected, type(param_value))) f'Invalid type for {param} param: expected {expected} '
f'but found {type(param_value)}'
)
def host_config_version_error(param, version, less_than=True): def host_config_version_error(param, version, less_than=True):
operator = '<' if less_than else '>' operator = '<' if less_than else '>'
error_msg = '{0} param is not supported in API versions {1} {2}' return errors.InvalidVersion(
return errors.InvalidVersion(error_msg.format(param, operator, version)) f'{param} param is not supported in API versions {operator} {version}',
)
def host_config_value_error(param, param_value): def host_config_value_error(param, param_value):
error_msg = 'Invalid value for {0} param: {1}' return ValueError(f'Invalid value for {param} param: {param_value}')
return ValueError(error_msg.format(param, param_value))
def host_config_incompatible_error(param, param_value, incompatible_param): def host_config_incompatible_error(param, param_value, incompatible_param):
error_msg = '\"{1}\" {0} is incompatible with {2}'
return errors.InvalidArgument( return errors.InvalidArgument(
error_msg.format(param, param_value, incompatible_param) f'\"{param_value}\" {param} is incompatible with {incompatible_param}'
) )
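
The old host_config_incompatible_error template referenced its arguments by position ({1} before {0}), which is easy to misread; the f-string states them in display order. A quick equivalence check with hypothetical values:

    param, param_value, incompatible = "privileged", True, "capabilities"

    old = '"{1}" {0} is incompatible with {2}'.format(param, param_value, incompatible)
    new = f'"{param_value}" {param} is incompatible with {incompatible}'
    assert old == new == '"True" privileged is incompatible with capabilities'
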

View File

@ -370,8 +370,8 @@ def _convert_generic_resources_dict(generic_resources):
return generic_resources return generic_resources
if not isinstance(generic_resources, dict): if not isinstance(generic_resources, dict):
raise errors.InvalidArgument( raise errors.InvalidArgument(
'generic_resources must be a dict or a list' 'generic_resources must be a dict or a list '
' (found {})'.format(type(generic_resources)) f'(found {type(generic_resources)})'
) )
resources = [] resources = []
for kind, value in generic_resources.items(): for kind, value in generic_resources.items():
@ -381,9 +381,9 @@ def _convert_generic_resources_dict(generic_resources):
elif isinstance(value, str): elif isinstance(value, str):
resource_type = 'NamedResourceSpec' resource_type = 'NamedResourceSpec'
else: else:
kv = {kind: value}
raise errors.InvalidArgument( raise errors.InvalidArgument(
'Unsupported generic resource reservation ' f'Unsupported generic resource reservation type: {kv}'
'type: {}'.format({kind: value})
) )
resources.append({ resources.append({
resource_type: {'Kind': kind, 'Value': value} resource_type: {'Kind': kind, 'Value': value}
@ -764,8 +764,8 @@ class PlacementPreference(dict):
def __init__(self, strategy, descriptor): def __init__(self, strategy, descriptor):
if strategy != 'spread': if strategy != 'spread':
raise errors.InvalidArgument( raise errors.InvalidArgument(
'PlacementPreference strategy value is invalid ({}):' f'PlacementPreference strategy value is invalid ({strategy}): '
' must be "spread".'.format(strategy) 'must be "spread".'
) )
self['Spread'] = {'SpreadDescriptor': descriptor} self['Spread'] = {'SpreadDescriptor': descriptor}

View File

@ -42,7 +42,7 @@ def exclude_paths(root, patterns, dockerfile=None):
if dockerfile is None: if dockerfile is None:
dockerfile = 'Dockerfile' dockerfile = 'Dockerfile'
patterns.append('!' + dockerfile) patterns.append(f"!{dockerfile}")
pm = PatternMatcher(patterns) pm = PatternMatcher(patterns)
return set(pm.walk(root)) return set(pm.walk(root))
@ -180,7 +180,7 @@ class PatternMatcher:
fpath = os.path.join( fpath = os.path.join(
os.path.relpath(current_dir, root), f os.path.relpath(current_dir, root), f
) )
if fpath.startswith('.' + os.path.sep): if fpath.startswith(f".{os.path.sep}"):
fpath = fpath[2:] fpath = fpath[2:]
match = self.matches(fpath) match = self.matches(fpath)
if not match: if not match:

View File

@ -27,9 +27,7 @@ def minimum_version(version):
def wrapper(self, *args, **kwargs): def wrapper(self, *args, **kwargs):
if utils.version_lt(self._version, version): if utils.version_lt(self._version, version):
raise errors.InvalidVersion( raise errors.InvalidVersion(
'{} is not available for version < {}'.format( f'{f.__name__} is not available for version < {version}',
f.__name__, version
)
) )
return f(self, *args, **kwargs) return f(self, *args, **kwargs)
return wrapper return wrapper

View File

@ -79,18 +79,18 @@ def translate(pat):
i = i + 1 i = i + 1
if i >= n: if i >= n:
# is "**EOF" - to align with .gitignore just accept all # is "**EOF" - to align with .gitignore just accept all
res = res + '.*' res = f"{res}.*"
else: else:
# is "**" # is "**"
# Note that this allows for any # of /'s (even 0) because # Note that this allows for any # of /'s (even 0) because
# the .* will eat everything, even /'s # the .* will eat everything, even /'s
res = res + '(.*/)?' res = f"{res}(.*/)?"
else: else:
# is "*" so map it to anything but "/" # is "*" so map it to anything but "/"
res = res + '[^/]*' res = f"{res}[^/]*"
elif c == '?': elif c == '?':
# "?" is any char except "/" # "?" is any char except "/"
res = res + '[^/]' res = f"{res}[^/]"
elif c == '[': elif c == '[':
j = i j = i
if j < n and pat[j] == '!': if j < n and pat[j] == '!':
@ -100,16 +100,16 @@ def translate(pat):
while j < n and pat[j] != ']': while j < n and pat[j] != ']':
j = j + 1 j = j + 1
if j >= n: if j >= n:
res = res + '\\[' res = f"{res}\\["
else: else:
stuff = pat[i:j].replace('\\', '\\\\') stuff = pat[i:j].replace('\\', '\\\\')
i = j + 1 i = j + 1
if stuff[0] == '!': if stuff[0] == '!':
stuff = '^' + stuff[1:] stuff = f"^{stuff[1:]}"
elif stuff[0] == '^': elif stuff[0] == '^':
stuff = '\\' + stuff stuff = f"\\{stuff}"
res = f'{res}[{stuff}]' res = f'{res}[{stuff}]'
else: else:
res = res + re.escape(c) res = res + re.escape(c)
return res + '$' return f"{res}$"
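
translate() above accumulates the regex with repeated res = f"{res}..." assignments, a drop-in for the old res = res + '...' form. A standalone simplification (not docker's translate(), only the * and ? cases) showing the same accumulation pattern:

    import re

    def glob_piece_to_regex(pattern):
        res = ""
        for ch in pattern:
            if ch == "*":
                res = f"{res}[^/]*"   # "*" matches anything except "/"
            elif ch == "?":
                res = f"{res}[^/]"    # "?" matches one non-"/" character
            else:
                res = f"{res}{re.escape(ch)}"
        return f"{res}$"

    assert re.match(glob_piece_to_regex("*.txt"), "notes.txt")
    assert not re.match(glob_piece_to_regex("*.txt"), "dir/notes.txt")
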

View File

@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False):
if not end: if not end:
return [start + proto] return [start + proto]
if randomly_available_port: if randomly_available_port:
return [f'{start}-{end}' + proto] return [f"{start}-{end}{proto}"]
return [str(port) + proto for port in range(int(start), int(end) + 1)] return [str(port) + proto for port in range(int(start), int(end) + 1)]

View File

@ -69,5 +69,9 @@ class ProxyConfig(dict):
return proxy_env + environment return proxy_env + environment
def __str__(self): def __str__(self):
return 'ProxyConfig(http={}, https={}, ftp={}, no_proxy={})'.format( return (
self.http, self.https, self.ftp, self.no_proxy) 'ProxyConfig('
f'http={self.http}, https={self.https}, '
f'ftp={self.ftp}, no_proxy={self.no_proxy}'
')'
)

View File

@ -127,8 +127,7 @@ def convert_volume_binds(binds):
if isinstance(v, dict): if isinstance(v, dict):
if 'ro' in v and 'mode' in v: if 'ro' in v and 'mode' in v:
raise ValueError( raise ValueError(
'Binding cannot contain both "ro" and "mode": {}' f'Binding cannot contain both "ro" and "mode": {v!r}'
.format(repr(v))
) )
bind = v['bind'] bind = v['bind']
@ -160,8 +159,8 @@ def convert_tmpfs_mounts(tmpfs):
if not isinstance(tmpfs, list): if not isinstance(tmpfs, list):
raise ValueError( raise ValueError(
'Expected tmpfs value to be either a list or a dict, found: {}' 'Expected tmpfs value to be either a list or a dict, '
.format(type(tmpfs).__name__) f'found: {type(tmpfs).__name__}'
) )
result = {} result = {}
@ -175,8 +174,8 @@ def convert_tmpfs_mounts(tmpfs):
else: else:
raise ValueError( raise ValueError(
"Expected item in tmpfs list to be a string, found: {}" "Expected item in tmpfs list to be a string, "
.format(type(mount).__name__) f"found: {type(mount).__name__}"
) )
result[name] = options result[name] = options
@ -218,9 +217,9 @@ def parse_host(addr, is_win32=False, tls=False):
parsed_url = urlparse(addr) parsed_url = urlparse(addr)
proto = parsed_url.scheme proto = parsed_url.scheme
if not proto or any([x not in string.ascii_letters + '+' for x in proto]): if not proto or any([x not in f"{string.ascii_letters}+" for x in proto]):
# https://bugs.python.org/issue754016 # https://bugs.python.org/issue754016
parsed_url = urlparse('//' + addr, 'tcp') parsed_url = urlparse(f"//{addr}", 'tcp')
proto = 'tcp' proto = 'tcp'
if proto == 'fd': if proto == 'fd':
@ -256,15 +255,14 @@ def parse_host(addr, is_win32=False, tls=False):
if parsed_url.path and proto == 'ssh': if parsed_url.path and proto == 'ssh':
raise errors.DockerException( raise errors.DockerException(
'Invalid bind address format: no path allowed for this protocol:' f'Invalid bind address format: no path allowed for this protocol: {addr}'
' {}'.format(addr)
) )
else: else:
path = parsed_url.path path = parsed_url.path
if proto == 'unix' and parsed_url.hostname is not None: if proto == 'unix' and parsed_url.hostname is not None:
# For legacy reasons, we consider unix://path # For legacy reasons, we consider unix://path
# to be valid and equivalent to unix:///path # to be valid and equivalent to unix:///path
path = '/'.join((parsed_url.hostname, path)) path = f"{parsed_url.hostname}/{path}"
netloc = parsed_url.netloc netloc = parsed_url.netloc
if proto in ('tcp', 'ssh'): if proto in ('tcp', 'ssh'):
@ -272,8 +270,7 @@ def parse_host(addr, is_win32=False, tls=False):
if port <= 0: if port <= 0:
if proto != 'ssh': if proto != 'ssh':
raise errors.DockerException( raise errors.DockerException(
'Invalid bind address format: port is required:' f'Invalid bind address format: port is required: {addr}'
' {}'.format(addr)
) )
port = 22 port = 22
netloc = f'{parsed_url.netloc}:{port}' netloc = f'{parsed_url.netloc}:{port}'
@ -283,7 +280,7 @@ def parse_host(addr, is_win32=False, tls=False):
# Rewrite schemes to fit library internals (requests adapters) # Rewrite schemes to fit library internals (requests adapters)
if proto == 'tcp': if proto == 'tcp':
proto = 'http{}'.format('s' if tls else '') proto = f"http{'s' if tls else ''}"
elif proto == 'unix': elif proto == 'unix':
proto = 'http+unix' proto = 'http+unix'
@ -419,17 +416,16 @@ def parse_bytes(s):
digits = float(digits_part) digits = float(digits_part)
except ValueError: except ValueError:
raise errors.DockerException( raise errors.DockerException(
'Failed converting the string value for memory ({}) to' 'Failed converting the string value for memory '
' an integer.'.format(digits_part) f'({digits_part}) to an integer.'
) )
# Reconvert to long for the final result # Reconvert to long for the final result
s = int(digits * units[suffix]) s = int(digits * units[suffix])
else: else:
raise errors.DockerException( raise errors.DockerException(
'The specified value for memory ({}) should specify the' f'The specified value for memory ({s}) should specify the units. '
' units. The postfix should be one of the `b` `k` `m` `g`' 'The postfix should be one of the `b` `k` `m` `g` characters'
' characters'.format(s)
) )
return s return s
@ -465,8 +461,7 @@ def parse_env_file(env_file):
environment[k] = v environment[k] = v
else: else:
raise errors.DockerException( raise errors.DockerException(
'Invalid line in environment file {}:\n{}'.format( f'Invalid line in environment file {env_file}:\n{line}')
env_file, line))
return environment return environment
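
The proto rewrite above embeds a conditional expression in the f-string; note that the inner quotes must differ from the outer ones (reusing the same quote inside an f-string is only allowed from Python 3.12 on). A minimal sketch with a hypothetical address:

    tls = True
    proto = f"http{'s' if tls else ''}"
    assert proto == "https"

    addr = "127.0.0.1:2375"               # hypothetical daemon address
    assert f"//{addr}" == "//" + addr     # same result as the old concatenation
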

View File

@ -56,7 +56,7 @@ master_doc = 'index'
# General information about the project. # General information about the project.
project = 'Docker SDK for Python' project = 'Docker SDK for Python'
year = datetime.datetime.now().year year = datetime.datetime.now().year
copyright = '%d Docker Inc' % year copyright = f'{year} Docker Inc'
author = 'Docker Inc' author = 'Docker Inc'
# The version info for the project you're documenting, acts as replacement for # The version info for the project you're documenting, acts as replacement for

View File

@ -80,7 +80,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40):
start_time = time.time() start_time = time.time()
while not condition(): while not condition():
if time.time() - start_time > timeout: if time.time() - start_time > timeout:
raise AssertionError("Timeout: %s" % condition) raise AssertionError(f"Timeout: {condition}")
time.sleep(delay) time.sleep(delay)

View File

@ -72,6 +72,4 @@ class UnixconnTest(unittest.TestCase):
client.close() client.close()
del client del client
assert len(w) == 0, "No warnings produced: {}".format( assert len(w) == 0, f"No warnings produced: {w[0].message}"
w[0].message
)

View File

@ -666,9 +666,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
test_file.seek(0) test_file.seek(0)
ctnr = self.client.create_container( ctnr = self.client.create_container(
TEST_IMG, TEST_IMG,
'cat {}'.format( f"cat {os.path.join('/vol1/', os.path.basename(test_file.name))}",
os.path.join('/vol1/', os.path.basename(test_file.name))
),
volumes=['/vol1'] volumes=['/vol1']
) )
self.tmp_containers.append(ctnr) self.tmp_containers.append(ctnr)
@ -826,7 +824,7 @@ class LogsTest(BaseAPIIntegrationTest):
exitcode = self.client.wait(id)['StatusCode'] exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0 assert exitcode == 0
logs = self.client.logs(id) logs = self.client.logs(id)
assert logs == (snippet + '\n').encode(encoding='ascii') assert logs == f"{snippet}\n".encode(encoding='ascii')
def test_logs_tail_option(self): def test_logs_tail_option(self):
snippet = '''Line1 snippet = '''Line1
@ -857,7 +855,7 @@ Line2'''
exitcode = self.client.wait(id)['StatusCode'] exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0 assert exitcode == 0
assert logs == (snippet + '\n').encode(encoding='ascii') assert logs == f"{snippet}\n".encode(encoding='ascii')
@pytest.mark.timeout(5) @pytest.mark.timeout(5)
@pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'),
@ -878,7 +876,7 @@ Line2'''
for chunk in generator: for chunk in generator:
logs += chunk logs += chunk
assert logs == (snippet + '\n').encode(encoding='ascii') assert logs == f"{snippet}\n".encode(encoding='ascii')
def test_logs_with_dict_instead_of_id(self): def test_logs_with_dict_instead_of_id(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)' snippet = 'Flowering Nights (Sakuya Iyazoi)'
@ -891,7 +889,7 @@ Line2'''
exitcode = self.client.wait(id)['StatusCode'] exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0 assert exitcode == 0
logs = self.client.logs(container) logs = self.client.logs(container)
assert logs == (snippet + '\n').encode(encoding='ascii') assert logs == f"{snippet}\n".encode(encoding='ascii')
def test_logs_with_tail_0(self): def test_logs_with_tail_0(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)' snippet = 'Flowering Nights (Sakuya Iyazoi)'
@ -920,7 +918,7 @@ Line2'''
logs_until_1 = self.client.logs(container, until=1) logs_until_1 = self.client.logs(container, until=1)
assert logs_until_1 == b'' assert logs_until_1 == b''
logs_until_now = self.client.logs(container, datetime.now()) logs_until_now = self.client.logs(container, datetime.now())
assert logs_until_now == (snippet + '\n').encode(encoding='ascii') assert logs_until_now == f"{snippet}\n".encode(encoding='ascii')
class DiffTest(BaseAPIIntegrationTest): class DiffTest(BaseAPIIntegrationTest):
@ -1086,7 +1084,7 @@ class PortTest(BaseAPIIntegrationTest):
ip, host_port = port_binding['HostIp'], port_binding['HostPort'] ip, host_port = port_binding['HostIp'], port_binding['HostPort']
port_binding = port if not protocol else port + "/" + protocol port_binding = port if not protocol else f"{port}/{protocol}"
assert ip == port_bindings[port_binding][0] assert ip == port_bindings[port_binding][0]
assert host_port == port_bindings[port_binding][1] assert host_port == port_bindings[port_binding][1]

View File

@ -103,8 +103,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
if exitcode != 0: if exitcode != 0:
output = self.client.logs(container) output = self.client.logs(container)
raise Exception( raise Exception(
"Container exited with code {}:\n{}" f"Container exited with code {exitcode}:\n{output}")
.format(exitcode, output))
return container return container

View File

@ -22,7 +22,7 @@ class TestStore:
def setup_method(self): def setup_method(self):
self.tmp_keys = [] self.tmp_keys = []
if sys.platform.startswith('linux'): if sys.platform.startswith('linux'):
if shutil.which('docker-credential-' + DEFAULT_LINUX_STORE): if shutil.which(f"docker-credential-{DEFAULT_LINUX_STORE}"):
self.store = Store(DEFAULT_LINUX_STORE) self.store = Store(DEFAULT_LINUX_STORE)
elif shutil.which('docker-credential-pass'): elif shutil.which('docker-credential-pass'):
self.store = Store('pass') self.store = Store('pass')

View File

@ -49,7 +49,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
container = client.containers.run( container = client.containers.run(
"alpine", "sh -c 'echo \"hello\" > /insidecontainer/test'", "alpine", "sh -c 'echo \"hello\" > /insidecontainer/test'",
volumes=["%s:/insidecontainer" % path], volumes=[f"{path}:/insidecontainer"],
detach=True detach=True
) )
self.tmp_containers.append(container.id) self.tmp_containers.append(container.id)
@ -58,7 +58,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
name = "container_volume_test" name = "container_volume_test"
out = client.containers.run( out = client.containers.run(
"alpine", "cat /insidecontainer/test", "alpine", "cat /insidecontainer/test",
volumes=["%s:/insidecontainer" % path], volumes=[f"{path}:/insidecontainer"],
name=name name=name
) )
self.tmp_containers.append(name) self.tmp_containers.append(name)

View File

@ -110,8 +110,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
if exitcode != 0: if exitcode != 0:
output = self.client.logs(container) output = self.client.logs(container)
raise Exception( raise Exception(
"Container exited with code {}:\n{}" f"Container exited with code {exitcode}:\n{output}")
.format(exitcode, output))
return container return container

View File

@ -89,7 +89,7 @@ class BuildTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'build', f"{url_prefix}build",
stream=True, stream=True,
data=None, data=None,
headers=expected_headers, headers=expected_headers,
@ -193,10 +193,10 @@ class BuildTest(BaseAPIClientTest):
'foo/Dockerfile.foo', None 'foo/Dockerfile.foo', None
) )
assert process_dockerfile( assert process_dockerfile(
'../Dockerfile', pre(base + '\\foo') '../Dockerfile', pre(f"{base}\\foo")
)[1] is not None )[1] is not None
assert process_dockerfile( assert process_dockerfile(
'../baz/Dockerfile.baz', pre(base + '/baz') '../baz/Dockerfile.baz', pre(f"{base}/baz")
) == ('../baz/Dockerfile.baz', None) ) == ('../baz/Dockerfile.baz', None)
def test_process_dockerfile(self): def test_process_dockerfile(self):
@ -218,8 +218,8 @@ class BuildTest(BaseAPIClientTest):
'foo/Dockerfile.foo', None 'foo/Dockerfile.foo', None
) )
assert process_dockerfile( assert process_dockerfile(
'../Dockerfile', base + '/foo' '../Dockerfile', f"{base}/foo"
)[1] is not None )[1] is not None
assert process_dockerfile('../baz/Dockerfile.baz', base + '/baz') == ( assert process_dockerfile('../baz/Dockerfile.baz', f"{base}/baz") == (
'../baz/Dockerfile.baz', None '../baz/Dockerfile.baz', None
) )

View File

@ -32,9 +32,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID) self.client.exec_start(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{}/start'.format( assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start"
fake_api.FAKE_EXEC_ID
)
assert json.loads(args[1]['data']) == { assert json.loads(args[1]['data']) == {
'Tty': False, 'Tty': False,
@ -51,9 +49,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True) self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True)
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{}/start'.format( assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start"
fake_api.FAKE_EXEC_ID
)
assert json.loads(args[1]['data']) == { assert json.loads(args[1]['data']) == {
'Tty': False, 'Tty': False,
@ -68,16 +64,14 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_inspect(fake_api.FAKE_EXEC_ID) self.client.exec_inspect(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{}/json'.format( assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/json"
fake_api.FAKE_EXEC_ID
)
def test_exec_resize(self): def test_exec_resize(self):
self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60) self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60)
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize', f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/resize",
params={'h': 20, 'w': 60}, params={'h': 20, 'w': 60},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )

View File

@ -21,7 +21,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/json', f"{url_prefix}images/json",
params={'only_ids': 0, 'all': 1}, params={'only_ids': 0, 'all': 1},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -31,7 +31,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/json', f"{url_prefix}images/json",
params={'only_ids': 0, 'all': 0, params={'only_ids': 0, 'all': 0,
'filters': '{"reference": ["foo:bar"]}'}, 'filters': '{"reference": ["foo:bar"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
@ -42,7 +42,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/json', f"{url_prefix}images/json",
params={'only_ids': 1, 'all': 1}, params={'only_ids': 1, 'all': 1},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -52,7 +52,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/json', f"{url_prefix}images/json",
params={'only_ids': 1, 'all': 0}, params={'only_ids': 1, 'all': 0},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -62,7 +62,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/json', f"{url_prefix}images/json",
params={'only_ids': 0, 'all': 0, params={'only_ids': 0, 'all': 0,
'filters': '{"dangling": ["true"]}'}, 'filters': '{"dangling": ["true"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
@ -72,7 +72,7 @@ class ImageTest(BaseAPIClientTest):
self.client.pull('joffrey/test001') self.client.pull('joffrey/test001')
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create' assert args[0][1] == f"{url_prefix}images/create"
assert args[1]['params'] == { assert args[1]['params'] == {
'tag': 'latest', 'fromImage': 'joffrey/test001' 'tag': 'latest', 'fromImage': 'joffrey/test001'
} }
@ -82,7 +82,7 @@ class ImageTest(BaseAPIClientTest):
self.client.pull('joffrey/test001', stream=True) self.client.pull('joffrey/test001', stream=True)
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create' assert args[0][1] == f"{url_prefix}images/create"
assert args[1]['params'] == { assert args[1]['params'] == {
'tag': 'latest', 'fromImage': 'joffrey/test001' 'tag': 'latest', 'fromImage': 'joffrey/test001'
} }
@ -93,7 +93,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'commit', f"{url_prefix}commit",
data='{}', data='{}',
headers={'Content-Type': 'application/json'}, headers={'Content-Type': 'application/json'},
params={ params={
@ -112,7 +112,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'DELETE', 'DELETE',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID, f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}",
params={'force': False, 'noprune': False}, params={'force': False, 'noprune': False},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -122,7 +122,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/test_image/history', f"{url_prefix}images/test_image/history",
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -135,7 +135,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/create', f"{url_prefix}images/create",
params={ params={
'repo': fake_api.FAKE_REPO_NAME, 'repo': fake_api.FAKE_REPO_NAME,
'tag': fake_api.FAKE_TAG_NAME, 'tag': fake_api.FAKE_TAG_NAME,
@ -156,7 +156,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/create', f"{url_prefix}images/create",
params={ params={
'repo': fake_api.FAKE_REPO_NAME, 'repo': fake_api.FAKE_REPO_NAME,
'tag': fake_api.FAKE_TAG_NAME, 'tag': fake_api.FAKE_TAG_NAME,
@ -178,7 +178,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/create', f"{url_prefix}images/create",
params={ params={
'repo': fake_api.FAKE_REPO_NAME, 'repo': fake_api.FAKE_REPO_NAME,
'tag': fake_api.FAKE_TAG_NAME, 'tag': fake_api.FAKE_TAG_NAME,
@ -193,7 +193,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/test_image/json', f"{url_prefix}images/test_image/json",
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -211,7 +211,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/test_image/push', f"{url_prefix}images/test_image/push",
params={ params={
'tag': None 'tag': None
}, },
@ -230,7 +230,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/test_image/push', f"{url_prefix}images/test_image/push",
params={ params={
'tag': fake_api.FAKE_TAG_NAME, 'tag': fake_api.FAKE_TAG_NAME,
}, },
@ -254,7 +254,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/test_image/push', f"{url_prefix}images/test_image/push",
params={ params={
'tag': fake_api.FAKE_TAG_NAME, 'tag': fake_api.FAKE_TAG_NAME,
}, },
@ -272,7 +272,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/test_image/push', f"{url_prefix}images/test_image/push",
params={ params={
'tag': None 'tag': None
}, },
@ -287,7 +287,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
params={ params={
'tag': None, 'tag': None,
'repo': 'repo', 'repo': 'repo',
@ -305,7 +305,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
params={ params={
'tag': 'tag', 'tag': 'tag',
'repo': 'repo', 'repo': 'repo',
@ -320,7 +320,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
params={ params={
'tag': None, 'tag': None,
'repo': 'repo', 'repo': 'repo',
@ -334,7 +334,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/get', f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/get",
stream=True, stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -344,7 +344,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/load', f"{url_prefix}images/load",
data='Byte Stream....', data='Byte Stream....',
stream=True, stream=True,
params={}, params={},
@ -356,7 +356,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'POST', 'POST',
url_prefix + 'images/load', f"{url_prefix}images/load",
data='Byte Stream....', data='Byte Stream....',
stream=True, stream=True,
params={'quiet': True}, params={'quiet': True},

View File

@ -28,7 +28,7 @@ class NetworkTest(BaseAPIClientTest):
with mock.patch('docker.api.client.APIClient.get', get): with mock.patch('docker.api.client.APIClient.get', get):
assert self.client.networks() == networks assert self.client.networks() == networks
assert get.call_args[0][0] == url_prefix + 'networks' assert get.call_args[0][0] == f"{url_prefix}networks"
filters = json.loads(get.call_args[1]['params']['filters']) filters = json.loads(get.call_args[1]['params']['filters'])
assert not filters assert not filters
@ -54,7 +54,7 @@ class NetworkTest(BaseAPIClientTest):
result = self.client.create_network('foo') result = self.client.create_network('foo')
assert result == network_data assert result == network_data
assert post.call_args[0][0] == url_prefix + 'networks/create' assert post.call_args[0][0] == f"{url_prefix}networks/create"
assert json.loads(post.call_args[1]['data']) == {"Name": "foo"} assert json.loads(post.call_args[1]['data']) == {"Name": "foo"}
@ -97,7 +97,7 @@ class NetworkTest(BaseAPIClientTest):
self.client.remove_network(network_id) self.client.remove_network(network_id)
args = delete.call_args args = delete.call_args
assert args[0][0] == url_prefix + f'networks/{network_id}' assert args[0][0] == f"{url_prefix}networks/{network_id}"
def test_inspect_network(self): def test_inspect_network(self):
network_id = 'abc12345' network_id = 'abc12345'
@ -117,7 +117,7 @@ class NetworkTest(BaseAPIClientTest):
assert result == network_data assert result == network_data
args = get.call_args args = get.call_args
assert args[0][0] == url_prefix + f'networks/{network_id}' assert args[0][0] == f"{url_prefix}networks/{network_id}"
def test_connect_container_to_network(self): def test_connect_container_to_network(self):
network_id = 'abc12345' network_id = 'abc12345'
@ -135,7 +135,7 @@ class NetworkTest(BaseAPIClientTest):
) )
assert post.call_args[0][0] == ( assert post.call_args[0][0] == (
url_prefix + f'networks/{network_id}/connect' f"{url_prefix}networks/{network_id}/connect"
) )
assert json.loads(post.call_args[1]['data']) == { assert json.loads(post.call_args[1]['data']) == {
@ -158,7 +158,7 @@ class NetworkTest(BaseAPIClientTest):
container={'Id': container_id}, net_id=network_id) container={'Id': container_id}, net_id=network_id)
assert post.call_args[0][0] == ( assert post.call_args[0][0] == (
url_prefix + f'networks/{network_id}/disconnect' f"{url_prefix}networks/{network_id}/disconnect"
) )
assert json.loads(post.call_args[1]['data']) == { assert json.loads(post.call_args[1]['data']) == {
'Container': container_id 'Container': container_id

View File

@ -86,9 +86,7 @@ def fake_read_from_socket(self, response, stream, tty=False, demux=False):
url_base = f'{fake_api.prefix}/' url_base = f'{fake_api.prefix}/'
url_prefix = '{}v{}/'.format( url_prefix = f'{url_base}v{docker.constants.DEFAULT_DOCKER_API_VERSION}/'
url_base,
docker.constants.DEFAULT_DOCKER_API_VERSION)
class BaseAPIClientTest(unittest.TestCase): class BaseAPIClientTest(unittest.TestCase):
@ -130,22 +128,18 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_valid_resource(self): def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename') url = self.client._url('/hello/{0}/world', 'somename')
assert url == '{}{}'.format(url_prefix, 'hello/somename/world') assert url == f"{url_prefix}hello/somename/world"
url = self.client._url( url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername' '/hello/{0}/world/{1}', 'somename', 'someothername'
) )
assert url == '{}{}'.format( assert url == f"{url_prefix}hello/somename/world/someothername"
url_prefix, 'hello/somename/world/someothername'
)
url = self.client._url('/hello/{0}/world', 'some?name') url = self.client._url('/hello/{0}/world', 'some?name')
assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world') assert url == f"{url_prefix}hello/some%3Fname/world"
url = self.client._url("/images/{0}/push", "localhost:5000/image") url = self.client._url("/images/{0}/push", "localhost:5000/image")
assert url == '{}{}'.format( assert url == f"{url_prefix}images/localhost:5000/image/push"
url_prefix, 'images/localhost:5000/image/push'
)
def test_url_invalid_resource(self): def test_url_invalid_resource(self):
with pytest.raises(ValueError): with pytest.raises(ValueError):
@ -153,20 +147,20 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_no_resource(self): def test_url_no_resource(self):
url = self.client._url('/simple') url = self.client._url('/simple')
assert url == '{}{}'.format(url_prefix, 'simple') assert url == f"{url_prefix}simple"
def test_url_unversioned_api(self): def test_url_unversioned_api(self):
url = self.client._url( url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False '/hello/{0}/world', 'somename', versioned_api=False
) )
assert url == '{}{}'.format(url_base, 'hello/somename/world') assert url == f"{url_base}hello/somename/world"
def test_version(self): def test_version(self):
self.client.version() self.client.version()
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'version', f"{url_prefix}version",
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -175,7 +169,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_base + 'version', f"{url_base}version",
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -194,7 +188,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'info', f"{url_prefix}info",
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -203,7 +197,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'images/search', f"{url_prefix}images/search",
params={'term': 'busybox'}, params={'term': 'busybox'},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -212,7 +206,7 @@ class DockerApiTest(BaseAPIClientTest):
self.client.login('sakuya', 'izayoi') self.client.login('sakuya', 'izayoi')
args = fake_request.call_args args = fake_request.call_args
assert args[0][0] == 'POST' assert args[0][0] == 'POST'
assert args[0][1] == url_prefix + 'auth' assert args[0][1] == f"{url_prefix}auth"
assert json.loads(args[1]['data']) == { assert json.loads(args[1]['data']) == {
'username': 'sakuya', 'password': 'izayoi' 'username': 'sakuya', 'password': 'izayoi'
} }
@ -229,7 +223,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'events', f"{url_prefix}events",
params={'since': None, 'until': None, 'filters': None}, params={'since': None, 'until': None, 'filters': None},
stream=True, stream=True,
timeout=None timeout=None
@ -245,7 +239,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'events', f"{url_prefix}events",
params={ params={
'since': ts - 10, 'since': ts - 10,
'until': ts + 10, 'until': ts + 10,
@ -264,7 +258,7 @@ class DockerApiTest(BaseAPIClientTest):
expected_filters = docker.utils.convert_filters(filters) expected_filters = docker.utils.convert_filters(filters)
fake_request.assert_called_with( fake_request.assert_called_with(
'GET', 'GET',
url_prefix + 'events', f"{url_prefix}events",
params={ params={
'since': None, 'since': None,
'until': None, 'until': None,
@ -318,7 +312,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with( fake_request.assert_called_with(
'DELETE', 'DELETE',
url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID, f"{url_prefix}containers/{fake_api.FAKE_CONTAINER_ID}",
params={'v': False, 'link': True, 'force': False}, params={'v': False, 'link': True, 'force': False},
timeout=DEFAULT_TIMEOUT_SECONDS timeout=DEFAULT_TIMEOUT_SECONDS
) )
@ -332,7 +326,7 @@ class DockerApiTest(BaseAPIClientTest):
self.client.create_host_config(security_opt='wrong') self.client.create_host_config(security_opt='wrong')
def test_stream_helper_decoding(self): def test_stream_helper_decoding(self):
status_code, content = fake_api.fake_responses[url_prefix + 'events']() status_code, content = fake_api.fake_responses[f"{url_prefix}events"]()
content_str = json.dumps(content) content_str = json.dumps(content)
content_str = content_str.encode('utf-8') content_str = content_str.encode('utf-8')
body = io.BytesIO(content_str) body = io.BytesIO(content_str)
@ -443,7 +437,7 @@ class UnixSocketStreamTest(unittest.TestCase):
lines = [] lines = []
for i in range(0, 50): for i in range(0, 50):
line = str(i).encode() line = str(i).encode()
lines += [('%x' % len(line)).encode(), line] lines += [f'{len(line):x}'.encode(), line]
lines.append(b'0') lines.append(b'0')
lines.append(b'') lines.append(b'')
@ -454,7 +448,7 @@ class UnixSocketStreamTest(unittest.TestCase):
) + b'\r\n'.join(lines) ) + b'\r\n'.join(lines)
with APIClient( with APIClient(
base_url="http+unix://" + self.socket_file, base_url=f"http+unix://{self.socket_file}",
version=DEFAULT_DOCKER_API_VERSION) as client: version=DEFAULT_DOCKER_API_VERSION) as client:
for i in range(5): for i in range(5):
try: try:
@ -490,8 +484,7 @@ class TCPSocketStreamTest(unittest.TestCase):
cls.thread = threading.Thread(target=cls.server.serve_forever) cls.thread = threading.Thread(target=cls.server.serve_forever)
cls.thread.daemon = True cls.thread.daemon = True
cls.thread.start() cls.thread.start()
cls.address = 'http://{}:{}'.format( cls.address = f'http://{socket.gethostname()}:{cls.server.server_address[1]}'
socket.gethostname(), cls.server.server_address[1])
@classmethod @classmethod
def teardown_class(cls): def teardown_class(cls):
@ -600,7 +593,7 @@ class UserAgentTest(unittest.TestCase):
self.patcher = mock.patch.object( self.patcher = mock.patch.object(
APIClient, APIClient,
'send', 'send',
return_value=fake_resp("GET", "%s/version" % fake_api.prefix) return_value=fake_resp("GET", f"{fake_api.prefix}/version")
) )
self.mock_send = self.patcher.start() self.mock_send = self.patcher.start()
@ -613,7 +606,7 @@ class UserAgentTest(unittest.TestCase):
assert self.mock_send.call_count == 1 assert self.mock_send.call_count == 1
headers = self.mock_send.call_args[0][0].headers headers = self.mock_send.call_args[0][0].headers
expected = 'docker-sdk-python/%s' % docker.__version__ expected = f'docker-sdk-python/{docker.__version__}'
assert headers['User-Agent'] == expected assert headers['User-Agent'] == expected
def test_custom_user_agent(self): def test_custom_user_agent(self):
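
The chunked-stream test in this file replaces '%x' % len(line) with f'{len(line):x}': everything after the colon in an f-string placeholder is a format spec, so printf-style conversions map over directly. A small check building one HTTP chunk:

    line = b"hello"
    assert f"{len(line):x}" == "%x" % len(line) == "5"

    # Chunked transfer encoding prefixes each chunk with its length in hex.
    chunk = f"{len(line):x}".encode() + b"\r\n" + line + b"\r\n"
    assert chunk == b"5\r\nhello\r\n"
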

View File

@ -14,7 +14,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert args[0][0] == 'GET' assert args[0][0] == 'GET'
assert args[0][1] == url_prefix + 'volumes' assert args[0][1] == f"{url_prefix}volumes"
def test_list_volumes_and_filters(self): def test_list_volumes_and_filters(self):
volumes = self.client.volumes(filters={'dangling': True}) volumes = self.client.volumes(filters={'dangling': True})
@ -23,7 +23,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert args[0][0] == 'GET' assert args[0][0] == 'GET'
assert args[0][1] == url_prefix + 'volumes' assert args[0][1] == f"{url_prefix}volumes"
assert args[1] == {'params': {'filters': '{"dangling": ["true"]}'}, assert args[1] == {'params': {'filters': '{"dangling": ["true"]}'},
'timeout': 60} 'timeout': 60}
@ -37,7 +37,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert args[0][0] == 'POST' assert args[0][0] == 'POST'
assert args[0][1] == url_prefix + 'volumes/create' assert args[0][1] == f"{url_prefix}volumes/create"
assert json.loads(args[1]['data']) == {'Name': name} assert json.loads(args[1]['data']) == {'Name': name}
@requires_api_version('1.23') @requires_api_version('1.23')
@ -63,7 +63,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert args[0][0] == 'POST' assert args[0][0] == 'POST'
assert args[0][1] == url_prefix + 'volumes/create' assert args[0][1] == f"{url_prefix}volumes/create"
data = json.loads(args[1]['data']) data = json.loads(args[1]['data'])
assert 'Driver' in data assert 'Driver' in data
assert data['Driver'] == driver_name assert data['Driver'] == driver_name

View File

@ -290,9 +290,10 @@ class LoadConfigTest(unittest.TestCase):
folder = tempfile.mkdtemp() folder = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, folder) self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder, dockercfg_path = os.path.join(
'.{}.dockercfg'.format( folder,
random.randrange(100000))) f'.{random.randrange(100000)}.dockercfg',
)
registry = 'https://your.private.registry.io' registry = 'https://your.private.registry.io'
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = { config = {

View File

@ -85,10 +85,7 @@ class ClientTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200 mock_obj.return_value.urlopen.return_value.status = 200
client.ping() client.ping()
base_url = "{base_url}/v{version}/_ping".format( base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url, mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock", "/var/run/docker.sock",
@ -124,10 +121,7 @@ class ClientTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200 mock_obj.return_value.urlopen.return_value.status = 200
client.ping() client.ping()
base_url = "{base_url}/v{version}/_ping".format( base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url, mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock", "/var/run/docker.sock",
@ -198,10 +192,7 @@ class FromEnvTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200 mock_obj.return_value.urlopen.return_value.status = 200
client.ping() client.ping()
base_url = "{base_url}/v{version}/_ping".format( base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url, mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock", "/var/run/docker.sock",
@ -235,10 +226,7 @@ class FromEnvTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200 mock_obj.return_value.urlopen.return_value.status = 200
client.ping() client.ping()
base_url = "{base_url}/v{version}/_ping".format( base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
base_url=client.api.base_url,
version=client.api._version
)
mock_obj.assert_called_once_with(base_url, mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock", "/var/run/docker.sock",

View File

@ -617,17 +617,11 @@ fake_responses = {
get_fake_volume_list, get_fake_volume_list,
(f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'): (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
get_fake_volume, get_fake_volume,
('{1}/{0}/volumes/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'GET'):
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'GET'):
get_fake_volume, get_fake_volume,
('{1}/{0}/volumes/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'DELETE'):
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'DELETE'):
fake_remove_volume, fake_remove_volume,
('{1}/{0}/nodes/{2}/update?version=1'.format( (f'{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1', 'POST'):
CURRENT_VERSION, prefix, FAKE_NODE_ID
), 'POST'):
post_fake_update_node, post_fake_update_node,
(f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'): (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
post_fake_join_swarm, post_fake_join_swarm,
@ -635,21 +629,13 @@ fake_responses = {
get_fake_network_list, get_fake_network_list,
(f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'): (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
post_fake_network, post_fake_network,
('{1}/{0}/networks/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'GET'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'GET'):
get_fake_network, get_fake_network,
('{1}/{0}/networks/{2}'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'DELETE'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'DELETE'):
delete_fake_network, delete_fake_network,
('{1}/{0}/networks/{2}/connect'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect', 'POST'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
post_fake_network_connect, post_fake_network_connect,
('{1}/{0}/networks/{2}/disconnect'.format( (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect', 'POST'):
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
post_fake_network_disconnect, post_fake_network_disconnect,
f'{prefix}/{CURRENT_VERSION}/secrets/create': f'{prefix}/{CURRENT_VERSION}/secrets/create':
post_fake_secret, post_fake_secret,
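
The fake_responses keys above are now f-strings; they are evaluated once, when the dict literal is built, so lookups behave exactly as with the old .format() calls. A reduced sketch with placeholder names:

    prefix, version, network_id = "http+docker://localhost", "1.41", "abc12345"  # placeholders

    routes = {
        (f"{prefix}/v{version}/networks/{network_id}", "GET"): "inspect",
        (f"{prefix}/v{version}/networks/{network_id}", "DELETE"): "remove",
    }
    assert routes[(f"{prefix}/v{version}/networks/{network_id}", "GET")] == "inspect"
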

View File

@ -20,7 +20,7 @@ class SwarmTest(BaseAPIClientTest):
) )
args = fake_request.call_args args = fake_request.call_args
assert args[0][1] == ( assert args[0][1] == (
url_prefix + 'nodes/24ifsmvkjbyhk/update?version=1' f"{url_prefix}nodes/24ifsmvkjbyhk/update?version=1"
) )
assert json.loads(args[1]['data']) == node_spec assert json.loads(args[1]['data']) == node_spec
assert args[1]['headers']['Content-Type'] == 'application/json' assert args[1]['headers']['Content-Type'] == 'application/json'
@ -45,7 +45,7 @@ class SwarmTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert (args[0][1] == url_prefix + 'swarm/join') assert (args[0][1] == f"{url_prefix}swarm/join")
assert (json.loads(args[1]['data']) == data) assert (json.loads(args[1]['data']) == data)
assert (args[1]['headers']['Content-Type'] == 'application/json') assert (args[1]['headers']['Content-Type'] == 'application/json')
@ -64,6 +64,6 @@ class SwarmTest(BaseAPIClientTest):
args = fake_request.call_args args = fake_request.call_args
assert (args[0][1] == url_prefix + 'swarm/join') assert (args[0][1] == f"{url_prefix}swarm/join")
assert (json.loads(args[1]['data']) == data) assert (json.loads(args[1]['data']) == data)
assert (args[1]['headers']['Content-Type'] == 'application/json') assert (args[1]['headers']['Content-Type'] == 'application/json')

View File

@ -486,9 +486,9 @@ class PortsTest(unittest.TestCase):
def test_split_port_with_protocol(self): def test_split_port_with_protocol(self):
for protocol in ['tcp', 'udp', 'sctp']: for protocol in ['tcp', 'udp', 'sctp']:
internal_port, external_port = split_port( internal_port, external_port = split_port(
"127.0.0.1:1000:2000/" + protocol f"127.0.0.1:1000:2000/{protocol}"
) )
assert internal_port == ["2000/" + protocol] assert internal_port == [f"2000/{protocol}"]
assert external_port == [("127.0.0.1", "1000")] assert external_port == [("127.0.0.1", "1000")]
def test_split_port_with_host_ip_no_port(self): def test_split_port_with_host_ip_no_port(self):