Replace string formatting with f-strings

Signed-off-by: Aarni Koskela <akx@iki.fi>
Aarni Koskela 2023-05-11 16:12:41 +03:00
parent ee2310595d
commit 8a3402c049
40 changed files with 214 additions and 294 deletions
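The change is mechanical throughout: eager str.format() and %-interpolation become f-strings evaluated in place. A minimal sketch of the pattern, with illustrative names that are not from the codebase:

# illustrative only; names are not from the codebase
name, count = 'alpine', 3

# before: explicit formatting calls
msg_format = 'pulled {} ({} layers)'.format(name, count)
msg_percent = 'pulled %s (%d layers)' % (name, count)

# after: the f-string interpolates the same values in place
msg_fstring = f'pulled {name} ({count} layers)'

assert msg_format == msg_percent == msg_fstring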

View File

@ -314,9 +314,8 @@ class BuildApiMixin:
auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
log.debug(
'Sending auth config ({})'.format(
', '.join(repr(k) for k in auth_data.keys())
)
"Sending auth config (%s)",
', '.join(repr(k) for k in auth_data),
)
if auth_data:
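One exception to the pattern: the log.debug call above switches to lazy %s arguments rather than an f-string, so the final message is only assembled if the record is actually emitted. A short sketch of the difference, assuming a standard logging logger and an illustrative auth_keys value:

import logging

log = logging.getLogger(__name__)            # stands in for the module logger
auth_keys = ['https://index.docker.io/v1/']  # illustrative value

# eager: the f-string is built before logging decides whether to emit the record
log.debug(f"Sending auth config ({', '.join(repr(k) for k in auth_keys)})")

# lazy: the %s interpolation is deferred to the logging framework
# (the join argument is still evaluated, but the final string is only
# assembled if a handler actually processes the record)
log.debug("Sending auth config (%s)", ', '.join(repr(k) for k in auth_keys))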
@ -336,12 +335,9 @@ def process_dockerfile(dockerfile, path):
abs_dockerfile = os.path.join(path, dockerfile)
if constants.IS_WINDOWS_PLATFORM and path.startswith(
constants.WINDOWS_LONGPATH_PREFIX):
abs_dockerfile = '{}{}'.format(
constants.WINDOWS_LONGPATH_PREFIX,
os.path.normpath(
abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]
)
)
normpath = os.path.normpath(
abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):])
abs_dockerfile = f'{constants.WINDOWS_LONGPATH_PREFIX}{normpath}'
if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
os.path.relpath(abs_dockerfile, path).startswith('..')):
# Dockerfile not in context - read data to insert into tar later

View File

@ -199,14 +199,13 @@ class APIClient(
self._version = version
if not isinstance(self._version, str):
raise DockerException(
'Version parameter must be a string or None. Found {}'.format(
type(version).__name__
)
'Version parameter must be a string or None. '
f'Found {type(version).__name__}'
)
if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
raise InvalidVersion(
'API versions below {} are no longer supported by this '
'library.'.format(MINIMUM_DOCKER_API_VERSION)
f'API versions below {MINIMUM_DOCKER_API_VERSION} are '
f'no longer supported by this library.'
)
def _retrieve_server_version(self):
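The long messages above lean on implicit concatenation of adjacent string literals inside the parentheses; only the pieces containing placeholders need the f prefix. A small sketch, with an invented min_version value:

min_version = '1.21'  # illustrative value

message = (
    'API versions below '            # plain literal, no placeholders
    f'{min_version} are no longer '  # only this piece needs the f prefix
    'supported by this library.'
)
assert message == 'API versions below 1.21 are no longer supported by this library.'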
@ -248,19 +247,17 @@ class APIClient(
for arg in args:
if not isinstance(arg, str):
raise ValueError(
'Expected a string but found {} ({}) '
'instead'.format(arg, type(arg))
f'Expected a string but found {arg} ({type(arg)}) instead'
)
quote_f = partial(urllib.parse.quote, safe="/:")
args = map(quote_f, args)
formatted_path = pathfmt.format(*args)
if kwargs.get('versioned_api', True):
return '{}/v{}{}'.format(
self.base_url, self._version, pathfmt.format(*args)
)
return f'{self.base_url}/v{self._version}{formatted_path}'
else:
return f'{self.base_url}{pathfmt.format(*args)}'
return f'{self.base_url}{formatted_path}'
def _raise_for_status(self, response):
"""Raises stored :class:`APIError`, if one occurred."""

View File

@ -863,8 +863,8 @@ class ContainerApiMixin:
params['since'] = since
else:
raise errors.InvalidArgument(
'since value should be datetime or positive int/float, '
'not {}'.format(type(since))
'since value should be datetime or positive int/float,'
f' not {type(since)}'
)
if until is not None:
@ -880,8 +880,8 @@ class ContainerApiMixin:
params['until'] = until
else:
raise errors.InvalidArgument(
'until value should be datetime or positive int/float, '
'not {}'.format(type(until))
f'until value should be datetime or positive int/float, '
f'not {type(until)}'
)
url = self._url("/containers/{0}/logs", container)
@ -953,7 +953,7 @@ class ContainerApiMixin:
return port_settings.get(private_port)
for protocol in ['tcp', 'udp', 'sctp']:
h_ports = port_settings.get(private_port + '/' + protocol)
h_ports = port_settings.get(f"{private_port}/{protocol}")
if h_ports:
break

View File

@ -7,9 +7,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec,
def raise_version_error(param, min_version):
raise errors.InvalidVersion(
'{} is not supported in API version < {}'.format(
param, min_version
)
f'{param} is not supported in API version < {min_version}'
)
if update_config is not None:

View File

@ -22,15 +22,15 @@ def resolve_repository_name(repo_name):
index_name, remote_name = split_repo_name(repo_name)
if index_name[0] == '-' or index_name[-1] == '-':
raise errors.InvalidRepository(
'Invalid index name ({}). Cannot begin or end with a'
' hyphen.'.format(index_name)
f'Invalid index name ({index_name}). '
'Cannot begin or end with a hyphen.'
)
return resolve_index_name(index_name), remote_name
def resolve_index_name(index_name):
index_name = convert_to_hostname(index_name)
if index_name == 'index.' + INDEX_NAME:
if index_name == f"index.{INDEX_NAME}":
index_name = INDEX_NAME
return index_name
@ -99,9 +99,7 @@ class AuthConfig(dict):
for registry, entry in entries.items():
if not isinstance(entry, dict):
log.debug(
'Config entry for key {} is not auth config'.format(
registry
)
f'Config entry for key {registry} is not auth config'
)
# We sometimes fall back to parsing the whole config as if it
# was the auth config by itself, for legacy purposes. In that
@ -109,17 +107,11 @@ class AuthConfig(dict):
# keys is not formatted properly.
if raise_on_error:
raise errors.InvalidConfigFile(
'Invalid configuration for registry {}'.format(
registry
)
f'Invalid configuration for registry {registry}'
)
return {}
if 'identitytoken' in entry:
log.debug(
'Found an IdentityToken entry for registry {}'.format(
registry
)
)
log.debug(f'Found an IdentityToken entry for registry {registry}')
conf[registry] = {
'IdentityToken': entry['identitytoken']
}
@ -130,16 +122,15 @@ class AuthConfig(dict):
# a valid value in the auths config.
# https://github.com/docker/compose/issues/3265
log.debug(
'Auth data for {} is absent. Client might be using a '
'credentials store instead.'.format(registry)
f'Auth data for {registry} is absent. '
f'Client might be using a credentials store instead.'
)
conf[registry] = {}
continue
username, password = decode_auth(entry['auth'])
log.debug(
'Found entry (registry={}, username={})'
.format(repr(registry), repr(username))
f'Found entry (registry={registry!r}, username={username!r})'
)
conf[registry] = {

View File

@ -113,8 +113,8 @@ class ContextAPI:
names.append(data["Name"])
except Exception as e:
raise errors.ContextException(
"Failed to load metafile {}: {}".format(
filename, e))
f"Failed to load metafile {filename}: {e}",
)
contexts = [cls.DEFAULT_CONTEXT]
for name in names:

View File

@ -42,8 +42,9 @@ class Context:
for k, v in endpoints.items():
if not isinstance(v, dict):
# unknown format
raise ContextException("""Unknown endpoint format for
context {}: {}""".format(name, v))
raise ContextException(
f"Unknown endpoint format for context {name}: {v}",
)
self.endpoints[k] = v
if k != "docker":
@ -96,8 +97,9 @@ class Context:
metadata = json.load(f)
except (OSError, KeyError, ValueError) as e:
# unknown format
raise Exception("""Detected corrupted meta file for
context {} : {}""".format(name, e))
raise Exception(
f"Detected corrupted meta file for context {name} : {e}"
)
# for docker endpoints, set defaults for
# Host and SkipTLSVerify fields

View File

@ -13,13 +13,5 @@ class InitializationError(StoreError):
def process_store_error(cpe, program):
message = cpe.output.decode('utf-8')
if 'credentials not found in native keychain' in message:
return CredentialsNotFound(
'No matching credentials in {}'.format(
program
)
)
return StoreError(
'Credentials store {} exited with "{}".'.format(
program, cpe.output.decode('utf-8').strip()
)
)
return CredentialsNotFound(f'No matching credentials in {program}')
return StoreError(f'Credentials store {program} exited with "{message}".')

View File

@ -20,9 +20,7 @@ class Store:
self.environment = environment
if self.exe is None:
warnings.warn(
'{} not installed or not available in PATH'.format(
self.program
)
f'{self.program} not installed or not available in PATH'
)
def get(self, server):
@ -73,10 +71,8 @@ class Store:
def _execute(self, subcmd, data_input):
if self.exe is None:
raise errors.StoreError(
'{} not installed or not available in PATH'.format(
self.program
)
)
f'{self.program} not installed or not available in PATH'
)
output = None
env = create_environment_dict(self.environment)
try:
@ -88,14 +84,10 @@ class Store:
except OSError as e:
if e.errno == errno.ENOENT:
raise errors.StoreError(
'{} not installed or not available in PATH'.format(
self.program
)
f'{self.program} not installed or not available in PATH'
)
else:
raise errors.StoreError(
'Unexpected OS error "{}", errno={}'.format(
e.strerror, e.errno
)
f'Unexpected OS error "{e.strerror}", errno={e.errno}'
)
return output

View File

@ -54,14 +54,16 @@ class APIError(requests.exceptions.HTTPError, DockerException):
message = super().__str__()
if self.is_client_error():
message = '{} Client Error for {}: {}'.format(
self.response.status_code, self.response.url,
self.response.reason)
message = (
f'{self.response.status_code} Client Error for '
f'{self.response.url}: {self.response.reason}'
)
elif self.is_server_error():
message = '{} Server Error for {}: {}'.format(
self.response.status_code, self.response.url,
self.response.reason)
message = (
f'{self.response.status_code} Server Error for '
f'{self.response.url}: {self.response.reason}'
)
if self.explanation:
message = f'{message} ("{self.explanation}")'
@ -142,10 +144,10 @@ class ContainerError(DockerException):
self.stderr = stderr
err = f": {stderr}" if stderr is not None else ""
msg = ("Command '{}' in image '{}' returned non-zero exit "
"status {}{}").format(command, image, exit_status, err)
super().__init__(msg)
super().__init__(
f"Command '{command}' in image '{image}' "
f"returned non-zero exit status {exit_status}{err}"
)
class StreamParseError(RuntimeError):

View File

@ -15,10 +15,8 @@ class Image(Model):
An image on the server.
"""
def __repr__(self):
return "<{}: '{}'>".format(
self.__class__.__name__,
"', '".join(self.tags),
)
tag_str = "', '".join(self.tags)
return f"<{self.__class__.__name__}: '{tag_str}'>"
@property
def labels(self):
@ -471,9 +469,8 @@ class ImageCollection(Collection):
# to be pulled.
pass
if not all_tags:
return self.get('{0}{2}{1}'.format(
repository, tag, '@' if tag.startswith('sha256:') else ':'
))
sep = '@' if tag.startswith('sha256:') else ':'
return self.get(f'{repository}{sep}{tag}')
return self.list(repository)
def push(self, repository, tag=None, **kwargs):

View File

@ -64,9 +64,10 @@ class Collection:
def __call__(self, *args, **kwargs):
raise TypeError(
"'{}' object is not callable. You might be trying to use the old "
"(pre-2.0) API - use docker.APIClient if so."
.format(self.__class__.__name__))
f"'{self.__class__.__name__}' object is not callable. "
"You might be trying to use the old (pre-2.0) API - "
"use docker.APIClient if so."
)
def list(self):
raise NotImplementedError
@ -88,5 +89,4 @@ class Collection:
elif isinstance(attrs, dict):
return self.model(attrs=attrs, client=self.client, collection=self)
else:
raise Exception("Can't create %s from %s" %
(self.model.__name__, attrs))
raise Exception(f"Can't create {self.model.__name__} from {attrs}")

View File

@ -55,7 +55,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
max_pool_size=constants.DEFAULT_MAX_POOL_SIZE):
socket_path = socket_url.replace('http+unix://', '')
if not socket_path.startswith('/'):
socket_path = '/' + socket_path
socket_path = f"/{socket_path}"
self.socket_path = socket_path
self.timeout = timeout
self.max_pool_size = max_pool_size

View File

@ -652,25 +652,25 @@ class HostConfig(dict):
def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}'
return TypeError(error_msg.format(param, expected, type(param_value)))
return TypeError(
f'Invalid type for {param} param: expected {expected} '
f'but found {type(param_value)}'
)
def host_config_version_error(param, version, less_than=True):
operator = '<' if less_than else '>'
error_msg = '{0} param is not supported in API versions {1} {2}'
return errors.InvalidVersion(error_msg.format(param, operator, version))
return errors.InvalidVersion(
f'{param} param is not supported in API versions {operator} {version}',
)
def host_config_value_error(param, param_value):
error_msg = 'Invalid value for {0} param: {1}'
return ValueError(error_msg.format(param, param_value))
return ValueError(f'Invalid value for {param} param: {param_value}')
def host_config_incompatible_error(param, param_value, incompatible_param):
error_msg = '\"{1}\" {0} is incompatible with {2}'
return errors.InvalidArgument(
error_msg.format(param, param_value, incompatible_param)
f'\"{param_value}\" {param} is incompatible with {incompatible_param}'
)

View File

@ -370,8 +370,8 @@ def _convert_generic_resources_dict(generic_resources):
return generic_resources
if not isinstance(generic_resources, dict):
raise errors.InvalidArgument(
'generic_resources must be a dict or a list'
' (found {})'.format(type(generic_resources))
'generic_resources must be a dict or a list '
f'(found {type(generic_resources)})'
)
resources = []
for kind, value in generic_resources.items():
@ -381,9 +381,9 @@ def _convert_generic_resources_dict(generic_resources):
elif isinstance(value, str):
resource_type = 'NamedResourceSpec'
else:
kv = {kind: value}
raise errors.InvalidArgument(
'Unsupported generic resource reservation '
'type: {}'.format({kind: value})
f'Unsupported generic resource reservation type: {kv}'
)
resources.append({
resource_type: {'Kind': kind, 'Value': value}
@ -764,8 +764,8 @@ class PlacementPreference(dict):
def __init__(self, strategy, descriptor):
if strategy != 'spread':
raise errors.InvalidArgument(
'PlacementPreference strategy value is invalid ({}):'
' must be "spread".'.format(strategy)
f'PlacementPreference strategy value is invalid ({strategy}): '
'must be "spread".'
)
self['Spread'] = {'SpreadDescriptor': descriptor}

View File

@ -42,7 +42,7 @@ def exclude_paths(root, patterns, dockerfile=None):
if dockerfile is None:
dockerfile = 'Dockerfile'
patterns.append('!' + dockerfile)
patterns.append(f"!{dockerfile}")
pm = PatternMatcher(patterns)
return set(pm.walk(root))
@ -180,7 +180,7 @@ class PatternMatcher:
fpath = os.path.join(
os.path.relpath(current_dir, root), f
)
if fpath.startswith('.' + os.path.sep):
if fpath.startswith(f".{os.path.sep}"):
fpath = fpath[2:]
match = self.matches(fpath)
if not match:

View File

@ -27,9 +27,7 @@ def minimum_version(version):
def wrapper(self, *args, **kwargs):
if utils.version_lt(self._version, version):
raise errors.InvalidVersion(
'{} is not available for version < {}'.format(
f.__name__, version
)
f'{f.__name__} is not available for version < {version}',
)
return f(self, *args, **kwargs)
return wrapper

View File

@ -79,18 +79,18 @@ def translate(pat):
i = i + 1
if i >= n:
# is "**EOF" - to align with .gitignore just accept all
res = res + '.*'
res = f"{res}.*"
else:
# is "**"
# Note that this allows for any # of /'s (even 0) because
# the .* will eat everything, even /'s
res = res + '(.*/)?'
res = f"{res}(.*/)?"
else:
# is "*" so map it to anything but "/"
res = res + '[^/]*'
res = f"{res}[^/]*"
elif c == '?':
# "?" is any char except "/"
res = res + '[^/]'
res = f"{res}[^/]"
elif c == '[':
j = i
if j < n and pat[j] == '!':
@ -100,16 +100,16 @@ def translate(pat):
while j < n and pat[j] != ']':
j = j + 1
if j >= n:
res = res + '\\['
res = f"{res}\\["
else:
stuff = pat[i:j].replace('\\', '\\\\')
i = j + 1
if stuff[0] == '!':
stuff = '^' + stuff[1:]
stuff = f"^{stuff[1:]}"
elif stuff[0] == '^':
stuff = '\\' + stuff
stuff = f"\\{stuff}"
res = f'{res}[{stuff}]'
else:
res = res + re.escape(c)
return res + '$'
return f"{res}$"

View File

@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False):
if not end:
return [start + proto]
if randomly_available_port:
return [f'{start}-{end}' + proto]
return [f"{start}-{end}{proto}"]
return [str(port) + proto for port in range(int(start), int(end) + 1)]

View File

@ -69,5 +69,9 @@ class ProxyConfig(dict):
return proxy_env + environment
def __str__(self):
return 'ProxyConfig(http={}, https={}, ftp={}, no_proxy={})'.format(
self.http, self.https, self.ftp, self.no_proxy)
return (
'ProxyConfig('
f'http={self.http}, https={self.https}, '
f'ftp={self.ftp}, no_proxy={self.no_proxy}'
')'
)

View File

@ -127,8 +127,7 @@ def convert_volume_binds(binds):
if isinstance(v, dict):
if 'ro' in v and 'mode' in v:
raise ValueError(
'Binding cannot contain both "ro" and "mode": {}'
.format(repr(v))
f'Binding cannot contain both "ro" and "mode": {v!r}'
)
bind = v['bind']
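The {v!r} placeholder above is the f-string conversion flag standing in for an explicit repr() call (!s and !a map to str() and ascii() the same way). A quick sketch with an illustrative value:

v = {'bind': '/data', 'mode': 'ro'}  # illustrative bind definition

assert f'bad value: {v!r}' == 'bad value: ' + repr(v)
assert f'bad value: {v!r}' == f'bad value: {repr(v)}'  # equivalent spellings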
@ -160,8 +159,8 @@ def convert_tmpfs_mounts(tmpfs):
if not isinstance(tmpfs, list):
raise ValueError(
'Expected tmpfs value to be either a list or a dict, found: {}'
.format(type(tmpfs).__name__)
'Expected tmpfs value to be either a list or a dict, '
f'found: {type(tmpfs).__name__}'
)
result = {}
@ -175,8 +174,8 @@ def convert_tmpfs_mounts(tmpfs):
else:
raise ValueError(
"Expected item in tmpfs list to be a string, found: {}"
.format(type(mount).__name__)
"Expected item in tmpfs list to be a string, "
f"found: {type(mount).__name__}"
)
result[name] = options
@ -218,9 +217,9 @@ def parse_host(addr, is_win32=False, tls=False):
parsed_url = urlparse(addr)
proto = parsed_url.scheme
if not proto or any([x not in string.ascii_letters + '+' for x in proto]):
if not proto or any([x not in f"{string.ascii_letters}+" for x in proto]):
# https://bugs.python.org/issue754016
parsed_url = urlparse('//' + addr, 'tcp')
parsed_url = urlparse(f"//{addr}", 'tcp')
proto = 'tcp'
if proto == 'fd':
@ -256,15 +255,14 @@ def parse_host(addr, is_win32=False, tls=False):
if parsed_url.path and proto == 'ssh':
raise errors.DockerException(
'Invalid bind address format: no path allowed for this protocol:'
' {}'.format(addr)
f'Invalid bind address format: no path allowed for this protocol: {addr}'
)
else:
path = parsed_url.path
if proto == 'unix' and parsed_url.hostname is not None:
# For legacy reasons, we consider unix://path
# to be valid and equivalent to unix:///path
path = '/'.join((parsed_url.hostname, path))
path = f"{parsed_url.hostname}/{path}"
netloc = parsed_url.netloc
if proto in ('tcp', 'ssh'):
@ -272,8 +270,7 @@ def parse_host(addr, is_win32=False, tls=False):
if port <= 0:
if proto != 'ssh':
raise errors.DockerException(
'Invalid bind address format: port is required:'
' {}'.format(addr)
f'Invalid bind address format: port is required: {addr}'
)
port = 22
netloc = f'{parsed_url.netloc}:{port}'
@ -283,7 +280,7 @@ def parse_host(addr, is_win32=False, tls=False):
# Rewrite schemes to fit library internals (requests adapters)
if proto == 'tcp':
proto = 'http{}'.format('s' if tls else '')
proto = f"http{'s' if tls else ''}"
elif proto == 'unix':
proto = 'http+unix'
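The rewritten line embeds a conditional expression in the braces; on the Python versions supported at the time of this commit (before 3.12), the quotes inside a replacement field must differ from the quotes delimiting the f-string, hence the double-quoted literal around the single-quoted 's'. A tiny sketch:

tls = True  # illustrative flag

proto = f"http{'s' if tls else ''}"  # inner single quotes, outer double quotes
assert proto == 'https'

proto = f"http{'s' if False else ''}"
assert proto == 'http'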
@ -419,17 +416,16 @@ def parse_bytes(s):
digits = float(digits_part)
except ValueError:
raise errors.DockerException(
'Failed converting the string value for memory ({}) to'
' an integer.'.format(digits_part)
'Failed converting the string value for memory '
f'({digits_part}) to an integer.'
)
# Reconvert to long for the final result
s = int(digits * units[suffix])
else:
raise errors.DockerException(
'The specified value for memory ({}) should specify the'
' units. The postfix should be one of the `b` `k` `m` `g`'
' characters'.format(s)
f'The specified value for memory ({s}) should specify the units. '
'The postfix should be one of the `b` `k` `m` `g` characters'
)
return s
@ -465,8 +461,7 @@ def parse_env_file(env_file):
environment[k] = v
else:
raise errors.DockerException(
'Invalid line in environment file {}:\n{}'.format(
env_file, line))
f'Invalid line in environment file {env_file}:\n{line}')
return environment

View File

@ -56,7 +56,7 @@ master_doc = 'index'
# General information about the project.
project = 'Docker SDK for Python'
year = datetime.datetime.now().year
copyright = '%d Docker Inc' % year
copyright = f'{year} Docker Inc'
author = 'Docker Inc'
# The version info for the project you're documenting, acts as replacement for

View File

@ -80,7 +80,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40):
start_time = time.time()
while not condition():
if time.time() - start_time > timeout:
raise AssertionError("Timeout: %s" % condition)
raise AssertionError(f"Timeout: {condition}")
time.sleep(delay)

View File

@ -72,6 +72,4 @@ class UnixconnTest(unittest.TestCase):
client.close()
del client
assert len(w) == 0, "No warnings produced: {}".format(
w[0].message
)
assert len(w) == 0, f"No warnings produced: {w[0].message}"

View File

@ -666,9 +666,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
test_file.seek(0)
ctnr = self.client.create_container(
TEST_IMG,
'cat {}'.format(
os.path.join('/vol1/', os.path.basename(test_file.name))
),
f"cat {os.path.join('/vol1/', os.path.basename(test_file.name))}",
volumes=['/vol1']
)
self.tmp_containers.append(ctnr)
@ -826,7 +824,7 @@ class LogsTest(BaseAPIIntegrationTest):
exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0
logs = self.client.logs(id)
assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')
def test_logs_tail_option(self):
snippet = '''Line1
@ -857,7 +855,7 @@ Line2'''
exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0
assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')
@pytest.mark.timeout(5)
@pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'),
@ -878,7 +876,7 @@ Line2'''
for chunk in generator:
logs += chunk
assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')
def test_logs_with_dict_instead_of_id(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
@ -891,7 +889,7 @@ Line2'''
exitcode = self.client.wait(id)['StatusCode']
assert exitcode == 0
logs = self.client.logs(container)
assert logs == (snippet + '\n').encode(encoding='ascii')
assert logs == f"{snippet}\n".encode(encoding='ascii')
def test_logs_with_tail_0(self):
snippet = 'Flowering Nights (Sakuya Iyazoi)'
@ -920,7 +918,7 @@ Line2'''
logs_until_1 = self.client.logs(container, until=1)
assert logs_until_1 == b''
logs_until_now = self.client.logs(container, datetime.now())
assert logs_until_now == (snippet + '\n').encode(encoding='ascii')
assert logs_until_now == f"{snippet}\n".encode(encoding='ascii')
class DiffTest(BaseAPIIntegrationTest):
@ -1086,7 +1084,7 @@ class PortTest(BaseAPIIntegrationTest):
ip, host_port = port_binding['HostIp'], port_binding['HostPort']
port_binding = port if not protocol else port + "/" + protocol
port_binding = port if not protocol else f"{port}/{protocol}"
assert ip == port_bindings[port_binding][0]
assert host_port == port_bindings[port_binding][1]

View File

@ -103,8 +103,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
if exitcode != 0:
output = self.client.logs(container)
raise Exception(
"Container exited with code {}:\n{}"
.format(exitcode, output))
f"Container exited with code {exitcode}:\n{output}")
return container

View File

@ -22,7 +22,7 @@ class TestStore:
def setup_method(self):
self.tmp_keys = []
if sys.platform.startswith('linux'):
if shutil.which('docker-credential-' + DEFAULT_LINUX_STORE):
if shutil.which(f"docker-credential-{DEFAULT_LINUX_STORE}"):
self.store = Store(DEFAULT_LINUX_STORE)
elif shutil.which('docker-credential-pass'):
self.store = Store('pass')

View File

@ -49,7 +49,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
container = client.containers.run(
"alpine", "sh -c 'echo \"hello\" > /insidecontainer/test'",
volumes=["%s:/insidecontainer" % path],
volumes=[f"{path}:/insidecontainer"],
detach=True
)
self.tmp_containers.append(container.id)
@ -58,7 +58,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
name = "container_volume_test"
out = client.containers.run(
"alpine", "cat /insidecontainer/test",
volumes=["%s:/insidecontainer" % path],
volumes=[f"{path}:/insidecontainer"],
name=name
)
self.tmp_containers.append(name)

View File

@ -110,8 +110,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
if exitcode != 0:
output = self.client.logs(container)
raise Exception(
"Container exited with code {}:\n{}"
.format(exitcode, output))
f"Container exited with code {exitcode}:\n{output}")
return container

View File

@ -89,7 +89,7 @@ class BuildTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'build',
f"{url_prefix}build",
stream=True,
data=None,
headers=expected_headers,
@ -193,10 +193,10 @@ class BuildTest(BaseAPIClientTest):
'foo/Dockerfile.foo', None
)
assert process_dockerfile(
'../Dockerfile', pre(base + '\\foo')
'../Dockerfile', pre(f"{base}\\foo")
)[1] is not None
assert process_dockerfile(
'../baz/Dockerfile.baz', pre(base + '/baz')
'../baz/Dockerfile.baz', pre(f"{base}/baz")
) == ('../baz/Dockerfile.baz', None)
def test_process_dockerfile(self):
@ -218,8 +218,8 @@ class BuildTest(BaseAPIClientTest):
'foo/Dockerfile.foo', None
)
assert process_dockerfile(
'../Dockerfile', base + '/foo'
'../Dockerfile', f"{base}/foo"
)[1] is not None
assert process_dockerfile('../baz/Dockerfile.baz', base + '/baz') == (
assert process_dockerfile('../baz/Dockerfile.baz', f"{base}/baz") == (
'../baz/Dockerfile.baz', None
)

View File

@ -32,9 +32,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID
)
assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start"
assert json.loads(args[1]['data']) == {
'Tty': False,
@ -51,9 +49,7 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True)
args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{}/start'.format(
fake_api.FAKE_EXEC_ID
)
assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start"
assert json.loads(args[1]['data']) == {
'Tty': False,
@ -68,16 +64,14 @@ class ExecTest(BaseAPIClientTest):
self.client.exec_inspect(fake_api.FAKE_EXEC_ID)
args = fake_request.call_args
assert args[0][1] == url_prefix + 'exec/{}/json'.format(
fake_api.FAKE_EXEC_ID
)
assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/json"
def test_exec_resize(self):
self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60)
fake_request.assert_called_with(
'POST',
url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize',
f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/resize",
params={'h': 20, 'w': 60},
timeout=DEFAULT_TIMEOUT_SECONDS
)

View File

@ -21,7 +21,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
f"{url_prefix}images/json",
params={'only_ids': 0, 'all': 1},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -31,7 +31,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
f"{url_prefix}images/json",
params={'only_ids': 0, 'all': 0,
'filters': '{"reference": ["foo:bar"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS
@ -42,7 +42,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
f"{url_prefix}images/json",
params={'only_ids': 1, 'all': 1},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -52,7 +52,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
f"{url_prefix}images/json",
params={'only_ids': 1, 'all': 0},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -62,7 +62,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/json',
f"{url_prefix}images/json",
params={'only_ids': 0, 'all': 0,
'filters': '{"dangling": ["true"]}'},
timeout=DEFAULT_TIMEOUT_SECONDS
@ -72,7 +72,7 @@ class ImageTest(BaseAPIClientTest):
self.client.pull('joffrey/test001')
args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create'
assert args[0][1] == f"{url_prefix}images/create"
assert args[1]['params'] == {
'tag': 'latest', 'fromImage': 'joffrey/test001'
}
@ -82,7 +82,7 @@ class ImageTest(BaseAPIClientTest):
self.client.pull('joffrey/test001', stream=True)
args = fake_request.call_args
assert args[0][1] == url_prefix + 'images/create'
assert args[0][1] == f"{url_prefix}images/create"
assert args[1]['params'] == {
'tag': 'latest', 'fromImage': 'joffrey/test001'
}
@ -93,7 +93,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'commit',
f"{url_prefix}commit",
data='{}',
headers={'Content-Type': 'application/json'},
params={
@ -113,7 +113,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'DELETE',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID,
f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}",
params={'force': False, 'noprune': False},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -123,7 +123,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/test_image/history',
f"{url_prefix}images/test_image/history",
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -136,7 +136,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/create',
f"{url_prefix}images/create",
params={
'repo': fake_api.FAKE_REPO_NAME,
'tag': fake_api.FAKE_TAG_NAME,
@ -157,7 +157,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/create',
f"{url_prefix}images/create",
params={
'repo': fake_api.FAKE_REPO_NAME,
'tag': fake_api.FAKE_TAG_NAME,
@ -179,7 +179,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/create',
f"{url_prefix}images/create",
params={
'repo': fake_api.FAKE_REPO_NAME,
'tag': fake_api.FAKE_TAG_NAME,
@ -194,7 +194,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/test_image/json',
f"{url_prefix}images/test_image/json",
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -212,7 +212,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/test_image/push',
f"{url_prefix}images/test_image/push",
params={
'tag': None
},
@ -231,7 +231,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/test_image/push',
f"{url_prefix}images/test_image/push",
params={
'tag': fake_api.FAKE_TAG_NAME,
},
@ -255,7 +255,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/test_image/push',
f"{url_prefix}images/test_image/push",
params={
'tag': fake_api.FAKE_TAG_NAME,
},
@ -273,7 +273,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/test_image/push',
f"{url_prefix}images/test_image/push",
params={
'tag': None
},
@ -288,7 +288,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
params={
'tag': None,
'repo': 'repo',
@ -306,7 +306,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
params={
'tag': 'tag',
'repo': 'repo',
@ -321,7 +321,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
params={
'tag': None,
'repo': 'repo',
@ -335,7 +335,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/get',
f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/get",
stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -345,7 +345,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/load',
f"{url_prefix}images/load",
data='Byte Stream....',
stream=True,
params={},
@ -357,7 +357,7 @@ class ImageTest(BaseAPIClientTest):
fake_request.assert_called_with(
'POST',
url_prefix + 'images/load',
f"{url_prefix}images/load",
data='Byte Stream....',
stream=True,
params={'quiet': True},

View File

@ -28,7 +28,7 @@ class NetworkTest(BaseAPIClientTest):
with mock.patch('docker.api.client.APIClient.get', get):
assert self.client.networks() == networks
assert get.call_args[0][0] == url_prefix + 'networks'
assert get.call_args[0][0] == f"{url_prefix}networks"
filters = json.loads(get.call_args[1]['params']['filters'])
assert not filters
@ -54,7 +54,7 @@ class NetworkTest(BaseAPIClientTest):
result = self.client.create_network('foo')
assert result == network_data
assert post.call_args[0][0] == url_prefix + 'networks/create'
assert post.call_args[0][0] == f"{url_prefix}networks/create"
assert json.loads(post.call_args[1]['data']) == {"Name": "foo"}
@ -97,7 +97,7 @@ class NetworkTest(BaseAPIClientTest):
self.client.remove_network(network_id)
args = delete.call_args
assert args[0][0] == url_prefix + f'networks/{network_id}'
assert args[0][0] == f"{url_prefix}networks/{network_id}"
def test_inspect_network(self):
network_id = 'abc12345'
@ -117,7 +117,7 @@ class NetworkTest(BaseAPIClientTest):
assert result == network_data
args = get.call_args
assert args[0][0] == url_prefix + f'networks/{network_id}'
assert args[0][0] == f"{url_prefix}networks/{network_id}"
def test_connect_container_to_network(self):
network_id = 'abc12345'
@ -135,7 +135,7 @@ class NetworkTest(BaseAPIClientTest):
)
assert post.call_args[0][0] == (
url_prefix + f'networks/{network_id}/connect'
f"{url_prefix}networks/{network_id}/connect"
)
assert json.loads(post.call_args[1]['data']) == {
@ -158,7 +158,7 @@ class NetworkTest(BaseAPIClientTest):
container={'Id': container_id}, net_id=network_id)
assert post.call_args[0][0] == (
url_prefix + f'networks/{network_id}/disconnect'
f"{url_prefix}networks/{network_id}/disconnect"
)
assert json.loads(post.call_args[1]['data']) == {
'Container': container_id

View File

@ -86,9 +86,7 @@ def fake_read_from_socket(self, response, stream, tty=False, demux=False):
url_base = f'{fake_api.prefix}/'
url_prefix = '{}v{}/'.format(
url_base,
docker.constants.DEFAULT_DOCKER_API_VERSION)
url_prefix = f'{url_base}v{docker.constants.DEFAULT_DOCKER_API_VERSION}/'
class BaseAPIClientTest(unittest.TestCase):
@ -130,22 +128,18 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_valid_resource(self):
url = self.client._url('/hello/{0}/world', 'somename')
assert url == '{}{}'.format(url_prefix, 'hello/somename/world')
assert url == f"{url_prefix}hello/somename/world"
url = self.client._url(
'/hello/{0}/world/{1}', 'somename', 'someothername'
)
assert url == '{}{}'.format(
url_prefix, 'hello/somename/world/someothername'
)
assert url == f"{url_prefix}hello/somename/world/someothername"
url = self.client._url('/hello/{0}/world', 'some?name')
assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world')
assert url == f"{url_prefix}hello/some%3Fname/world"
url = self.client._url("/images/{0}/push", "localhost:5000/image")
assert url == '{}{}'.format(
url_prefix, 'images/localhost:5000/image/push'
)
assert url == f"{url_prefix}images/localhost:5000/image/push"
def test_url_invalid_resource(self):
with pytest.raises(ValueError):
@ -153,20 +147,20 @@ class DockerApiTest(BaseAPIClientTest):
def test_url_no_resource(self):
url = self.client._url('/simple')
assert url == '{}{}'.format(url_prefix, 'simple')
assert url == f"{url_prefix}simple"
def test_url_unversioned_api(self):
url = self.client._url(
'/hello/{0}/world', 'somename', versioned_api=False
)
assert url == '{}{}'.format(url_base, 'hello/somename/world')
assert url == f"{url_base}hello/somename/world"
def test_version(self):
self.client.version()
fake_request.assert_called_with(
'GET',
url_prefix + 'version',
f"{url_prefix}version",
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -175,7 +169,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_base + 'version',
f"{url_base}version",
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -194,7 +188,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'info',
f"{url_prefix}info",
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -203,7 +197,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'images/search',
f"{url_prefix}images/search",
params={'term': 'busybox'},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -212,7 +206,7 @@ class DockerApiTest(BaseAPIClientTest):
self.client.login('sakuya', 'izayoi')
args = fake_request.call_args
assert args[0][0] == 'POST'
assert args[0][1] == url_prefix + 'auth'
assert args[0][1] == f"{url_prefix}auth"
assert json.loads(args[1]['data']) == {
'username': 'sakuya', 'password': 'izayoi'
}
@ -229,7 +223,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'events',
f"{url_prefix}events",
params={'since': None, 'until': None, 'filters': None},
stream=True,
timeout=None
@ -245,7 +239,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with(
'GET',
url_prefix + 'events',
f"{url_prefix}events",
params={
'since': ts - 10,
'until': ts + 10,
@ -264,7 +258,7 @@ class DockerApiTest(BaseAPIClientTest):
expected_filters = docker.utils.convert_filters(filters)
fake_request.assert_called_with(
'GET',
url_prefix + 'events',
f"{url_prefix}events",
params={
'since': None,
'until': None,
@ -318,7 +312,7 @@ class DockerApiTest(BaseAPIClientTest):
fake_request.assert_called_with(
'DELETE',
url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
f"{url_prefix}containers/{fake_api.FAKE_CONTAINER_ID}",
params={'v': False, 'link': True, 'force': False},
timeout=DEFAULT_TIMEOUT_SECONDS
)
@ -332,7 +326,7 @@ class DockerApiTest(BaseAPIClientTest):
self.client.create_host_config(security_opt='wrong')
def test_stream_helper_decoding(self):
status_code, content = fake_api.fake_responses[url_prefix + 'events']()
status_code, content = fake_api.fake_responses[f"{url_prefix}events"]()
content_str = json.dumps(content)
content_str = content_str.encode('utf-8')
body = io.BytesIO(content_str)
@ -443,7 +437,7 @@ class UnixSocketStreamTest(unittest.TestCase):
lines = []
for i in range(0, 50):
line = str(i).encode()
lines += [('%x' % len(line)).encode(), line]
lines += [f'{len(line):x}'.encode(), line]
lines.append(b'0')
lines.append(b'')
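The chunk-size line above also shows that printf-style specifiers carry over as f-string format specs: '%x' % n becomes f'{n:x}'. A small sketch with an arbitrary payload:

line = b'47'  # illustrative chunk payload

old = ('%x' % len(line)).encode()  # printf-style hex of the chunk length
new = f'{len(line):x}'.encode()    # same conversion via a format spec

assert old == new == b'2'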
@ -454,7 +448,7 @@ class UnixSocketStreamTest(unittest.TestCase):
) + b'\r\n'.join(lines)
with APIClient(
base_url="http+unix://" + self.socket_file,
base_url=f"http+unix://{self.socket_file}",
version=DEFAULT_DOCKER_API_VERSION) as client:
for i in range(5):
try:
@ -490,8 +484,7 @@ class TCPSocketStreamTest(unittest.TestCase):
cls.thread = threading.Thread(target=cls.server.serve_forever)
cls.thread.daemon = True
cls.thread.start()
cls.address = 'http://{}:{}'.format(
socket.gethostname(), cls.server.server_address[1])
cls.address = f'http://{socket.gethostname()}:{cls.server.server_address[1]}'
@classmethod
def teardown_class(cls):
@ -600,7 +593,7 @@ class UserAgentTest(unittest.TestCase):
self.patcher = mock.patch.object(
APIClient,
'send',
return_value=fake_resp("GET", "%s/version" % fake_api.prefix)
return_value=fake_resp("GET", f"{fake_api.prefix}/version")
)
self.mock_send = self.patcher.start()
@ -613,7 +606,7 @@ class UserAgentTest(unittest.TestCase):
assert self.mock_send.call_count == 1
headers = self.mock_send.call_args[0][0].headers
expected = 'docker-sdk-python/%s' % docker.__version__
expected = f'docker-sdk-python/{docker.__version__}'
assert headers['User-Agent'] == expected
def test_custom_user_agent(self):

View File

@ -14,7 +14,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'GET'
assert args[0][1] == url_prefix + 'volumes'
assert args[0][1] == f"{url_prefix}volumes"
def test_list_volumes_and_filters(self):
volumes = self.client.volumes(filters={'dangling': True})
@ -23,7 +23,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'GET'
assert args[0][1] == url_prefix + 'volumes'
assert args[0][1] == f"{url_prefix}volumes"
assert args[1] == {'params': {'filters': '{"dangling": ["true"]}'},
'timeout': 60}
@ -37,7 +37,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'POST'
assert args[0][1] == url_prefix + 'volumes/create'
assert args[0][1] == f"{url_prefix}volumes/create"
assert json.loads(args[1]['data']) == {'Name': name}
@requires_api_version('1.23')
@ -63,7 +63,7 @@ class VolumeTest(BaseAPIClientTest):
args = fake_request.call_args
assert args[0][0] == 'POST'
assert args[0][1] == url_prefix + 'volumes/create'
assert args[0][1] == f"{url_prefix}volumes/create"
data = json.loads(args[1]['data'])
assert 'Driver' in data
assert data['Driver'] == driver_name

View File

@ -290,9 +290,10 @@ class LoadConfigTest(unittest.TestCase):
folder = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, folder)
dockercfg_path = os.path.join(folder,
'.{}.dockercfg'.format(
random.randrange(100000)))
dockercfg_path = os.path.join(
folder,
f'.{random.randrange(100000)}.dockercfg',
)
registry = 'https://your.private.registry.io'
auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
config = {

View File

@ -85,10 +85,7 @@ class ClientTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",
@ -124,10 +121,7 @@ class ClientTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",
@ -198,10 +192,7 @@ class FromEnvTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",
@ -235,10 +226,7 @@ class FromEnvTest(unittest.TestCase):
mock_obj.return_value.urlopen.return_value.status = 200
client.ping()
base_url = "{base_url}/v{version}/_ping".format(
base_url=client.api.base_url,
version=client.api._version
)
base_url = f"{client.api.base_url}/v{client.api._version}/_ping"
mock_obj.assert_called_once_with(base_url,
"/var/run/docker.sock",

View File

@ -617,17 +617,11 @@ fake_responses = {
get_fake_volume_list,
(f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
get_fake_volume,
('{1}/{0}/volumes/{2}'.format(
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'GET'):
(f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'GET'):
get_fake_volume,
('{1}/{0}/volumes/{2}'.format(
CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
), 'DELETE'):
(f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'DELETE'):
fake_remove_volume,
('{1}/{0}/nodes/{2}/update?version=1'.format(
CURRENT_VERSION, prefix, FAKE_NODE_ID
), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1', 'POST'):
post_fake_update_node,
(f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
post_fake_join_swarm,
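The removed keys were built with positional indices ('{1}/{0}/...') that reorder the format() arguments; the f-string keys name each value where it appears. A sketch with invented values:

prefix = 'http+docker://localhost'  # illustrative
CURRENT_VERSION = '1.41'            # illustrative
FAKE_VOLUME_NAME = 'somevolume'     # illustrative

old_key = '{1}/{0}/volumes/{2}'.format(CURRENT_VERSION, prefix, FAKE_VOLUME_NAME)
new_key = f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}'
assert old_key == new_key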
@ -635,21 +629,13 @@ fake_responses = {
get_fake_network_list,
(f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
post_fake_network,
('{1}/{0}/networks/{2}'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'GET'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'GET'):
get_fake_network,
('{1}/{0}/networks/{2}'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'DELETE'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'DELETE'):
delete_fake_network,
('{1}/{0}/networks/{2}/connect'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect', 'POST'):
post_fake_network_connect,
('{1}/{0}/networks/{2}/disconnect'.format(
CURRENT_VERSION, prefix, FAKE_NETWORK_ID
), 'POST'):
(f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect', 'POST'):
post_fake_network_disconnect,
f'{prefix}/{CURRENT_VERSION}/secrets/create':
post_fake_secret,

View File

@ -20,7 +20,7 @@ class SwarmTest(BaseAPIClientTest):
)
args = fake_request.call_args
assert args[0][1] == (
url_prefix + 'nodes/24ifsmvkjbyhk/update?version=1'
f"{url_prefix}nodes/24ifsmvkjbyhk/update?version=1"
)
assert json.loads(args[1]['data']) == node_spec
assert args[1]['headers']['Content-Type'] == 'application/json'
@ -45,7 +45,7 @@ class SwarmTest(BaseAPIClientTest):
args = fake_request.call_args
assert (args[0][1] == url_prefix + 'swarm/join')
assert (args[0][1] == f"{url_prefix}swarm/join")
assert (json.loads(args[1]['data']) == data)
assert (args[1]['headers']['Content-Type'] == 'application/json')
@ -64,6 +64,6 @@ class SwarmTest(BaseAPIClientTest):
args = fake_request.call_args
assert (args[0][1] == url_prefix + 'swarm/join')
assert (args[0][1] == f"{url_prefix}swarm/join")
assert (json.loads(args[1]['data']) == data)
assert (args[1]['headers']['Content-Type'] == 'application/json')

View File

@ -486,9 +486,9 @@ class PortsTest(unittest.TestCase):
def test_split_port_with_protocol(self):
for protocol in ['tcp', 'udp', 'sctp']:
internal_port, external_port = split_port(
"127.0.0.1:1000:2000/" + protocol
f"127.0.0.1:1000:2000/{protocol}"
)
assert internal_port == ["2000/" + protocol]
assert internal_port == [f"2000/{protocol}"]
assert external_port == [("127.0.0.1", "1000")]
def test_split_port_with_host_ip_no_port(self):