mirror of https://github.com/docker/docker-py.git
commit 803feb8696
@@ -6,16 +6,16 @@ env:
   DOCKER_BUILDKIT: '1'
 
 jobs:
-  flake8:
+  lint:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
-          python-version: '3.x'
-      - run: pip install -U flake8
-      - name: Run flake8
-        run: flake8 docker/ tests/
+          python-version: '3.11'
+      - run: pip install -U ruff==0.0.265
+      - name: Run ruff
+        run: ruff docker tests
 
   unit-tests:
     runs-on: ubuntu-latest
@@ -44,7 +44,7 @@ paragraph in the Docker contribution guidelines.
 Before we can review your pull request, please ensure that nothing has been
 broken by your changes by running the test suite. You can do so simply by
 running `make test` in the project root. This also includes coding style using
-`flake8`
+`ruff`
 
 ### 3. Write clear, self-contained commits
Makefile (8 changes)

@@ -46,7 +46,7 @@ build-dind-certs:
 	docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs .
 
 .PHONY: test
-test: flake8 unit-test-py3 integration-dind integration-dind-ssl
+test: ruff unit-test-py3 integration-dind integration-dind-ssl
 
 .PHONY: unit-test-py3
 unit-test-py3: build-py3

@@ -163,9 +163,9 @@ integration-dind-ssl: build-dind-certs build-py3 setup-network
 
 	docker rm -vf dpy-dind-ssl dpy-dind-certs
 
-.PHONY: flake8
-flake8: build-py3
-	docker run -t --rm docker-sdk-python3 flake8 docker tests
+.PHONY: ruff
+ruff: build-py3
+	docker run -t --rm docker-sdk-python3 ruff docker tests
 
 .PHONY: docs
 docs: build-docs
@@ -1,4 +1,3 @@
-# flake8: noqa
 from .api import APIClient
 from .client import DockerClient, from_env
 from .context import Context
@@ -1,2 +1 @@
-# flake8: noqa
 from .client import APIClient
@@ -314,9 +314,8 @@ class BuildApiMixin:
             auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {})
 
             log.debug(
-                'Sending auth config ({})'.format(
-                    ', '.join(repr(k) for k in auth_data.keys())
-                )
+                "Sending auth config (%s)",
+                ', '.join(repr(k) for k in auth_data),
             )
 
             if auth_data:
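The hunk above also switches log.debug from an eagerly built str.format message to printf-style arguments. Only the final interpolation is deferred (the join argument is still evaluated), but the message string itself is rendered only when a handler actually emits the record. A minimal, self-contained sketch of the difference, independent of docker-py:

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("demo")
    keys = ["https://index.docker.io/v1/", "registry.example.com"]

    # Eager: the full message is formatted even though DEBUG is disabled.
    log.debug('Sending auth config ({})'.format(', '.join(repr(k) for k in keys)))

    # Lazy: the %s interpolation happens only if the record is emitted.
    log.debug('Sending auth config (%s)', ', '.join(repr(k) for k in keys))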
@@ -336,12 +335,9 @@ def process_dockerfile(dockerfile, path):
     abs_dockerfile = os.path.join(path, dockerfile)
     if constants.IS_WINDOWS_PLATFORM and path.startswith(
             constants.WINDOWS_LONGPATH_PREFIX):
-        abs_dockerfile = '{}{}'.format(
-            constants.WINDOWS_LONGPATH_PREFIX,
-            os.path.normpath(
-                abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]
-            )
-        )
+        normpath = os.path.normpath(
+            abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):])
+        abs_dockerfile = f'{constants.WINDOWS_LONGPATH_PREFIX}{normpath}'
     if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
             os.path.relpath(abs_dockerfile, path).startswith('..')):
         # Dockerfile not in context - read data to insert into tar later
@@ -160,10 +160,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install pypiwin32 package to enable npipe:// support'
-                )
+                ) from err
             self.mount('http+docker://', self._custom_adapter)
             self.base_url = 'http+docker://localnpipe'
         elif base_url.startswith('ssh://'):

@@ -172,10 +172,10 @@ class APIClient(
                     base_url, timeout, pool_connections=num_pools,
                     max_pool_size=max_pool_size, shell_out=use_ssh_client
                 )
-            except NameError:
+            except NameError as err:
                 raise DockerException(
                     'Install paramiko package to enable ssh:// support'
-                )
+                ) from err
             self.mount('http+docker://ssh', self._custom_adapter)
             self._unmount('http://', 'https://')
             self.base_url = 'http+docker://ssh'
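Both hunks above adopt PEP 3134 exception chaining. A minimal sketch of what `from err` buys (hypothetical names, not the real adapter classes):

    class DockerException(Exception):
        pass

    def make_adapter():
        try:
            NpipeHTTPAdapter  # NameError when the Windows extra is absent
        except NameError as err:
            # `from err` records the NameError as __cause__, so the traceback
            # reads "The above exception was the direct cause of the following
            # exception" instead of the misleading "During handling of the
            # above exception, another exception occurred".
            raise DockerException(
                'Install pypiwin32 package to enable npipe:// support'
            ) from err

    try:
        make_adapter()
    except DockerException as e:
        print(repr(e.__cause__))  # NameError(...)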
@@ -199,28 +199,27 @@ class APIClient(
             self._version = version
         if not isinstance(self._version, str):
             raise DockerException(
-                'Version parameter must be a string or None. Found {}'.format(
-                    type(version).__name__
-                )
+                'Version parameter must be a string or None. '
+                f'Found {type(version).__name__}'
             )
         if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION):
             raise InvalidVersion(
-                'API versions below {} are no longer supported by this '
-                'library.'.format(MINIMUM_DOCKER_API_VERSION)
+                f'API versions below {MINIMUM_DOCKER_API_VERSION} are '
+                f'no longer supported by this library.'
             )
 
     def _retrieve_server_version(self):
         try:
             return self.version(api_version=False)["ApiVersion"]
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Invalid response from docker daemon: key "ApiVersion"'
                 ' is missing.'
-            )
+            ) from ke
         except Exception as e:
             raise DockerException(
                 f'Error while fetching server API version: {e}'
-            )
+            ) from e
 
     def _set_request_timeout(self, kwargs):
         """Prepare the kwargs for an HTTP request by inserting the timeout
@@ -248,19 +247,17 @@ class APIClient(
         for arg in args:
             if not isinstance(arg, str):
                 raise ValueError(
-                    'Expected a string but found {} ({}) '
-                    'instead'.format(arg, type(arg))
+                    f'Expected a string but found {arg} ({type(arg)}) instead'
                 )
 
         quote_f = partial(urllib.parse.quote, safe="/:")
         args = map(quote_f, args)
 
+        formatted_path = pathfmt.format(*args)
         if kwargs.get('versioned_api', True):
-            return '{}/v{}{}'.format(
-                self.base_url, self._version, pathfmt.format(*args)
-            )
+            return f'{self.base_url}/v{self._version}{formatted_path}'
         else:
-            return f'{self.base_url}{pathfmt.format(*args)}'
+            return f'{self.base_url}{formatted_path}'
 
     def _raise_for_status(self, response):
         """Raises stored :class:`APIError`, if one occurred."""
@@ -479,7 +476,7 @@ class APIClient(
             return self._multiplexed_response_stream_helper(res)
         else:
             return sep.join(
-                [x for x in self._multiplexed_buffer_helper(res)]
+                list(self._multiplexed_buffer_helper(res))
             )
 
     def _unmount(self, *args):
@@ -864,7 +864,7 @@ class ContainerApiMixin:
         else:
             raise errors.InvalidArgument(
                 'since value should be datetime or positive int/float,'
-                'not {}'.format(type(since))
+                f' not {type(since)}'
             )
 
         if until is not None:

@@ -880,8 +880,8 @@ class ContainerApiMixin:
             params['until'] = until
         else:
             raise errors.InvalidArgument(
-                'until value should be datetime or positive int/float, '
-                'not {}'.format(type(until))
+                f'until value should be datetime or positive int/float, '
+                f'not {type(until)}'
             )
 
         url = self._url("/containers/{0}/logs", container)

@@ -953,7 +953,7 @@ class ContainerApiMixin:
             return port_settings.get(private_port)
 
         for protocol in ['tcp', 'udp', 'sctp']:
-            h_ports = port_settings.get(private_port + '/' + protocol)
+            h_ports = port_settings.get(f"{private_port}/{protocol}")
             if h_ports:
                 break
@@ -7,9 +7,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec,
 
     def raise_version_error(param, min_version):
         raise errors.InvalidVersion(
-            '{} is not supported in API version < {}'.format(
-                param, min_version
-            )
+            f'{param} is not supported in API version < {min_version}'
         )
 
     if update_config is not None:
@@ -22,15 +22,15 @@ def resolve_repository_name(repo_name):
     index_name, remote_name = split_repo_name(repo_name)
     if index_name[0] == '-' or index_name[-1] == '-':
         raise errors.InvalidRepository(
-            'Invalid index name ({}). Cannot begin or end with a'
-            ' hyphen.'.format(index_name)
+            f'Invalid index name ({index_name}). '
+            'Cannot begin or end with a hyphen.'
         )
     return resolve_index_name(index_name), remote_name
 
 
 def resolve_index_name(index_name):
     index_name = convert_to_hostname(index_name)
-    if index_name == 'index.' + INDEX_NAME:
+    if index_name == f"index.{INDEX_NAME}":
         index_name = INDEX_NAME
     return index_name

@@ -99,9 +99,7 @@ class AuthConfig(dict):
         for registry, entry in entries.items():
             if not isinstance(entry, dict):
                 log.debug(
-                    'Config entry for key {} is not auth config'.format(
-                        registry
-                    )
+                    f'Config entry for key {registry} is not auth config'
                 )
                 # We sometimes fall back to parsing the whole config as if it
                 # was the auth config by itself, for legacy purposes. In that
@@ -109,17 +107,11 @@ class AuthConfig(dict):
                 # keys is not formatted properly.
                 if raise_on_error:
                     raise errors.InvalidConfigFile(
-                        'Invalid configuration for registry {}'.format(
-                            registry
-                        )
+                        f'Invalid configuration for registry {registry}'
                     )
                 return {}
             if 'identitytoken' in entry:
-                log.debug(
-                    'Found an IdentityToken entry for registry {}'.format(
-                        registry
-                    )
-                )
+                log.debug(f'Found an IdentityToken entry for registry {registry}')
                 conf[registry] = {
                     'IdentityToken': entry['identitytoken']
                 }

@@ -130,16 +122,15 @@ class AuthConfig(dict):
                 # a valid value in the auths config.
                 # https://github.com/docker/compose/issues/3265
                 log.debug(
-                    'Auth data for {} is absent. Client might be using a '
-                    'credentials store instead.'.format(registry)
+                    f'Auth data for {registry} is absent. '
+                    f'Client might be using a credentials store instead.'
                 )
                 conf[registry] = {}
                 continue
 
             username, password = decode_auth(entry['auth'])
             log.debug(
-                'Found entry (registry={}, username={})'
-                .format(repr(registry), repr(username))
+                f'Found entry (registry={registry!r}, username={username!r})'
             )
 
             conf[registry] = {
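`decode_auth` itself is untouched by this commit. For context: the `auth` value in a Docker config file is conventionally the base64 encoding of `username:password`, so a stand-in with the same shape (an assumption about the layout, not the library's exact implementation) would be:

    import base64

    def decode_auth_sketch(auth):
        # hypothetical stand-in for docker.auth.decode_auth
        data = base64.b64decode(auth).decode('utf-8')
        username, _, password = data.partition(':')
        return username, password

    print(decode_auth_sketch('dXNlcjpzM2NyM3Q='))  # ('user', 's3cr3t')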
@@ -277,7 +268,7 @@ class AuthConfig(dict):
         except credentials.StoreError as e:
             raise errors.DockerException(
                 f'Credentials store error: {repr(e)}'
-            )
+            ) from e
 
     def _get_store_instance(self, name):
         if name not in self._stores:
@@ -1,3 +1,2 @@
-# flake8: noqa
 from .context import Context
 from .api import ContextAPI
@@ -113,8 +113,8 @@ class ContextAPI:
                     names.append(data["Name"])
                 except Exception as e:
                     raise errors.ContextException(
-                        "Failed to load metafile {}: {}".format(
-                            filename, e))
+                        f"Failed to load metafile {filename}: {e}",
+                    ) from e
 
         contexts = [cls.DEFAULT_CONTEXT]
         for name in names:
@@ -77,5 +77,6 @@ def get_context_host(path=None, tls=False):
     host = utils.parse_host(path, IS_WINDOWS_PLATFORM, tls)
     if host == DEFAULT_UNIX_SOCKET:
         # remove http+ from default docker socket url
-        return host.strip("http+")
+        if host.startswith("http+"):
+            host = host[5:]
     return host
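This hunk fixes a genuine bug rather than style: str.strip("http+") removes any leading or trailing run of the characters h, t, p and +, it does not remove the prefix. A quick demonstration:

    host = "http+unix:///var/run/docker.sock"

    # strip() treats "http+" as a character set; it works here only by luck
    # and eats too much in other cases:
    print(host.strip("http+"))       # unix:///var/run/docker.sock
    print("thttp+x".strip("http+"))  # x  (the leading 't' is in the set too)

    # The fix tests for the prefix and slices it off:
    if host.startswith("http+"):
        host = host[5:]
    print(host)                      # unix:///var/run/docker.sock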
@@ -42,8 +42,9 @@ class Context:
             for k, v in endpoints.items():
                 if not isinstance(v, dict):
                     # unknown format
-                    raise ContextException("""Unknown endpoint format for
-                    context {}: {}""".format(name, v))
+                    raise ContextException(
+                        f"Unknown endpoint format for context {name}: {v}",
+                    )
 
                 self.endpoints[k] = v
                 if k != "docker":

@@ -96,8 +97,9 @@ class Context:
                 metadata = json.load(f)
             except (OSError, KeyError, ValueError) as e:
                 # unknown format
-                raise Exception("""Detected corrupted meta file for
-                context {} : {}""".format(name, e))
+                raise Exception(
+                    f"Detected corrupted meta file for context {name} : {e}"
+                ) from e
 
         # for docker endpoints, set defaults for
         # Host and SkipTLSVerify fields
@@ -1,4 +1,8 @@
-# flake8: noqa
 from .store import Store
 from .errors import StoreError, CredentialsNotFound
-from .constants import *
+from .constants import (
+    DEFAULT_LINUX_STORE,
+    DEFAULT_OSX_STORE,
+    DEFAULT_WIN32_STORE,
+    PROGRAM_PREFIX,
+)
@@ -13,13 +13,5 @@ class InitializationError(StoreError):
 def process_store_error(cpe, program):
     message = cpe.output.decode('utf-8')
     if 'credentials not found in native keychain' in message:
-        return CredentialsNotFound(
-            'No matching credentials in {}'.format(
-                program
-            )
-        )
-    return StoreError(
-        'Credentials store {} exited with "{}".'.format(
-            program, cpe.output.decode('utf-8').strip()
-        )
-    )
+        return CredentialsNotFound(f'No matching credentials in {program}')
+    return StoreError(f'Credentials store {program} exited with "{message}".')
@@ -20,9 +20,8 @@ class Store:
         self.environment = environment
         if self.exe is None:
             warnings.warn(
-                '{} not installed or not available in PATH'.format(
-                    self.program
-                )
+                f'{self.program} not installed or not available in PATH',
+                stacklevel=1,
             )
 
     def get(self, server):
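The explicit stacklevel=1 is presumably there for ruff's flake8-bugbear rule B028, which flags warnings.warn calls that leave stacklevel implicit (the "B" rules are selected in the pyproject.toml hunk below). What the knob does, in a standalone sketch:

    import warnings

    def deprecated_helper():
        # stacklevel=1 (the default) attributes the warning to this line;
        # stacklevel=2 attributes it to deprecated_helper's caller, which is
        # usually what a library wants its users to see.
        warnings.warn("helper is deprecated", DeprecationWarning, stacklevel=2)

    warnings.simplefilter("always")
    deprecated_helper()  # the reported filename:lineno is this call site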
@@ -73,9 +72,7 @@ class Store:
     def _execute(self, subcmd, data_input):
         if self.exe is None:
             raise errors.StoreError(
-                '{} not installed or not available in PATH'.format(
-                    self.program
-                )
+                f'{self.program} not installed or not available in PATH'
             )
         output = None
         env = create_environment_dict(self.environment)
@@ -84,18 +81,14 @@ class Store:
                 [self.exe, subcmd], input=data_input, env=env,
             )
         except subprocess.CalledProcessError as e:
-            raise errors.process_store_error(e, self.program)
+            raise errors.process_store_error(e, self.program) from e
         except OSError as e:
             if e.errno == errno.ENOENT:
                 raise errors.StoreError(
-                    '{} not installed or not available in PATH'.format(
-                        self.program
-                    )
-                )
+                    f'{self.program} not installed or not available in PATH'
+                ) from e
             else:
                 raise errors.StoreError(
-                    'Unexpected OS error "{}", errno={}'.format(
-                        e.strerror, e.errno
-                    )
-                )
+                    f'Unexpected OS error "{e.strerror}", errno={e.errno}'
+                ) from e
         return output
@@ -54,14 +54,16 @@ class APIError(requests.exceptions.HTTPError, DockerException):
         message = super().__str__()
 
         if self.is_client_error():
-            message = '{} Client Error for {}: {}'.format(
-                self.response.status_code, self.response.url,
-                self.response.reason)
+            message = (
+                f'{self.response.status_code} Client Error for '
+                f'{self.response.url}: {self.response.reason}'
+            )
 
         elif self.is_server_error():
-            message = '{} Server Error for {}: {}'.format(
-                self.response.status_code, self.response.url,
-                self.response.reason)
+            message = (
+                f'{self.response.status_code} Server Error for '
+                f'{self.response.url}: {self.response.reason}'
+            )
 
         if self.explanation:
             message = f'{message} ("{self.explanation}")'
@@ -142,10 +144,10 @@ class ContainerError(DockerException):
         self.stderr = stderr
 
         err = f": {stderr}" if stderr is not None else ""
-        msg = ("Command '{}' in image '{}' returned non-zero exit "
-               "status {}{}").format(command, image, exit_status, err)
-        super().__init__(msg)
+        super().__init__(
+            f"Command '{command}' in image '{image}' "
+            f"returned non-zero exit status {exit_status}{err}"
+        )
 
 
 class StreamParseError(RuntimeError):
@@ -47,11 +47,11 @@ class Container(Model):
         try:
             result = self.attrs['Config'].get('Labels')
             return result or {}
-        except KeyError:
+        except KeyError as ke:
             raise DockerException(
                 'Label data is not available for sparse objects. Call reload()'
                 ' to retrieve all information'
-            )
+            ) from ke
 
     @property
     def status(self):
@@ -15,10 +15,8 @@ class Image(Model):
     An image on the server.
     """
     def __repr__(self):
-        return "<{}: '{}'>".format(
-            self.__class__.__name__,
-            "', '".join(self.tags),
-        )
+        tag_str = "', '".join(self.tags)
+        return f"<{self.__class__.__name__}: '{tag_str}'>"
 
     @property
     def labels(self):
@@ -458,7 +456,8 @@ class ImageCollection(Collection):
         if 'stream' in kwargs:
             warnings.warn(
                 '`stream` is not a valid parameter for this method'
-                ' and will be overridden'
+                ' and will be overridden',
+                stacklevel=1,
             )
             del kwargs['stream']

@@ -471,9 +470,8 @@ class ImageCollection(Collection):
             # to be pulled.
             pass
         if not all_tags:
-            return self.get('{0}{2}{1}'.format(
-                repository, tag, '@' if tag.startswith('sha256:') else ':'
-            ))
+            sep = '@' if tag.startswith('sha256:') else ':'
+            return self.get(f'{repository}{sep}{tag}')
         return self.list(repository)
 
     def push(self, repository, tag=None, **kwargs):
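The second hunk names the separator choice, which reflects the two reference forms Docker accepts: repo:tag for tags and repo@sha256:... for content digests. In isolation:

    def image_ref(repository, tag):
        # digest references join with '@', tag references with ':'
        sep = '@' if tag.startswith('sha256:') else ':'
        return f'{repository}{sep}{tag}'

    print(image_ref('busybox', 'latest'))         # busybox:latest
    print(image_ref('busybox', 'sha256:abc123'))  # busybox@sha256:abc123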
@@ -187,7 +187,7 @@ class PluginCollection(Collection):
         """
         privileges = self.client.api.plugin_privileges(remote_name)
         it = self.client.api.pull_plugin(remote_name, privileges, local_name)
-        for data in it:
+        for _data in it:
             pass
         return self.get(local_name or remote_name)
@@ -64,9 +64,10 @@ class Collection:
 
     def __call__(self, *args, **kwargs):
         raise TypeError(
-            "'{}' object is not callable. You might be trying to use the old "
-            "(pre-2.0) API - use docker.APIClient if so."
-            .format(self.__class__.__name__))
+            f"'{self.__class__.__name__}' object is not callable. "
+            "You might be trying to use the old (pre-2.0) API - "
+            "use docker.APIClient if so."
+        )
 
     def list(self):
         raise NotImplementedError

@@ -88,5 +89,4 @@ class Collection:
         elif isinstance(attrs, dict):
             return self.model(attrs=attrs, client=self.client, collection=self)
         else:
-            raise Exception("Can't create %s from %s" %
-                            (self.model.__name__, attrs))
+            raise Exception(f"Can't create {self.model.__name__} from {attrs}")
@@ -55,7 +55,7 @@ class TLSConfig:
                 raise errors.TLSParameterError(
                     'client_cert must be a tuple of'
                     ' (client certificate, key file)'
-                )
+                ) from None
 
         if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or
                 not os.path.isfile(tls_key)):
@@ -1,4 +1,3 @@
-# flake8: noqa
 from .unixconn import UnixHTTPAdapter
 from .ssladapter import SSLHTTPAdapter
 try:
@@ -46,9 +46,8 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
         conn = None
         try:
             conn = self.pool.get(block=self.block, timeout=timeout)
-
-        except AttributeError:  # self.pool is None
-            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
+        except AttributeError as ae:  # self.pool is None
+            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae
 
         except queue.Empty:
             if self.block:

@@ -56,7 +55,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
                     self,
                     "Pool reached maximum size and no more "
                     "connections are allowed."
-                )
+                ) from None
             # Oh well, we'll create a new connection then
 
         return conn or self._new_conn()
@@ -141,8 +141,8 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
         try:
             conn = self.pool.get(block=self.block, timeout=timeout)
 
-        except AttributeError:  # self.pool is None
-            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
+        except AttributeError as ae:  # self.pool is None
+            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae
 
         except queue.Empty:
             if self.block:

@@ -150,7 +150,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
                     self,
                     "Pool reached maximum size and no more "
                     "connections are allowed."
-                )
+                ) from None
             # Oh well, we'll create a new connection then
 
         return conn or self._new_conn()
@@ -55,7 +55,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter):
                  max_pool_size=constants.DEFAULT_MAX_POOL_SIZE):
         socket_path = socket_url.replace('http+unix://', '')
         if not socket_path.startswith('/'):
-            socket_path = '/' + socket_path
+            socket_path = f"/{socket_path}"
         self.socket_path = socket_path
         self.timeout = timeout
         self.max_pool_size = max_pool_size
@@ -1,4 +1,3 @@
-# flake8: noqa
 from .containers import (
     ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest
 )
@@ -48,8 +48,11 @@ class LogConfig(DictType):
     >>> container = client.create_container('busybox', 'true',
     ...                                     host_config=hc)
     >>> client.inspect_container(container)['HostConfig']['LogConfig']
-    {'Type': 'json-file', 'Config': {'labels': 'production_status,geo', 'max-size': '1g'}}
-    """ # noqa: E501
+    {
+        'Type': 'json-file',
+        'Config': {'labels': 'production_status,geo', 'max-size': '1g'}
+    }
+    """
     types = LogConfigTypesEnum
 
     def __init__(self, **kwargs):
@@ -652,25 +655,25 @@ class HostConfig(dict):
 
 
 def host_config_type_error(param, param_value, expected):
-    error_msg = 'Invalid type for {0} param: expected {1} but found {2}'
-    return TypeError(error_msg.format(param, expected, type(param_value)))
+    return TypeError(
+        f'Invalid type for {param} param: expected {expected} '
+        f'but found {type(param_value)}'
+    )
 
 
 def host_config_version_error(param, version, less_than=True):
     operator = '<' if less_than else '>'
-    error_msg = '{0} param is not supported in API versions {1} {2}'
-    return errors.InvalidVersion(error_msg.format(param, operator, version))
+    return errors.InvalidVersion(
+        f'{param} param is not supported in API versions {operator} {version}',
+    )
 
 
 def host_config_value_error(param, param_value):
-    error_msg = 'Invalid value for {0} param: {1}'
-    return ValueError(error_msg.format(param, param_value))
+    return ValueError(f'Invalid value for {param} param: {param_value}')
 
 
 def host_config_incompatible_error(param, param_value, incompatible_param):
-    error_msg = '\"{1}\" {0} is incompatible with {2}'
     return errors.InvalidArgument(
-        error_msg.format(param, param_value, incompatible_param)
+        f'\"{param_value}\" {param} is incompatible with {incompatible_param}'
     )
@@ -28,9 +28,9 @@ class CancellableStream:
         try:
             return next(self._stream)
         except urllib3.exceptions.ProtocolError:
-            raise StopIteration
+            raise StopIteration from None
         except OSError:
-            raise StopIteration
+            raise StopIteration from None
 
     next = __next__
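Unlike the `from err` edits elsewhere in this commit, these hunks (and the pool and TLSConfig ones above) use `raise ... from None`, which suppresses the implicit exception context instead of recording it. A minimal sketch:

    def next_item(pool):
        try:
            return pool.pop()
        except IndexError:
            # `from None` hides the IndexError entirely; the caller sees only
            # StopIteration, with no "During handling of the above
            # exception..." noise in the traceback.
            raise StopIteration from None

    try:
        next_item([])
    except StopIteration as e:
        print(e.__suppress_context__)  # True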
@@ -371,7 +371,7 @@ def _convert_generic_resources_dict(generic_resources):
     if not isinstance(generic_resources, dict):
         raise errors.InvalidArgument(
             'generic_resources must be a dict or a list '
-            ' (found {})'.format(type(generic_resources))
+            f'(found {type(generic_resources)})'
         )
     resources = []
     for kind, value in generic_resources.items():

@@ -381,9 +381,9 @@ def _convert_generic_resources_dict(generic_resources):
         elif isinstance(value, str):
             resource_type = 'NamedResourceSpec'
         else:
+            kv = {kind: value}
             raise errors.InvalidArgument(
-                'Unsupported generic resource reservation '
-                'type: {}'.format({kind: value})
+                f'Unsupported generic resource reservation type: {kv}'
             )
         resources.append({
             resource_type: {'Kind': kind, 'Value': value}
@@ -764,8 +764,8 @@ class PlacementPreference(dict):
     def __init__(self, strategy, descriptor):
         if strategy != 'spread':
             raise errors.InvalidArgument(
-                'PlacementPreference strategy value is invalid ({}):'
-                ' must be "spread".'.format(strategy)
+                f'PlacementPreference strategy value is invalid ({strategy}): '
+                'must be "spread".'
             )
         self['Spread'] = {'SpreadDescriptor': descriptor}
@@ -1,4 +1,4 @@
-# flake8: noqa
 from .build import create_archive, exclude_paths, mkbuildcontext, tar
 from .decorators import check_resource, minimum_version, update_headers
 from .utils import (
@@ -42,7 +42,7 @@ def exclude_paths(root, patterns, dockerfile=None):
     if dockerfile is None:
         dockerfile = 'Dockerfile'
 
-    patterns.append('!' + dockerfile)
+    patterns.append(f"!{dockerfile}")
     pm = PatternMatcher(patterns)
     return set(pm.walk(root))

@@ -93,10 +93,10 @@ def create_archive(root, files=None, fileobj=None, gzip=False,
             try:
                 with open(full_path, 'rb') as f:
                     t.addfile(i, f)
-            except OSError:
+            except OSError as oe:
                 raise OSError(
                     f'Can not read file in context: {full_path}'
-                )
+                ) from oe
         else:
             # Directories, FIFOs, symlinks... don't need to be read.
             t.addfile(i, None)

@@ -180,7 +180,7 @@ class PatternMatcher:
                 fpath = os.path.join(
                     os.path.relpath(current_dir, root), f
                 )
-                if fpath.startswith('.' + os.path.sep):
+                if fpath.startswith(f".{os.path.sep}"):
                     fpath = fpath[2:]
                 match = self.matches(fpath)
                 if not match:
@@ -27,9 +27,7 @@ def minimum_version(version):
     def wrapper(self, *args, **kwargs):
         if utils.version_lt(self._version, version):
             raise errors.InvalidVersion(
-                '{} is not available for version < {}'.format(
-                    f.__name__, version
-                )
+                f'{f.__name__} is not available for version < {version}',
             )
         return f(self, *args, **kwargs)
     return wrapper
@@ -79,18 +79,18 @@ def translate(pat):
                 i = i + 1
                 if i >= n:
                     # is "**EOF" - to align with .gitignore just accept all
-                    res = res + '.*'
+                    res = f"{res}.*"
                 else:
                     # is "**"
                     # Note that this allows for any # of /'s (even 0) because
                     # the .* will eat everything, even /'s
-                    res = res + '(.*/)?'
+                    res = f"{res}(.*/)?"
             else:
                 # is "*" so map it to anything but "/"
-                res = res + '[^/]*'
+                res = f"{res}[^/]*"
         elif c == '?':
             # "?" is any char except "/"
-            res = res + '[^/]'
+            res = f"{res}[^/]"
         elif c == '[':
             j = i
             if j < n and pat[j] == '!':

@@ -100,16 +100,16 @@ def translate(pat):
             while j < n and pat[j] != ']':
                 j = j + 1
             if j >= n:
-                res = res + '\\['
+                res = f"{res}\\["
             else:
                 stuff = pat[i:j].replace('\\', '\\\\')
                 i = j + 1
                 if stuff[0] == '!':
-                    stuff = '^' + stuff[1:]
+                    stuff = f"^{stuff[1:]}"
                 elif stuff[0] == '^':
-                    stuff = '\\' + stuff
+                    stuff = f"\\{stuff}"
                 res = f'{res}[{stuff}]'
         else:
             res = res + re.escape(c)
 
-    return res + '$'
+    return f"{res}$"
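The edits above are purely cosmetic (concatenation to f-strings); the translation rules themselves are unchanged. To make those rules concrete, here is a regex hand-built from the mappings shown in the diff ("**" to "(.*/)?", "*" to "[^/]*", with the trailing "$"), roughly what translate() would produce for "**/*.log":

    import re

    # Hand-assembled from the rules above, not obtained by calling translate().
    pattern = re.compile(r"(.*/)?[^/]*\.log$")

    print(bool(pattern.match("app.log")))        # True  - "**" may match zero dirs
    print(bool(pattern.match("a/b/app.log")))    # True
    print(bool(pattern.match("app.log/extra")))  # False - "*" never crosses "/"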
@@ -72,4 +72,4 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a):
         try:
             yield decoder(buffered)
         except Exception as e:
-            raise StreamParseError(e)
+            raise StreamParseError(e) from e
@@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False):
     if not end:
         return [start + proto]
     if randomly_available_port:
-        return [f'{start}-{end}' + proto]
+        return [f"{start}-{end}{proto}"]
     return [str(port) + proto for port in range(int(start), int(end) + 1)]
@@ -69,5 +69,9 @@ class ProxyConfig(dict):
         return proxy_env + environment
 
     def __str__(self):
-        return 'ProxyConfig(http={}, https={}, ftp={}, no_proxy={})'.format(
-            self.http, self.https, self.ftp, self.no_proxy)
+        return (
+            'ProxyConfig('
+            f'http={self.http}, https={self.https}, '
+            f'ftp={self.ftp}, no_proxy={self.no_proxy}'
+            ')'
+        )
@@ -42,7 +42,7 @@ def read(socket, n=4096):
     try:
         if hasattr(socket, 'recv'):
             return socket.recv(n)
-        if isinstance(socket, getattr(pysocket, 'SocketIO')):
+        if isinstance(socket, pysocket.SocketIO):
            return socket.read(n)
        return os.read(socket.fileno(), n)
    except OSError as e:
@@ -127,8 +127,7 @@ def convert_volume_binds(binds):
         if isinstance(v, dict):
             if 'ro' in v and 'mode' in v:
                 raise ValueError(
-                    'Binding cannot contain both "ro" and "mode": {}'
-                    .format(repr(v))
+                    f'Binding cannot contain both "ro" and "mode": {v!r}'
                 )
 
             bind = v['bind']

@@ -160,8 +159,8 @@ def convert_tmpfs_mounts(tmpfs):
 
     if not isinstance(tmpfs, list):
         raise ValueError(
-            'Expected tmpfs value to be either a list or a dict, found: {}'
-            .format(type(tmpfs).__name__)
+            'Expected tmpfs value to be either a list or a dict, '
+            f'found: {type(tmpfs).__name__}'
         )
 
     result = {}

@@ -175,8 +174,8 @@ def convert_tmpfs_mounts(tmpfs):
 
         else:
            raise ValueError(
-                "Expected item in tmpfs list to be a string, found: {}"
-                .format(type(mount).__name__)
+                "Expected item in tmpfs list to be a string, "
+                f"found: {type(mount).__name__}"
            )
 
        result[name] = options
@@ -218,9 +217,9 @@ def parse_host(addr, is_win32=False, tls=False):
 
     parsed_url = urlparse(addr)
     proto = parsed_url.scheme
-    if not proto or any([x not in string.ascii_letters + '+' for x in proto]):
+    if not proto or any(x not in f"{string.ascii_letters}+" for x in proto):
         # https://bugs.python.org/issue754016
-        parsed_url = urlparse('//' + addr, 'tcp')
+        parsed_url = urlparse(f"//{addr}", 'tcp')
         proto = 'tcp'
 
     if proto == 'fd':

@@ -256,15 +255,14 @@ def parse_host(addr, is_win32=False, tls=False):
 
     if parsed_url.path and proto == 'ssh':
         raise errors.DockerException(
-            'Invalid bind address format: no path allowed for this protocol:'
-            ' {}'.format(addr)
+            f'Invalid bind address format: no path allowed for this protocol: {addr}'
         )
     else:
         path = parsed_url.path
         if proto == 'unix' and parsed_url.hostname is not None:
             # For legacy reasons, we consider unix://path
             # to be valid and equivalent to unix:///path
-            path = '/'.join((parsed_url.hostname, path))
+            path = f"{parsed_url.hostname}/{path}"
 
     netloc = parsed_url.netloc
     if proto in ('tcp', 'ssh'):

@@ -272,8 +270,7 @@ def parse_host(addr, is_win32=False, tls=False):
         if port <= 0:
             if proto != 'ssh':
                 raise errors.DockerException(
-                    'Invalid bind address format: port is required:'
-                    ' {}'.format(addr)
+                    f'Invalid bind address format: port is required: {addr}'
                 )
             port = 22
             netloc = f'{parsed_url.netloc}:{port}'

@@ -283,7 +280,7 @@ def parse_host(addr, is_win32=False, tls=False):
 
     # Rewrite schemes to fit library internals (requests adapters)
     if proto == 'tcp':
-        proto = 'http{}'.format('s' if tls else '')
+        proto = f"http{'s' if tls else ''}"
     elif proto == 'unix':
         proto = 'http+unix'
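The bpo-754016 workaround kept in the first hunk matters because urlparse cannot find a network location in a bare "host:port" string. A quick illustration:

    from urllib.parse import urlparse

    # No "//" netloc marker: the host never ends up in .netloc (depending on
    # the Python version, "localhost" may even be parsed as the scheme).
    print(urlparse('localhost:2375').netloc)   # ''

    # Prefixing "//" and passing a default scheme gives the intended split.
    print(urlparse('//localhost:2375', 'tcp'))
    # ParseResult(scheme='tcp', netloc='localhost:2375', path='', ...)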
@@ -417,19 +414,18 @@ def parse_bytes(s):
     if suffix in units.keys() or suffix.isdigit():
         try:
             digits = float(digits_part)
-        except ValueError:
+        except ValueError as ve:
             raise errors.DockerException(
-                'Failed converting the string value for memory ({}) to'
-                ' an integer.'.format(digits_part)
-            )
+                'Failed converting the string value for memory '
+                f'({digits_part}) to an integer.'
+            ) from ve
 
         # Reconvert to long for the final result
         s = int(digits * units[suffix])
     else:
         raise errors.DockerException(
-            'The specified value for memory ({}) should specify the'
-            ' units. The postfix should be one of the `b` `k` `m` `g`'
-            ' characters'.format(s)
+            f'The specified value for memory ({s}) should specify the units. '
+            'The postfix should be one of the `b` `k` `m` `g` characters'
         )
 
     return s

@@ -465,8 +461,7 @@ def parse_env_file(env_file):
             environment[k] = v
         else:
             raise errors.DockerException(
-                'Invalid line in environment file {}:\n{}'.format(
-                    env_file, line))
+                f'Invalid line in environment file {env_file}:\n{line}')
 
     return environment
@@ -18,6 +18,7 @@
 import datetime
 import os
 import sys
+from importlib.metadata import version
 sys.path.insert(0, os.path.abspath('..'))
 

@@ -56,7 +57,7 @@ master_doc = 'index'
 # General information about the project.
 project = 'Docker SDK for Python'
 year = datetime.datetime.now().year
-copyright = '%d Docker Inc' % year
+copyright = f'{year} Docker Inc'
 author = 'Docker Inc'
 
 # The version info for the project you're documenting, acts as replacement for

@@ -64,7 +65,6 @@ author = 'Docker Inc'
 # built documents.
 #
 # see https://github.com/pypa/setuptools_scm#usage-from-sphinx
-from importlib.metadata import version
 release = version('docker')
 # for example take major/minor
 version = '.'.join(release.split('.')[:2])
@@ -3,3 +3,18 @@ requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]
 
 [tool.setuptools_scm]
 write_to = 'docker/_version.py'
+
+[tool.ruff]
+target-version = "py37"
+extend-select = [
+    "B",
+    "C",
+    "F",
+    "W",
+]
+ignore = [
+    "C901",  # too complex (there's a whole bunch of these)
+]
+
+[tool.ruff.per-file-ignores]
+"**/__init__.py" = ["F401"]
setup.py (4 changes)

@@ -30,7 +30,7 @@ extras_require = {
 }
 
 with open('./test-requirements.txt') as test_reqs_txt:
-    test_requirements = [line for line in test_reqs_txt]
+    test_requirements = list(test_reqs_txt)
 
 
 long_description = ''

@@ -48,7 +48,7 @@ setup(
     url='https://github.com/docker/docker-py',
     project_urls={
         'Documentation': 'https://docker-py.readthedocs.io',
-        'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html',  # noqa: E501
+        'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html',
         'Source': 'https://github.com/docker/docker-py',
         'Tracker': 'https://github.com/docker/docker-py/issues',
     },
@@ -1,6 +1,6 @@
 setuptools==65.5.1
 coverage==6.4.2
-flake8==4.0.1
+ruff==0.0.265
 pytest==7.1.2
 pytest-cov==3.0.0
 pytest-timeout==2.1.0
@@ -80,7 +80,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40):
     start_time = time.time()
     while not condition():
         if time.time() - start_time > timeout:
-            raise AssertionError("Timeout: %s" % condition)
+            raise AssertionError(f"Timeout: {condition}")
         time.sleep(delay)
@ -132,7 +132,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
path=base_dir,
|
path=base_dir,
|
||||||
tag=tag,
|
tag=tag,
|
||||||
)
|
)
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
c = self.client.create_container(tag, ['find', '/test', '-type', 'f'])
|
c = self.client.create_container(tag, ['find', '/test', '-type', 'f'])
|
||||||
|
|
@ -142,7 +142,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
|
|
||||||
logs = logs.decode('utf-8')
|
logs = logs.decode('utf-8')
|
||||||
|
|
||||||
assert sorted(list(filter(None, logs.split('\n')))) == sorted([
|
assert sorted(filter(None, logs.split('\n'))) == sorted([
|
||||||
'/test/#file.txt',
|
'/test/#file.txt',
|
||||||
'/test/ignored/subdir/excepted-with-spaces',
|
'/test/ignored/subdir/excepted-with-spaces',
|
||||||
'/test/ignored/subdir/excepted-file',
|
'/test/ignored/subdir/excepted-file',
|
||||||
|
|
@ -160,7 +160,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
fileobj=script, tag='buildargs', buildargs={'test': 'OK'}
|
fileobj=script, tag='buildargs', buildargs={'test': 'OK'}
|
||||||
)
|
)
|
||||||
self.tmp_imgs.append('buildargs')
|
self.tmp_imgs.append('buildargs')
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
info = self.client.inspect_image('buildargs')
|
info = self.client.inspect_image('buildargs')
|
||||||
|
|
@ -180,7 +180,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
fileobj=script, tag=tag, shmsize=shmsize
|
fileobj=script, tag=tag, shmsize=shmsize
|
||||||
)
|
)
|
||||||
self.tmp_imgs.append(tag)
|
self.tmp_imgs.append(tag)
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# There is currently no way to get the shmsize
|
# There is currently no way to get the shmsize
|
||||||
|
|
@ -198,7 +198,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
isolation='default'
|
isolation='default'
|
||||||
)
|
)
|
||||||
|
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@requires_api_version('1.23')
|
@requires_api_version('1.23')
|
||||||
|
|
@ -213,7 +213,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
fileobj=script, tag='labels', labels=labels
|
fileobj=script, tag='labels', labels=labels
|
||||||
)
|
)
|
||||||
self.tmp_imgs.append('labels')
|
self.tmp_imgs.append('labels')
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
info = self.client.inspect_image('labels')
|
info = self.client.inspect_image('labels')
|
||||||
|
|
@ -230,7 +230,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
|
|
||||||
stream = self.client.build(fileobj=script, tag='build1')
|
stream = self.client.build(fileobj=script, tag='build1')
|
||||||
self.tmp_imgs.append('build1')
|
self.tmp_imgs.append('build1')
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
stream = self.client.build(
|
stream = self.client.build(
|
||||||
|
|
@ -271,7 +271,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
fileobj=script, target='first', tag='build1'
|
fileobj=script, target='first', tag='build1'
|
||||||
)
|
)
|
||||||
self.tmp_imgs.append('build1')
|
self.tmp_imgs.append('build1')
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
info = self.client.inspect_image('build1')
|
info = self.client.inspect_image('build1')
|
||||||
|
|
@ -300,7 +300,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
)
|
)
|
||||||
|
|
||||||
self.tmp_imgs.append('dockerpytest_customnetbuild')
|
self.tmp_imgs.append('dockerpytest_customnetbuild')
|
||||||
for chunk in stream:
|
for _chunk in stream:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
assert self.client.inspect_image('dockerpytest_customnetbuild')
|
assert self.client.inspect_image('dockerpytest_customnetbuild')
|
||||||
|
|
@ -312,7 +312,7 @@ class BuildTest(BaseAPIIntegrationTest):
|
||||||
)
|
)
|
||||||
|
|
||||||
self.tmp_imgs.append('dockerpytest_nonebuild')
|
self.tmp_imgs.append('dockerpytest_nonebuild')
|
||||||
logs = [chunk for chunk in stream]
|
logs = list(stream)
|
||||||
assert 'errorDetail' in logs[-1]
|
assert 'errorDetail' in logs[-1]
|
||||||
assert logs[-1]['errorDetail']['code'] == 1
|
assert logs[-1]['errorDetail']['code'] == 1
|
||||||
|
|
||||||
|
|
@@ -365,7 +365,7 @@ class BuildTest(BaseAPIIntegrationTest):
             fileobj=script, tag=tag, squash=squash
         )
         self.tmp_imgs.append(tag)
-        for chunk in stream:
+        for _chunk in stream:
             pass

         return self.client.inspect_image(tag)

@@ -392,7 +392,7 @@ class BuildTest(BaseAPIIntegrationTest):
         expected = '{0}{2}\n{1}'.format(
             control_chars[0], control_chars[1], snippet
         )
-        assert any([line == expected for line in lines])
+        assert any(line == expected for line in lines)

     def test_build_gzip_encoding(self):
         base_dir = tempfile.mkdtemp()
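Editor's note: wrapping a comprehension in any() (ruff C419) builds the full list before testing it; the generator form lets any() short-circuit on the first match. Tiny illustration:

    lines = ['noise', 'expected', 'never evaluated']
    # The generator stops being consumed as soon as a match is found.
    assert any(line == 'expected' for line in lines)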
@@ -47,7 +47,7 @@ class ConnectionTimeoutTest(unittest.TestCase):
         # This call isn't supposed to complete, and it should fail fast.
         try:
             res = self.client.inspect_container('id')
-        except:  # noqa: E722
+        except Exception:
             pass
         end = time.time()
         assert res is None
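Editor's note: the bare except (E722) also swallowed BaseException subclasses such as KeyboardInterrupt and SystemExit; except Exception keeps the fail-fast intent without the noqa. Sketch, with a stand-in for the failing call:

    res = None
    try:
        raise TimeoutError('connection timed out')  # stands in for inspect_container
    except Exception:
        # KeyboardInterrupt and SystemExit would still propagate here.
        pass
    assert res is None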
@@ -72,6 +72,4 @@ class UnixconnTest(unittest.TestCase):
             client.close()
             del client

-            assert len(w) == 0, "No warnings produced: {}".format(
-                w[0].message
-            )
+            assert len(w) == 0, f"No warnings produced: {w[0].message}"
@@ -666,9 +666,7 @@ class ArchiveTest(BaseAPIIntegrationTest):
         test_file.seek(0)
         ctnr = self.client.create_container(
             TEST_IMG,
-            'cat {}'.format(
-                os.path.join('/vol1/', os.path.basename(test_file.name))
-            ),
+            f"cat {os.path.join('/vol1/', os.path.basename(test_file.name))}",
             volumes=['/vol1']
         )
         self.tmp_containers.append(ctnr)

@@ -826,7 +824,7 @@ class LogsTest(BaseAPIIntegrationTest):
         exitcode = self.client.wait(id)['StatusCode']
         assert exitcode == 0
         logs = self.client.logs(id)
-        assert logs == (snippet + '\n').encode(encoding='ascii')
+        assert logs == f"{snippet}\n".encode(encoding='ascii')

     def test_logs_tail_option(self):
         snippet = '''Line1

@@ -857,7 +855,7 @@ Line2'''
         exitcode = self.client.wait(id)['StatusCode']
         assert exitcode == 0

-        assert logs == (snippet + '\n').encode(encoding='ascii')
+        assert logs == f"{snippet}\n".encode(encoding='ascii')

     @pytest.mark.timeout(5)
     @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'),

@@ -878,7 +876,7 @@ Line2'''
         for chunk in generator:
             logs += chunk

-        assert logs == (snippet + '\n').encode(encoding='ascii')
+        assert logs == f"{snippet}\n".encode(encoding='ascii')

     def test_logs_with_dict_instead_of_id(self):
         snippet = 'Flowering Nights (Sakuya Iyazoi)'

@@ -891,7 +889,7 @@ Line2'''
         exitcode = self.client.wait(id)['StatusCode']
         assert exitcode == 0
         logs = self.client.logs(container)
-        assert logs == (snippet + '\n').encode(encoding='ascii')
+        assert logs == f"{snippet}\n".encode(encoding='ascii')

     def test_logs_with_tail_0(self):
         snippet = 'Flowering Nights (Sakuya Iyazoi)'

@@ -920,7 +918,7 @@ Line2'''
         logs_until_1 = self.client.logs(container, until=1)
         assert logs_until_1 == b''
         logs_until_now = self.client.logs(container, datetime.now())
-        assert logs_until_now == (snippet + '\n').encode(encoding='ascii')
+        assert logs_until_now == f"{snippet}\n".encode(encoding='ascii')


 class DiffTest(BaseAPIIntegrationTest):
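Editor's note: the recurring (snippet + '\n').encode(...) -> f"{snippet}\n".encode(...) rewrites are pure style; the produced bytes are identical. Quick check:

    snippet = 'Flowering Nights (Sakuya Iyazoi)'
    old = (snippet + '\n').encode(encoding='ascii')
    new = f"{snippet}\n".encode(encoding='ascii')
    assert old == new  # byte-for-byte identical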
@@ -1086,7 +1084,7 @@ class PortTest(BaseAPIIntegrationTest):

             ip, host_port = port_binding['HostIp'], port_binding['HostPort']

-            port_binding = port if not protocol else port + "/" + protocol
+            port_binding = port if not protocol else f"{port}/{protocol}"
             assert ip == port_bindings[port_binding][0]
             assert host_port == port_bindings[port_binding][1]
@@ -16,7 +16,7 @@ class HealthcheckTest(BaseAPIIntegrationTest):
     @helpers.requires_api_version('1.24')
     def test_healthcheck_shell_command(self):
         container = self.client.create_container(
-            TEST_IMG, 'top', healthcheck=dict(test='echo "hello world"'))
+            TEST_IMG, 'top', healthcheck={'test': 'echo "hello world"'})
         self.tmp_containers.append(container)

         res = self.client.inspect_container(container)

@@ -27,12 +27,12 @@ class HealthcheckTest(BaseAPIIntegrationTest):
     @helpers.requires_api_version('1.24')
     def test_healthcheck_passes(self):
         container = self.client.create_container(
-            TEST_IMG, 'top', healthcheck=dict(
-                test="true",
-                interval=1 * SECOND,
-                timeout=1 * SECOND,
-                retries=1,
-            ))
+            TEST_IMG, 'top', healthcheck={
+                'test': "true",
+                'interval': 1 * SECOND,
+                'timeout': 1 * SECOND,
+                'retries': 1,
+            })
         self.tmp_containers.append(container)
         self.client.start(container)
         wait_on_health_status(self.client, container, "healthy")

@@ -40,12 +40,12 @@ class HealthcheckTest(BaseAPIIntegrationTest):
     @helpers.requires_api_version('1.24')
     def test_healthcheck_fails(self):
         container = self.client.create_container(
-            TEST_IMG, 'top', healthcheck=dict(
-                test="false",
-                interval=1 * SECOND,
-                timeout=1 * SECOND,
-                retries=1,
-            ))
+            TEST_IMG, 'top', healthcheck={
+                'test': "false",
+                'interval': 1 * SECOND,
+                'timeout': 1 * SECOND,
+                'retries': 1,
+            })
         self.tmp_containers.append(container)
         self.client.start(container)
         wait_on_health_status(self.client, container, "unhealthy")

@@ -53,14 +53,14 @@ class HealthcheckTest(BaseAPIIntegrationTest):
     @helpers.requires_api_version('1.29')
     def test_healthcheck_start_period(self):
         container = self.client.create_container(
-            TEST_IMG, 'top', healthcheck=dict(
-                test="echo 'x' >> /counter.txt && "
+            TEST_IMG, 'top', healthcheck={
+                'test': "echo 'x' >> /counter.txt && "
                     "test `cat /counter.txt | wc -l` -ge 3",
-                interval=1 * SECOND,
-                timeout=1 * SECOND,
-                retries=1,
-                start_period=3 * SECOND
-            )
+                'interval': 1 * SECOND,
+                'timeout': 1 * SECOND,
+                'retries': 1,
+                'start_period': 3 * SECOND
+            }
         )

         self.tmp_containers.append(container)
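Editor's note: dict(test=..., interval=...) -> {'test': ..., 'interval': ...} is ruff's C408 (unnecessary dict call); the literal is also the only form that admits non-identifier keys. Equivalence sketch (the value of SECOND here is an assumption made for illustration, not taken from the source):

    SECOND = 1000000000  # assumed nanosecond constant, for illustration only
    literal = {
        'test': "true",
        'interval': 1 * SECOND,
        'timeout': 1 * SECOND,
        'retries': 1,
    }
    assert literal == dict(test="true", interval=1 * SECOND,
                           timeout=1 * SECOND, retries=1)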
@@ -263,10 +263,8 @@ class ImportImageTest(BaseAPIIntegrationTest):
         data = self.client.get_image(test_img)
         assert data
         output = self.client.load_image(data)
-        assert any([
-            line for line in output
-            if f'Loaded image: {test_img}' in line.get('stream', '')
-        ])
+        assert any(line for line in output
+                   if f'Loaded image: {test_img}' in line.get('stream', ''))

     @contextlib.contextmanager
     def temporary_http_file_server(self, stream):
@@ -39,7 +39,7 @@ class PluginTest(BaseAPIIntegrationTest):
             return self.client.inspect_plugin(plugin_name)
         except docker.errors.NotFound:
             prv = self.client.plugin_privileges(plugin_name)
-            for d in self.client.pull_plugin(plugin_name, prv):
+            for _d in self.client.pull_plugin(plugin_name, prv):
                 pass
         return self.client.inspect_plugin(plugin_name)

@@ -118,7 +118,7 @@ class PluginTest(BaseAPIIntegrationTest):
             pass

         prv = self.client.plugin_privileges(SSHFS)
-        logs = [d for d in self.client.pull_plugin(SSHFS, prv)]
+        logs = list(self.client.pull_plugin(SSHFS, prv))
         assert filter(lambda x: x['status'] == 'Download complete', logs)
         assert self.client.inspect_plugin(SSHFS)
         assert self.client.enable_plugin(SSHFS)

@@ -128,7 +128,7 @@ class PluginTest(BaseAPIIntegrationTest):
         pl_data = self.ensure_plugin_installed(SSHFS)
         assert pl_data['Enabled'] is False
         prv = self.client.plugin_privileges(SSHFS)
-        logs = [d for d in self.client.upgrade_plugin(SSHFS, SSHFS, prv)]
+        logs = list(self.client.upgrade_plugin(SSHFS, SSHFS, prv))
         assert filter(lambda x: x['status'] == 'Download complete', logs)
         assert self.client.inspect_plugin(SSHFS)
         assert self.client.enable_plugin(SSHFS)
@@ -127,11 +127,11 @@ class SwarmTest(BaseAPIIntegrationTest):
         assert self.init_swarm()
         with pytest.raises(docker.errors.APIError) as exc_info:
             self.client.leave_swarm()
-        exc_info.value.response.status_code == 500
+        assert exc_info.value.response.status_code == 503
         assert self.client.leave_swarm(force=True)
         with pytest.raises(docker.errors.APIError) as exc_info:
             self.client.inspect_swarm()
-        exc_info.value.response.status_code == 406
+        assert exc_info.value.response.status_code == 503
         assert self.client.leave_swarm(force=True)

     @requires_api_version('1.24')
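Editor's note: this hunk is a real bug fix surfaced by the linter (ruff B015, pointless comparison): both status-code lines computed a boolean and threw it away, so the test could never fail on them; the rewrite adds assert and corrects the expected code to 503. The failure mode in miniature:

    import pytest

    with pytest.raises(ValueError) as exc_info:
        int('not a number')
    str(exc_info.value) == 'wrong message'           # silently discarded bool
    assert 'invalid literal' in str(exc_info.value)  # actually checked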
@@ -103,8 +103,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
         if exitcode != 0:
             output = self.client.logs(container)
             raise Exception(
-                "Container exited with code {}:\n{}"
-                .format(exitcode, output))
+                f"Container exited with code {exitcode}:\n{output}")

         return container
@@ -29,7 +29,7 @@ class ContextLifecycleTest(BaseAPIIntegrationTest):
             "test", tls_cfg=docker_tls)

         # check for a context 'test' in the context store
-        assert any([ctx.Name == "test" for ctx in ContextAPI.contexts()])
+        assert any(ctx.Name == "test" for ctx in ContextAPI.contexts())
         # retrieve a context object for 'test'
         assert ContextAPI.get_context("test")
         # remove context

@@ -22,7 +22,7 @@ class TestStore:
     def setup_method(self):
         self.tmp_keys = []
         if sys.platform.startswith('linux'):
-            if shutil.which('docker-credential-' + DEFAULT_LINUX_STORE):
+            if shutil.which(f"docker-credential-{DEFAULT_LINUX_STORE}"):
                 self.store = Store(DEFAULT_LINUX_STORE)
             elif shutil.which('docker-credential-pass'):
                 self.store = Store('pass')

@@ -7,7 +7,7 @@ from unittest import mock
 @mock.patch.dict(os.environ)
 def test_create_environment_dict():
     base = {'FOO': 'bar', 'BAZ': 'foobar'}
-    os.environ = base
+    os.environ = base  # noqa: B003
     assert create_environment_dict({'FOO': 'baz'}) == {
         'FOO': 'baz', 'BAZ': 'foobar',
     }
@@ -49,7 +49,7 @@ class ContainerCollectionTest(BaseIntegrationTest):

         container = client.containers.run(
             "alpine", "sh -c 'echo \"hello\" > /insidecontainer/test'",
-            volumes=["%s:/insidecontainer" % path],
+            volumes=[f"{path}:/insidecontainer"],
             detach=True
         )
         self.tmp_containers.append(container.id)

@@ -58,7 +58,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
         name = "container_volume_test"
         out = client.containers.run(
             "alpine", "cat /insidecontainer/test",
-            volumes=["%s:/insidecontainer" % path],
+            volumes=[f"{path}:/insidecontainer"],
             name=name
         )
         self.tmp_containers.append(name)

@@ -109,7 +109,7 @@ class ContainerCollectionTest(BaseIntegrationTest):

         out = client.containers.run(
             "alpine", "echo hello",
-            log_config=dict(type='none')
+            log_config={"type": 'none'}
         )
         assert out is None

@@ -118,7 +118,7 @@ class ContainerCollectionTest(BaseIntegrationTest):

         out = client.containers.run(
             "alpine", "echo hello",
-            log_config=dict(type='json-file')
+            log_config={"type": 'json-file'}
         )
         assert out == b'hello\n'

@@ -150,7 +150,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
         out = client.containers.run(
             'alpine', 'sh -c "echo hello && echo world"', stream=True
         )
-        logs = [line for line in out]
+        logs = list(out)
         assert logs[0] == b'hello\n'
         assert logs[1] == b'world\n'

@@ -165,7 +165,7 @@ class ContainerCollectionTest(BaseIntegrationTest):

         threading.Timer(1, out.close).start()

-        logs = [line for line in out]
+        logs = list(out)

         assert len(logs) == 2
         assert logs[0] == b'hello\n'

@@ -221,7 +221,7 @@ class ContainerCollectionTest(BaseIntegrationTest):
         assert container.status == 'running'
         assert container.image == client.images.get('alpine')
         with pytest.raises(docker.errors.DockerException):
-            container.labels
+            _ = container.labels

         container.kill()
         container.remove()
@@ -88,9 +88,7 @@ class ImageCollectionTest(BaseIntegrationTest):
         client = docker.from_env(version=TEST_API_VERSION)
         images = client.images.pull('hello-world', all_tags=True)
         assert len(images) >= 1
-        assert any([
-            'hello-world:latest' in img.attrs['RepoTags'] for img in images
-        ])
+        assert any('hello-world:latest' in img.attrs['RepoTags'] for img in images)

     def test_load_error(self):
         client = docker.from_env(version=TEST_API_VERSION)

@@ -59,11 +59,11 @@ class NetworkTest(BaseIntegrationTest):
         network.connect(container)
         container.start()
         assert client.networks.get(network.id).containers == [container]
-        network_containers = list(
+        network_containers = [
             c
             for net in client.networks.list(ids=[network.id], greedy=True)
             for c in net.containers
-        )
+        ]
         assert network_containers == [container]
         network.disconnect(container)
         assert network.containers == []

@@ -12,7 +12,7 @@ class TestRegressions(BaseAPIIntegrationTest):
     def test_443_handle_nonchunked_response_in_stream(self):
         dfile = io.BytesIO()
         with pytest.raises(docker.errors.APIError) as exc:
-            for line in self.client.build(fileobj=dfile, tag="a/b/c"):
+            for _line in self.client.build(fileobj=dfile, tag="a/b/c"):
                 pass
         assert exc.value.is_error()
         dfile.close()
@@ -124,7 +124,7 @@ class BuildTest(BaseAPIIntegrationTest):
             path=base_dir,
             tag=tag,
         )
-        for chunk in stream:
+        for _chunk in stream:
             pass

         c = self.client.create_container(tag, ['find', '/test', '-type', 'f'])

@@ -134,7 +134,7 @@ class BuildTest(BaseAPIIntegrationTest):

         logs = logs.decode('utf-8')

-        assert sorted(list(filter(None, logs.split('\n')))) == sorted([
+        assert sorted(filter(None, logs.split('\n'))) == sorted([
             '/test/#file.txt',
             '/test/ignored/subdir/excepted-file',
             '/test/not-ignored'

@@ -151,7 +151,7 @@ class BuildTest(BaseAPIIntegrationTest):
             fileobj=script, tag='buildargs', buildargs={'test': 'OK'}
         )
         self.tmp_imgs.append('buildargs')
-        for chunk in stream:
+        for _chunk in stream:
             pass

         info = self.client.inspect_image('buildargs')

@@ -171,7 +171,7 @@ class BuildTest(BaseAPIIntegrationTest):
             fileobj=script, tag=tag, shmsize=shmsize
         )
         self.tmp_imgs.append(tag)
-        for chunk in stream:
+        for _chunk in stream:
             pass

         # There is currently no way to get the shmsize

@@ -189,7 +189,7 @@ class BuildTest(BaseAPIIntegrationTest):
             isolation='default'
         )

-        for chunk in stream:
+        for _chunk in stream:
             pass

     @requires_api_version('1.23')

@@ -204,7 +204,7 @@ class BuildTest(BaseAPIIntegrationTest):
             fileobj=script, tag='labels', labels=labels
         )
         self.tmp_imgs.append('labels')
-        for chunk in stream:
+        for _chunk in stream:
             pass

         info = self.client.inspect_image('labels')

@@ -221,7 +221,7 @@ class BuildTest(BaseAPIIntegrationTest):

         stream = self.client.build(fileobj=script, tag='build1')
         self.tmp_imgs.append('build1')
-        for chunk in stream:
+        for _chunk in stream:
             pass

         stream = self.client.build(

@@ -262,7 +262,7 @@ class BuildTest(BaseAPIIntegrationTest):
             fileobj=script, target='first', tag='build1'
         )
         self.tmp_imgs.append('build1')
-        for chunk in stream:
+        for _chunk in stream:
             pass

         info = self.client.inspect_image('build1')

@@ -291,7 +291,7 @@ class BuildTest(BaseAPIIntegrationTest):
         )

         self.tmp_imgs.append('dockerpytest_customnetbuild')
-        for chunk in stream:
+        for _chunk in stream:
             pass

         assert self.client.inspect_image('dockerpytest_customnetbuild')

@@ -303,7 +303,7 @@ class BuildTest(BaseAPIIntegrationTest):
         )

         self.tmp_imgs.append('dockerpytest_nonebuild')
-        logs = [chunk for chunk in stream]
+        logs = list(stream)
         assert 'errorDetail' in logs[-1]
         assert logs[-1]['errorDetail']['code'] == 1

@@ -356,7 +356,7 @@ class BuildTest(BaseAPIIntegrationTest):
             fileobj=script, tag=tag, squash=squash
         )
         self.tmp_imgs.append(tag)
-        for chunk in stream:
+        for _chunk in stream:
             pass

         return self.client.inspect_image(tag)

@@ -383,7 +383,7 @@ class BuildTest(BaseAPIIntegrationTest):
         expected = '{0}{2}\n{1}'.format(
             control_chars[0], control_chars[1], snippet
         )
-        assert any([line == expected for line in lines])
+        assert any(line == expected for line in lines)

     def test_build_gzip_encoding(self):
         base_dir = tempfile.mkdtemp()
@@ -110,8 +110,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest):
         if exitcode != 0:
             output = self.client.logs(container)
             raise Exception(
-                "Container exited with code {}:\n{}"
-                .format(exitcode, output))
+                f"Container exited with code {exitcode}:\n{output}")

         return container
@@ -89,7 +89,7 @@ class BuildTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'build',
+            f"{url_prefix}build",
             stream=True,
             data=None,
             headers=expected_headers,

@@ -193,10 +193,10 @@ class BuildTest(BaseAPIClientTest):
             'foo/Dockerfile.foo', None
         )
         assert process_dockerfile(
-            '../Dockerfile', pre(base + '\\foo')
+            '../Dockerfile', pre(f"{base}\\foo")
         )[1] is not None
         assert process_dockerfile(
-            '../baz/Dockerfile.baz', pre(base + '/baz')
+            '../baz/Dockerfile.baz', pre(f"{base}/baz")
         ) == ('../baz/Dockerfile.baz', None)

     def test_process_dockerfile(self):

@@ -218,8 +218,8 @@ class BuildTest(BaseAPIClientTest):
             'foo/Dockerfile.foo', None
         )
         assert process_dockerfile(
-            '../Dockerfile', base + '/foo'
+            '../Dockerfile', f"{base}/foo"
         )[1] is not None
-        assert process_dockerfile('../baz/Dockerfile.baz', base + '/baz') == (
+        assert process_dockerfile('../baz/Dockerfile.baz', f"{base}/baz") == (
             '../baz/Dockerfile.baz', None
         )
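Editor's note: from here on, the unit-test hunks are almost entirely the same mechanical rewrite: '...'.format(...), '%s' % value, and url_prefix + 'path' concatenations all become f-strings (the first two map to ruff's UP032/UP031; the concatenations appear to have been converted by hand to match the style). One representative pair, with placeholder values assumed for illustration:

    url_prefix = 'http+docker://localhost/v1.41/'  # assumed value
    image_id = 'e9aa60c60128'
    assert (url_prefix + 'images/' + image_id + '/tag'
            == f"{url_prefix}images/{image_id}/tag")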
@@ -32,9 +32,7 @@ class ExecTest(BaseAPIClientTest):
         self.client.exec_start(fake_api.FAKE_EXEC_ID)

         args = fake_request.call_args
-        assert args[0][1] == url_prefix + 'exec/{}/start'.format(
-            fake_api.FAKE_EXEC_ID
-        )
+        assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start"

         assert json.loads(args[1]['data']) == {
             'Tty': False,

@@ -51,9 +49,7 @@ class ExecTest(BaseAPIClientTest):
         self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True)

         args = fake_request.call_args
-        assert args[0][1] == url_prefix + 'exec/{}/start'.format(
-            fake_api.FAKE_EXEC_ID
-        )
+        assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start"

         assert json.loads(args[1]['data']) == {
             'Tty': False,

@@ -68,16 +64,14 @@ class ExecTest(BaseAPIClientTest):
         self.client.exec_inspect(fake_api.FAKE_EXEC_ID)

         args = fake_request.call_args
-        assert args[0][1] == url_prefix + 'exec/{}/json'.format(
-            fake_api.FAKE_EXEC_ID
-        )
+        assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/json"

     def test_exec_resize(self):
         self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60)

         fake_request.assert_called_with(
             'POST',
-            url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize',
+            f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/resize",
             params={'h': 20, 'w': 60},
             timeout=DEFAULT_TIMEOUT_SECONDS
         )
@@ -12,7 +12,7 @@ from .api_test import (

 class ImageTest(BaseAPIClientTest):
     def test_image_viz(self):
-        with pytest.raises(Exception):
+        with pytest.raises(Exception):  # noqa: B017
             self.client.images('busybox', viz=True)
             self.fail('Viz output should not be supported!')

@@ -21,7 +21,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/json',
+            f"{url_prefix}images/json",
             params={'only_ids': 0, 'all': 1},
             timeout=DEFAULT_TIMEOUT_SECONDS
         )

@@ -31,7 +31,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/json',
+            f"{url_prefix}images/json",
             params={'only_ids': 0, 'all': 0,
                     'filters': '{"reference": ["foo:bar"]}'},
             timeout=DEFAULT_TIMEOUT_SECONDS

@@ -42,7 +42,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/json',
+            f"{url_prefix}images/json",
             params={'only_ids': 1, 'all': 1},
             timeout=DEFAULT_TIMEOUT_SECONDS
         )

@@ -52,7 +52,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/json',
+            f"{url_prefix}images/json",
             params={'only_ids': 1, 'all': 0},
             timeout=DEFAULT_TIMEOUT_SECONDS
         )

@@ -62,7 +62,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/json',
+            f"{url_prefix}images/json",
             params={'only_ids': 0, 'all': 0,
                     'filters': '{"dangling": ["true"]}'},
             timeout=DEFAULT_TIMEOUT_SECONDS

@@ -72,7 +72,7 @@ class ImageTest(BaseAPIClientTest):
         self.client.pull('joffrey/test001')

         args = fake_request.call_args
-        assert args[0][1] == url_prefix + 'images/create'
+        assert args[0][1] == f"{url_prefix}images/create"
         assert args[1]['params'] == {
             'tag': 'latest', 'fromImage': 'joffrey/test001'
         }

@@ -82,7 +82,7 @@ class ImageTest(BaseAPIClientTest):
         self.client.pull('joffrey/test001', stream=True)

         args = fake_request.call_args
-        assert args[0][1] == url_prefix + 'images/create'
+        assert args[0][1] == f"{url_prefix}images/create"
         assert args[1]['params'] == {
             'tag': 'latest', 'fromImage': 'joffrey/test001'
         }

@@ -93,7 +93,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'commit',
+            f"{url_prefix}commit",
             data='{}',
             headers={'Content-Type': 'application/json'},
             params={

@@ -113,7 +113,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'DELETE',
-            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID,
+            f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}",
             params={'force': False, 'noprune': False},
             timeout=DEFAULT_TIMEOUT_SECONDS
         )

@@ -123,7 +123,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/test_image/history',
+            f"{url_prefix}images/test_image/history",
             timeout=DEFAULT_TIMEOUT_SECONDS
         )

@@ -136,7 +136,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/create',
+            f"{url_prefix}images/create",
             params={
                 'repo': fake_api.FAKE_REPO_NAME,
                 'tag': fake_api.FAKE_TAG_NAME,

@@ -157,7 +157,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/create',
+            f"{url_prefix}images/create",
             params={
                 'repo': fake_api.FAKE_REPO_NAME,
                 'tag': fake_api.FAKE_TAG_NAME,

@@ -179,7 +179,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/create',
+            f"{url_prefix}images/create",
             params={
                 'repo': fake_api.FAKE_REPO_NAME,
                 'tag': fake_api.FAKE_TAG_NAME,

@@ -194,7 +194,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/test_image/json',
+            f"{url_prefix}images/test_image/json",
             timeout=DEFAULT_TIMEOUT_SECONDS
         )

@@ -212,7 +212,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/test_image/push',
+            f"{url_prefix}images/test_image/push",
             params={
                 'tag': None
             },

@@ -231,7 +231,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/test_image/push',
+            f"{url_prefix}images/test_image/push",
             params={
                 'tag': fake_api.FAKE_TAG_NAME,
             },

@@ -255,7 +255,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/test_image/push',
+            f"{url_prefix}images/test_image/push",
             params={
                 'tag': fake_api.FAKE_TAG_NAME,
             },

@@ -273,7 +273,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/test_image/push',
+            f"{url_prefix}images/test_image/push",
             params={
                 'tag': None
             },

@@ -288,7 +288,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
+            f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
             params={
                 'tag': None,
                 'repo': 'repo',

@@ -306,7 +306,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
+            f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
             params={
                 'tag': 'tag',
                 'repo': 'repo',

@@ -321,7 +321,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag',
+            f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag",
             params={
                 'tag': None,
                 'repo': 'repo',

@@ -335,7 +335,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/get',
+            f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/get",
             stream=True,
             timeout=DEFAULT_TIMEOUT_SECONDS
         )

@@ -345,7 +345,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/load',
+            f"{url_prefix}images/load",
             data='Byte Stream....',
             stream=True,
             params={},

@@ -357,7 +357,7 @@ class ImageTest(BaseAPIClientTest):

         fake_request.assert_called_with(
             'POST',
-            url_prefix + 'images/load',
+            f"{url_prefix}images/load",
             data='Byte Stream....',
             stream=True,
             params={'quiet': True},
@@ -28,7 +28,7 @@ class NetworkTest(BaseAPIClientTest):
         with mock.patch('docker.api.client.APIClient.get', get):
             assert self.client.networks() == networks

-        assert get.call_args[0][0] == url_prefix + 'networks'
+        assert get.call_args[0][0] == f"{url_prefix}networks"

         filters = json.loads(get.call_args[1]['params']['filters'])
         assert not filters

@@ -54,7 +54,7 @@ class NetworkTest(BaseAPIClientTest):
             result = self.client.create_network('foo')
             assert result == network_data

-            assert post.call_args[0][0] == url_prefix + 'networks/create'
+            assert post.call_args[0][0] == f"{url_prefix}networks/create"

             assert json.loads(post.call_args[1]['data']) == {"Name": "foo"}

@@ -97,7 +97,7 @@ class NetworkTest(BaseAPIClientTest):
             self.client.remove_network(network_id)

         args = delete.call_args
-        assert args[0][0] == url_prefix + f'networks/{network_id}'
+        assert args[0][0] == f"{url_prefix}networks/{network_id}"

     def test_inspect_network(self):
         network_id = 'abc12345'

@@ -117,7 +117,7 @@ class NetworkTest(BaseAPIClientTest):
             assert result == network_data

         args = get.call_args
-        assert args[0][0] == url_prefix + f'networks/{network_id}'
+        assert args[0][0] == f"{url_prefix}networks/{network_id}"

     def test_connect_container_to_network(self):
         network_id = 'abc12345'

@@ -135,7 +135,7 @@ class NetworkTest(BaseAPIClientTest):
             )

         assert post.call_args[0][0] == (
-            url_prefix + f'networks/{network_id}/connect'
+            f"{url_prefix}networks/{network_id}/connect"
         )

         assert json.loads(post.call_args[1]['data']) == {

@@ -158,7 +158,7 @@ class NetworkTest(BaseAPIClientTest):
                 container={'Id': container_id}, net_id=network_id)

         assert post.call_args[0][0] == (
-            url_prefix + f'networks/{network_id}/disconnect'
+            f"{url_prefix}networks/{network_id}/disconnect"
         )
         assert json.loads(post.call_args[1]['data']) == {
             'Container': container_id
@@ -86,9 +86,7 @@ def fake_read_from_socket(self, response, stream, tty=False, demux=False):


 url_base = f'{fake_api.prefix}/'
-url_prefix = '{}v{}/'.format(
-    url_base,
-    docker.constants.DEFAULT_DOCKER_API_VERSION)
+url_prefix = f'{url_base}v{docker.constants.DEFAULT_DOCKER_API_VERSION}/'


 class BaseAPIClientTest(unittest.TestCase):
@@ -130,22 +128,18 @@ class DockerApiTest(BaseAPIClientTest):

     def test_url_valid_resource(self):
         url = self.client._url('/hello/{0}/world', 'somename')
-        assert url == '{}{}'.format(url_prefix, 'hello/somename/world')
+        assert url == f"{url_prefix}hello/somename/world"

         url = self.client._url(
             '/hello/{0}/world/{1}', 'somename', 'someothername'
         )
-        assert url == '{}{}'.format(
-            url_prefix, 'hello/somename/world/someothername'
-        )
+        assert url == f"{url_prefix}hello/somename/world/someothername"

         url = self.client._url('/hello/{0}/world', 'some?name')
-        assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world')
+        assert url == f"{url_prefix}hello/some%3Fname/world"

         url = self.client._url("/images/{0}/push", "localhost:5000/image")
-        assert url == '{}{}'.format(
-            url_prefix, 'images/localhost:5000/image/push'
-        )
+        assert url == f"{url_prefix}images/localhost:5000/image/push"

     def test_url_invalid_resource(self):
         with pytest.raises(ValueError):

@@ -153,20 +147,20 @@ class DockerApiTest(BaseAPIClientTest):

     def test_url_no_resource(self):
         url = self.client._url('/simple')
-        assert url == '{}{}'.format(url_prefix, 'simple')
+        assert url == f"{url_prefix}simple"

     def test_url_unversioned_api(self):
         url = self.client._url(
             '/hello/{0}/world', 'somename', versioned_api=False
         )
-        assert url == '{}{}'.format(url_base, 'hello/somename/world')
+        assert url == f"{url_base}hello/somename/world"

     def test_version(self):
         self.client.version()

         fake_request.assert_called_with(
             'GET',
-            url_prefix + 'version',
+            f"{url_prefix}version",
             timeout=DEFAULT_TIMEOUT_SECONDS
         )
||||||
|
|
@ -175,7 +169,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
|
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'GET',
|
'GET',
|
||||||
url_base + 'version',
|
f"{url_base}version",
|
||||||
timeout=DEFAULT_TIMEOUT_SECONDS
|
timeout=DEFAULT_TIMEOUT_SECONDS
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -194,7 +188,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
|
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'GET',
|
'GET',
|
||||||
url_prefix + 'info',
|
f"{url_prefix}info",
|
||||||
timeout=DEFAULT_TIMEOUT_SECONDS
|
timeout=DEFAULT_TIMEOUT_SECONDS
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -203,7 +197,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
|
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'GET',
|
'GET',
|
||||||
url_prefix + 'images/search',
|
f"{url_prefix}images/search",
|
||||||
params={'term': 'busybox'},
|
params={'term': 'busybox'},
|
||||||
timeout=DEFAULT_TIMEOUT_SECONDS
|
timeout=DEFAULT_TIMEOUT_SECONDS
|
||||||
)
|
)
|
||||||
|
|
@ -212,7 +206,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
self.client.login('sakuya', 'izayoi')
|
self.client.login('sakuya', 'izayoi')
|
||||||
args = fake_request.call_args
|
args = fake_request.call_args
|
||||||
assert args[0][0] == 'POST'
|
assert args[0][0] == 'POST'
|
||||||
assert args[0][1] == url_prefix + 'auth'
|
assert args[0][1] == f"{url_prefix}auth"
|
||||||
assert json.loads(args[1]['data']) == {
|
assert json.loads(args[1]['data']) == {
|
||||||
'username': 'sakuya', 'password': 'izayoi'
|
'username': 'sakuya', 'password': 'izayoi'
|
||||||
}
|
}
|
||||||
|
|
@ -229,7 +223,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
|
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'GET',
|
'GET',
|
||||||
url_prefix + 'events',
|
f"{url_prefix}events",
|
||||||
params={'since': None, 'until': None, 'filters': None},
|
params={'since': None, 'until': None, 'filters': None},
|
||||||
stream=True,
|
stream=True,
|
||||||
timeout=None
|
timeout=None
|
||||||
|
|
@ -245,7 +239,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
|
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'GET',
|
'GET',
|
||||||
url_prefix + 'events',
|
f"{url_prefix}events",
|
||||||
params={
|
params={
|
||||||
'since': ts - 10,
|
'since': ts - 10,
|
||||||
'until': ts + 10,
|
'until': ts + 10,
|
||||||
|
|
@ -264,7 +258,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
expected_filters = docker.utils.convert_filters(filters)
|
expected_filters = docker.utils.convert_filters(filters)
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'GET',
|
'GET',
|
||||||
url_prefix + 'events',
|
f"{url_prefix}events",
|
||||||
params={
|
params={
|
||||||
'since': None,
|
'since': None,
|
||||||
'until': None,
|
'until': None,
|
||||||
|
|
@ -318,7 +312,7 @@ class DockerApiTest(BaseAPIClientTest):
|
||||||
|
|
||||||
fake_request.assert_called_with(
|
fake_request.assert_called_with(
|
||||||
'DELETE',
|
'DELETE',
|
||||||
url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID,
|
f"{url_prefix}containers/{fake_api.FAKE_CONTAINER_ID}",
|
||||||
params={'v': False, 'link': True, 'force': False},
|
params={'v': False, 'link': True, 'force': False},
|
||||||
timeout=DEFAULT_TIMEOUT_SECONDS
|
timeout=DEFAULT_TIMEOUT_SECONDS
|
||||||
)
|
)
|
||||||
|
|
@@ -332,15 +326,15 @@ class DockerApiTest(BaseAPIClientTest):
             self.client.create_host_config(security_opt='wrong')

     def test_stream_helper_decoding(self):
-        status_code, content = fake_api.fake_responses[url_prefix + 'events']()
+        status_code, content = fake_api.fake_responses[f"{url_prefix}events"]()
         content_str = json.dumps(content)
         content_str = content_str.encode('utf-8')
         body = io.BytesIO(content_str)

         # mock a stream interface
         raw_resp = urllib3.HTTPResponse(body=body)
-        setattr(raw_resp._fp, 'chunked', True)
-        setattr(raw_resp._fp, 'chunk_left', len(body.getvalue()) - 1)
+        raw_resp._fp.chunked = True
+        raw_resp._fp.chunk_left = len(body.getvalue()) - 1

         # pass `decode=False` to the helper
         raw_resp._fp.seek(0)

@@ -355,7 +349,7 @@ class DockerApiTest(BaseAPIClientTest):
         assert result == content

         # non-chunked response, pass `decode=False` to the helper
-        setattr(raw_resp._fp, 'chunked', False)
+        raw_resp._fp.chunked = False
         raw_resp._fp.seek(0)
         resp = response(status_code=status_code, content=content, raw=raw_resp)
         result = next(self.client._stream_helper(resp))
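Editor's note: setattr with a literal attribute name is ruff's B010; plain attribute assignment does the same thing and is visible to static analysis. Sketch:

    class Raw:
        pass

    raw = Raw()
    raw.chunked = True               # preferred form
    setattr(raw, 'chunk_left', 42)   # what B010 flags: the name is a constant
    assert raw.chunked and raw.chunk_left == 42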
@@ -443,7 +437,7 @@ class UnixSocketStreamTest(unittest.TestCase):
         lines = []
         for i in range(0, 50):
             line = str(i).encode()
-            lines += [('%x' % len(line)).encode(), line]
+            lines += [f'{len(line):x}'.encode(), line]
         lines.append(b'0')
         lines.append(b'')

@@ -454,7 +448,7 @@ class UnixSocketStreamTest(unittest.TestCase):
         ) + b'\r\n'.join(lines)

         with APIClient(
-                base_url="http+unix://" + self.socket_file,
+                base_url=f"http+unix://{self.socket_file}",
                 version=DEFAULT_DOCKER_API_VERSION) as client:
             for i in range(5):
                 try:
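Editor's note: ('%x' % len(line)).encode() carries its printf spec into the f-string as a format spec, f'{len(line):x}'.encode(); :x is the lowercase-hex conversion the chunked transfer framing needs. Worked check:

    line = b'12345'
    assert '%x' % len(line) == f'{len(line):x}' == '5'
    assert f'{len(line):x}'.encode() == b'5'  # hex chunk-size prefix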
@@ -490,8 +484,7 @@ class TCPSocketStreamTest(unittest.TestCase):
         cls.thread = threading.Thread(target=cls.server.serve_forever)
         cls.thread.daemon = True
         cls.thread.start()
-        cls.address = 'http://{}:{}'.format(
-            socket.gethostname(), cls.server.server_address[1])
+        cls.address = f'http://{socket.gethostname()}:{cls.server.server_address[1]}'

     @classmethod
     def teardown_class(cls):

@@ -588,7 +581,7 @@ class TCPSocketStreamTest(unittest.TestCase):

     def test_read_from_socket_no_stream_no_tty(self):
         res = self.request(stream=False, tty=False, demux=False)
-        res == self.stdout_data + self.stderr_data
+        assert res == self.stdout_data + self.stderr_data

     def test_read_from_socket_no_stream_no_tty_demux(self):
         res = self.request(stream=False, tty=False, demux=True)
@@ -600,7 +593,7 @@ class UserAgentTest(unittest.TestCase):
         self.patcher = mock.patch.object(
             APIClient,
             'send',
-            return_value=fake_resp("GET", "%s/version" % fake_api.prefix)
+            return_value=fake_resp("GET", f"{fake_api.prefix}/version")
         )
         self.mock_send = self.patcher.start()

@@ -613,7 +606,7 @@ class UserAgentTest(unittest.TestCase):

         assert self.mock_send.call_count == 1
         headers = self.mock_send.call_args[0][0].headers
-        expected = 'docker-sdk-python/%s' % docker.__version__
+        expected = f'docker-sdk-python/{docker.__version__}'
         assert headers['User-Agent'] == expected

     def test_custom_user_agent(self):
@ -14,7 +14,7 @@ class VolumeTest(BaseAPIClientTest):
|
||||||
args = fake_request.call_args
|
args = fake_request.call_args
|
||||||
|
|
||||||
assert args[0][0] == 'GET'
|
assert args[0][0] == 'GET'
|
||||||
assert args[0][1] == url_prefix + 'volumes'
|
assert args[0][1] == f"{url_prefix}volumes"
|
||||||
|
|
||||||
def test_list_volumes_and_filters(self):
|
def test_list_volumes_and_filters(self):
|
||||||
volumes = self.client.volumes(filters={'dangling': True})
|
volumes = self.client.volumes(filters={'dangling': True})
|
||||||
|
|
@@ -23,7 +23,7 @@ class VolumeTest(BaseAPIClientTest):
         args = fake_request.call_args

         assert args[0][0] == 'GET'
-        assert args[0][1] == url_prefix + 'volumes'
+        assert args[0][1] == f"{url_prefix}volumes"
         assert args[1] == {'params': {'filters': '{"dangling": ["true"]}'},
                            'timeout': 60}
@@ -37,7 +37,7 @@ class VolumeTest(BaseAPIClientTest):
         args = fake_request.call_args

         assert args[0][0] == 'POST'
-        assert args[0][1] == url_prefix + 'volumes/create'
+        assert args[0][1] == f"{url_prefix}volumes/create"
         assert json.loads(args[1]['data']) == {'Name': name}

     @requires_api_version('1.23')
@@ -63,7 +63,7 @@ class VolumeTest(BaseAPIClientTest):
         args = fake_request.call_args

         assert args[0][0] == 'POST'
-        assert args[0][1] == url_prefix + 'volumes/create'
+        assert args[0][1] == f"{url_prefix}volumes/create"
         data = json.loads(args[1]['data'])
         assert 'Driver' in data
         assert data['Driver'] == driver_name
@@ -290,9 +290,10 @@ class LoadConfigTest(unittest.TestCase):
         folder = tempfile.mkdtemp()
         self.addCleanup(shutil.rmtree, folder)

-        dockercfg_path = os.path.join(folder,
-                                      '.{}.dockercfg'.format(
-                                          random.randrange(100000)))
+        dockercfg_path = os.path.join(
+            folder,
+            f'.{random.randrange(100000)}.dockercfg',
+        )
         registry = 'https://your.private.registry.io'
         auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii')
         config = {
@@ -777,8 +778,8 @@ class InMemoryStore(credentials.Store):
     def get(self, server):
         try:
             return self.__store[server]
-        except KeyError:
-            raise credentials.errors.CredentialsNotFound()
+        except KeyError as ke:
+            raise credentials.errors.CredentialsNotFound() from ke

     def store(self, server, username, secret):
         self.__store[server] = {
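Adding `from ke` does more than appease the linter (flake8-bugbear's B904, also implemented by ruff): it records the KeyError as the cause of the new exception, so tracebacks read "The above exception was the direct cause of the following exception" instead of the misleading "During handling of the above exception, another exception occurred". A runnable sketch with a stand-in exception class:

    class CredentialsNotFound(Exception):
        """Stand-in for credentials.errors.CredentialsNotFound."""

    store = {}

    def get(server):
        try:
            return store[server]
        except KeyError as ke:
            # `from ke` sets __cause__ on the raised exception
            raise CredentialsNotFound(server) from ke

    try:
        get("registry.example.com")  # hypothetical server key
    except CredentialsNotFound as exc:
        assert isinstance(exc.__cause__, KeyError)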
@@ -85,10 +85,7 @@ class ClientTest(unittest.TestCase):
             mock_obj.return_value.urlopen.return_value.status = 200
             client.ping()

-            base_url = "{base_url}/v{version}/_ping".format(
-                base_url=client.api.base_url,
-                version=client.api._version
-            )
+            base_url = f"{client.api.base_url}/v{client.api._version}/_ping"

             mock_obj.assert_called_once_with(base_url,
                                              "/var/run/docker.sock",
@@ -124,10 +121,7 @@ class ClientTest(unittest.TestCase):
             mock_obj.return_value.urlopen.return_value.status = 200
             client.ping()

-            base_url = "{base_url}/v{version}/_ping".format(
-                base_url=client.api.base_url,
-                version=client.api._version
-            )
+            base_url = f"{client.api.base_url}/v{client.api._version}/_ping"

             mock_obj.assert_called_once_with(base_url,
                                              "/var/run/docker.sock",
@@ -159,7 +153,8 @@ class FromEnvTest(unittest.TestCase):
         self.os_environ = os.environ.copy()

     def tearDown(self):
-        os.environ = self.os_environ
+        os.environ.clear()
+        os.environ.update(self.os_environ)

     def test_from_env(self):
         """Test that environment variables are passed through to
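The tearDown rewrite fixes a subtle restore bug: `os.environ = self.os_environ` merely rebinds the module attribute to the saved copy (a plain dict), so the live environment mapping, and the `putenv`/`unsetenv` calls it performs on mutation, are never touched. Clearing and updating the existing object restores state in place. A sketch of the save/restore pattern, with a hypothetical test value:

    import os

    saved = os.environ.copy()                             # snapshot is a plain dict
    os.environ["DOCKER_HOST"] = "tcp://127.0.0.1:2375"    # hypothetical test value
    try:
        pass  # code under test reads os.environ here
    finally:
        os.environ.clear()        # mutate the live mapping in place...
        os.environ.update(saved)  # ...so the process environment stays in sync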
@@ -198,10 +193,7 @@ class FromEnvTest(unittest.TestCase):
             mock_obj.return_value.urlopen.return_value.status = 200
             client.ping()

-            base_url = "{base_url}/v{version}/_ping".format(
-                base_url=client.api.base_url,
-                version=client.api._version
-            )
+            base_url = f"{client.api.base_url}/v{client.api._version}/_ping"

             mock_obj.assert_called_once_with(base_url,
                                              "/var/run/docker.sock",
@@ -235,10 +227,7 @@ class FromEnvTest(unittest.TestCase):
             mock_obj.return_value.urlopen.return_value.status = 200
             client.ping()

-            base_url = "{base_url}/v{version}/_ping".format(
-                base_url=client.api.base_url,
-                version=client.api._version
-            )
+            base_url = f"{client.api.base_url}/v{client.api._version}/_ping"

             mock_obj.assert_called_once_with(base_url,
                                              "/var/run/docker.sock",
@@ -13,7 +13,7 @@ class BaseContextTest(unittest.TestCase):
     )
     def test_url_compatibility_on_linux(self):
         c = Context("test")
-        assert c.Host == DEFAULT_UNIX_SOCKET.strip("http+")
+        assert c.Host == DEFAULT_UNIX_SOCKET[5:]

     @pytest.mark.skipif(
         not IS_WINDOWS_PLATFORM, reason='Windows specific path check'
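`DEFAULT_UNIX_SOCKET[5:]` is a correctness fix, assuming the constant carries a literal `http+` scheme prefix: `str.strip` removes any run of the given characters from both ends, not a prefix string, so the old expression only worked by coincidence. A demonstration of the pitfall:

    # strip() treats "http+" as the character set {'h', 't', 'p', '+'}:
    s = "http+unix:///var/run/docker.sock"
    assert s.strip("http+") == "unix:///var/run/docker.sock"  # happens to work

    t = "http+docker://localhost"                             # hypothetical URL
    assert t.strip("http+") == "docker://localhos"            # trailing 't' eaten!

    # Slicing off exactly the five-character prefix is unambiguous:
    assert s[5:] == "unix:///var/run/docker.sock"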
@@ -45,5 +45,7 @@ class BaseContextTest(unittest.TestCase):
         ctx = ContextAPI.inspect_context()
         assert ctx["Name"] == "default"
         assert ctx["Metadata"]["StackOrchestrator"] == "swarm"
-        assert ctx["Endpoints"]["docker"]["Host"] in [
-            DEFAULT_NPIPE, DEFAULT_UNIX_SOCKET.strip("http+")]
+        assert ctx["Endpoints"]["docker"]["Host"] in (
+            DEFAULT_NPIPE,
+            DEFAULT_UNIX_SOCKET[5:],
+        )
@@ -4,10 +4,10 @@ from . import fake_stat

 CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}'

-FAKE_CONTAINER_ID = '81cf499cc928ce3fedc250a080d2b9b978df20e4517304c45211e8a68b33e254'  # noqa: E501
+FAKE_CONTAINER_ID = '81cf499cc928ce3fedc250a080d2b9b978df20e4517304c45211e8a68b33e254'
 FAKE_IMAGE_ID = 'sha256:fe7a8fc91d3f17835cbb3b86a1c60287500ab01a53bc79c4497d09f07a3f0688'  # noqa: E501
-FAKE_EXEC_ID = 'b098ec855f10434b5c7c973c78484208223a83f663ddaefb0f02a242840cb1c7'  # noqa: E501
+FAKE_EXEC_ID = 'b098ec855f10434b5c7c973c78484208223a83f663ddaefb0f02a242840cb1c7'
-FAKE_NETWORK_ID = '1999cfb42e414483841a125ade3c276c3cb80cb3269b14e339354ac63a31b02c'  # noqa: E501
+FAKE_NETWORK_ID = '1999cfb42e414483841a125ade3c276c3cb80cb3269b14e339354ac63a31b02c'
 FAKE_IMAGE_NAME = 'test_image'
 FAKE_TARBALL_PATH = '/path/to/tarball'
 FAKE_REPO_NAME = 'repo'
@@ -617,17 +617,11 @@ fake_responses = {
         get_fake_volume_list,
     (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'):
         get_fake_volume,
-    ('{1}/{0}/volumes/{2}'.format(
-        CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
-    ), 'GET'):
+    (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'GET'):
         get_fake_volume,
-    ('{1}/{0}/volumes/{2}'.format(
-        CURRENT_VERSION, prefix, FAKE_VOLUME_NAME
-    ), 'DELETE'):
+    (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'DELETE'):
         fake_remove_volume,
-    ('{1}/{0}/nodes/{2}/update?version=1'.format(
-        CURRENT_VERSION, prefix, FAKE_NODE_ID
-    ), 'POST'):
+    (f'{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1', 'POST'):
         post_fake_update_node,
     (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'):
         post_fake_join_swarm,
@@ -635,21 +629,13 @@ fake_responses = {
         get_fake_network_list,
     (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'):
         post_fake_network,
-    ('{1}/{0}/networks/{2}'.format(
-        CURRENT_VERSION, prefix, FAKE_NETWORK_ID
-    ), 'GET'):
+    (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'GET'):
         get_fake_network,
-    ('{1}/{0}/networks/{2}'.format(
-        CURRENT_VERSION, prefix, FAKE_NETWORK_ID
-    ), 'DELETE'):
+    (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'DELETE'):
         delete_fake_network,
-    ('{1}/{0}/networks/{2}/connect'.format(
-        CURRENT_VERSION, prefix, FAKE_NETWORK_ID
-    ), 'POST'):
+    (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect', 'POST'):
         post_fake_network_connect,
-    ('{1}/{0}/networks/{2}/disconnect'.format(
-        CURRENT_VERSION, prefix, FAKE_NETWORK_ID
-    ), 'POST'):
+    (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect', 'POST'):
         post_fake_network_disconnect,
     f'{prefix}/{CURRENT_VERSION}/secrets/create':
         post_fake_secret,
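Besides modernizing syntax, these key rewrites remove a readability trap: the old templates used reordered positional indices (`'{1}/{0}/...'`), forcing the reader to map index 1 to the second argument and so on. With f-strings each value appears at its point of use. An illustration with hypothetical stand-ins for the module globals:

    prefix = "http+docker://localhost"   # hypothetical stand-in values
    version = "v1.41"
    name = "somevolume"

    old = '{1}/{0}/volumes/{2}'.format(version, prefix, name)  # indices read out of order
    new = f'{prefix}/{version}/volumes/{name}'                 # values inline, in order
    assert old == new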
@@ -31,77 +31,77 @@ class ContainerCollectionTest(unittest.TestCase):
         )

     def test_create_container_args(self):
-        create_kwargs = _create_container_args(dict(
-            image='alpine',
-            command='echo hello world',
-            blkio_weight_device=[{'Path': 'foo', 'Weight': 3}],
-            blkio_weight=2,
-            cap_add=['foo'],
-            cap_drop=['bar'],
-            cgroup_parent='foobar',
-            cgroupns='host',
-            cpu_period=1,
-            cpu_quota=2,
-            cpu_shares=5,
-            cpuset_cpus='0-3',
-            detach=False,
-            device_read_bps=[{'Path': 'foo', 'Rate': 3}],
-            device_read_iops=[{'Path': 'foo', 'Rate': 3}],
-            device_write_bps=[{'Path': 'foo', 'Rate': 3}],
-            device_write_iops=[{'Path': 'foo', 'Rate': 3}],
-            devices=['/dev/sda:/dev/xvda:rwm'],
-            dns=['8.8.8.8'],
-            domainname='example.com',
-            dns_opt=['foo'],
-            dns_search=['example.com'],
-            entrypoint='/bin/sh',
-            environment={'FOO': 'BAR'},
-            extra_hosts={'foo': '1.2.3.4'},
-            group_add=['blah'],
-            ipc_mode='foo',
-            kernel_memory=123,
-            labels={'key': 'value'},
-            links={'foo': 'bar'},
-            log_config={'Type': 'json-file', 'Config': {}},
-            lxc_conf={'foo': 'bar'},
-            healthcheck={'test': 'true'},
-            hostname='somehost',
-            mac_address='abc123',
-            mem_limit=123,
-            mem_reservation=123,
-            mem_swappiness=2,
-            memswap_limit=456,
-            name='somename',
-            network_disabled=False,
-            network='foo',
-            network_driver_opt={'key1': 'a'},
-            oom_kill_disable=True,
-            oom_score_adj=5,
-            pid_mode='host',
-            pids_limit=500,
-            platform='linux',
-            ports={
+        create_kwargs = _create_container_args({
+            "image": 'alpine',
+            "command": 'echo hello world',
+            "blkio_weight_device": [{'Path': 'foo', 'Weight': 3}],
+            "blkio_weight": 2,
+            "cap_add": ['foo'],
+            "cap_drop": ['bar'],
+            "cgroup_parent": 'foobar',
+            "cgroupns": 'host',
+            "cpu_period": 1,
+            "cpu_quota": 2,
+            "cpu_shares": 5,
+            "cpuset_cpus": '0-3',
+            "detach": False,
+            "device_read_bps": [{'Path': 'foo', 'Rate': 3}],
+            "device_read_iops": [{'Path': 'foo', 'Rate': 3}],
+            "device_write_bps": [{'Path': 'foo', 'Rate': 3}],
+            "device_write_iops": [{'Path': 'foo', 'Rate': 3}],
+            "devices": ['/dev/sda:/dev/xvda:rwm'],
+            "dns": ['8.8.8.8'],
+            "domainname": 'example.com',
+            "dns_opt": ['foo'],
+            "dns_search": ['example.com'],
+            "entrypoint": '/bin/sh',
+            "environment": {'FOO': 'BAR'},
+            "extra_hosts": {'foo': '1.2.3.4'},
+            "group_add": ['blah'],
+            "ipc_mode": 'foo',
+            "kernel_memory": 123,
+            "labels": {'key': 'value'},
+            "links": {'foo': 'bar'},
+            "log_config": {'Type': 'json-file', 'Config': {}},
+            "lxc_conf": {'foo': 'bar'},
+            "healthcheck": {'test': 'true'},
+            "hostname": 'somehost',
+            "mac_address": 'abc123',
+            "mem_limit": 123,
+            "mem_reservation": 123,
+            "mem_swappiness": 2,
+            "memswap_limit": 456,
+            "name": 'somename',
+            "network_disabled": False,
+            "network": 'foo',
+            "network_driver_opt": {'key1': 'a'},
+            "oom_kill_disable": True,
+            "oom_score_adj": 5,
+            "pid_mode": 'host',
+            "pids_limit": 500,
+            "platform": 'linux',
+            "ports": {
                 1111: 4567,
                 2222: None
             },
-            privileged=True,
-            publish_all_ports=True,
-            read_only=True,
-            restart_policy={'Name': 'always'},
-            security_opt=['blah'],
-            shm_size=123,
-            stdin_open=True,
-            stop_signal=9,
-            sysctls={'foo': 'bar'},
-            tmpfs={'/blah': ''},
-            tty=True,
-            ulimits=[{"Name": "nofile", "Soft": 1024, "Hard": 2048}],
-            user='bob',
-            userns_mode='host',
-            uts_mode='host',
-            version='1.23',
-            volume_driver='some_driver',
-            volumes=[
+            "privileged": True,
+            "publish_all_ports": True,
+            "read_only": True,
+            "restart_policy": {'Name': 'always'},
+            "security_opt": ['blah'],
+            "shm_size": 123,
+            "stdin_open": True,
+            "stop_signal": 9,
+            "sysctls": {'foo': 'bar'},
+            "tmpfs": {'/blah': ''},
+            "tty": True,
+            "ulimits": [{"Name": "nofile", "Soft": 1024, "Hard": 2048}],
+            "user": 'bob',
+            "userns_mode": 'host',
+            "uts_mode": 'host',
+            "version": '1.23',
+            "volume_driver": 'some_driver',
+            "volumes": [
                 '/home/user1/:/mnt/vol2',
                 '/var/www:/mnt/vol1:ro',
                 'volumename:/mnt/vol3r',
@@ -109,18 +109,18 @@ class ContainerCollectionTest(unittest.TestCase):
                 '/anothervolumewithnohostpath:ro',
                 'C:\\windows\\path:D:\\hello\\world:rw'
             ],
-            volumes_from=['container'],
-            working_dir='/code'
-        ))
+            "volumes_from": ['container'],
+            "working_dir": '/code'
+        })

-        expected = dict(
-            image='alpine',
-            command='echo hello world',
-            domainname='example.com',
-            detach=False,
-            entrypoint='/bin/sh',
-            environment={'FOO': 'BAR'},
-            host_config={
+        expected = {
+            "image": 'alpine',
+            "command": 'echo hello world',
+            "domainname": 'example.com',
+            "detach": False,
+            "entrypoint": '/bin/sh',
+            "environment": {'FOO': 'BAR'},
+            "host_config": {
                 'Binds': [
                     '/home/user1/:/mnt/vol2',
                     '/var/www:/mnt/vol1:ro',
@@ -183,20 +183,20 @@ class ContainerCollectionTest(unittest.TestCase):
                 'VolumeDriver': 'some_driver',
                 'VolumesFrom': ['container'],
             },
-            healthcheck={'test': 'true'},
-            hostname='somehost',
-            labels={'key': 'value'},
-            mac_address='abc123',
-            name='somename',
-            network_disabled=False,
-            networking_config={'foo': {'driver_opt': {'key1': 'a'}}},
-            platform='linux',
-            ports=[('1111', 'tcp'), ('2222', 'tcp')],
-            stdin_open=True,
-            stop_signal=9,
-            tty=True,
-            user='bob',
-            volumes=[
+            "healthcheck": {'test': 'true'},
+            "hostname": 'somehost',
+            "labels": {'key': 'value'},
+            "mac_address": 'abc123',
+            "name": 'somename',
+            "network_disabled": False,
+            "networking_config": {'foo': {'driver_opt': {'key1': 'a'}}},
+            "platform": 'linux',
+            "ports": [('1111', 'tcp'), ('2222', 'tcp')],
+            "stdin_open": True,
+            "stop_signal": 9,
+            "tty": True,
+            "user": 'bob',
+            "volumes": [
                 '/mnt/vol2',
                 '/mnt/vol1',
                 '/mnt/vol3r',
@@ -204,8 +204,8 @@ class ContainerCollectionTest(unittest.TestCase):
                 '/anothervolumewithnohostpath',
                 'D:\\hello\\world'
             ],
-            working_dir='/code'
-        )
+            "working_dir": '/code'
+        }

         assert create_kwargs == expected
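Rewriting these `dict(...)` calls as literals is flake8-comprehensions territory (C408 under ruff). The literal form is not just marginally faster to construct; it is strictly more expressive, since keys are arbitrary expressions rather than keyword-argument identifiers, which is exactly what the nested `ports` mapping needs. A short sketch:

    kwargs = dict(image='alpine', detach=False)    # keys must be identifiers
    kwargs = {"image": 'alpine', "detach": False}  # equivalent literal

    ports = {1111: 4567, 2222: None}               # integer keys cannot be
                                                   # spelled as dict(...) kwargs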
@@ -20,7 +20,7 @@ class SwarmTest(BaseAPIClientTest):
         )
         args = fake_request.call_args
         assert args[0][1] == (
-            url_prefix + 'nodes/24ifsmvkjbyhk/update?version=1'
+            f"{url_prefix}nodes/24ifsmvkjbyhk/update?version=1"
         )
         assert json.loads(args[1]['data']) == node_spec
         assert args[1]['headers']['Content-Type'] == 'application/json'
@@ -45,7 +45,7 @@ class SwarmTest(BaseAPIClientTest):

         args = fake_request.call_args

-        assert (args[0][1] == url_prefix + 'swarm/join')
+        assert (args[0][1] == f"{url_prefix}swarm/join")
         assert (json.loads(args[1]['data']) == data)
         assert (args[1]['headers']['Content-Type'] == 'application/json')
@@ -64,6 +64,6 @@ class SwarmTest(BaseAPIClientTest):

         args = fake_request.call_args

-        assert (args[0][1] == url_prefix + 'swarm/join')
+        assert (args[0][1] == f"{url_prefix}swarm/join")
         assert (json.loads(args[1]['data']) == data)
         assert (args[1]['headers']['Content-Type'] == 'application/json')
@@ -59,7 +59,8 @@ class KwargsFromEnvTest(unittest.TestCase):
         self.os_environ = os.environ.copy()

     def tearDown(self):
-        os.environ = self.os_environ
+        os.environ.clear()
+        os.environ.update(self.os_environ)

     def test_kwargs_from_env_empty(self):
         os.environ.update(DOCKER_HOST='',
@@ -486,9 +487,9 @@ class PortsTest(unittest.TestCase):
     def test_split_port_with_protocol(self):
         for protocol in ['tcp', 'udp', 'sctp']:
             internal_port, external_port = split_port(
-                "127.0.0.1:1000:2000/" + protocol
+                f"127.0.0.1:1000:2000/{protocol}"
             )
-            assert internal_port == ["2000/" + protocol]
+            assert internal_port == [f"2000/{protocol}"]
             assert external_port == [("127.0.0.1", "1000")]

     def test_split_port_with_host_ip_no_port(self):
6 tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py{37,38,39,310,311}, flake8
+envlist = py{37,38,39,310,311}, ruff
 skipsdist=True

 [testenv]
@@ -10,7 +10,7 @@ deps =
     -r{toxinidir}/test-requirements.txt
     -r{toxinidir}/requirements.txt

-[testenv:flake8]
-commands = flake8 docker tests setup.py
+[testenv:ruff]
+commands = ruff docker tests setup.py
 deps =
     -r{toxinidir}/test-requirements.txt