diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f23873f0..bdf62ff3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,16 +6,16 @@ env: DOCKER_BUILDKIT: '1' jobs: - flake8: + lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: - python-version: '3.x' - - run: pip install -U flake8 - - name: Run flake8 - run: flake8 docker/ tests/ + python-version: '3.11' + - run: pip install -U ruff==0.0.265 + - name: Run ruff + run: ruff docker tests unit-tests: runs-on: ubuntu-latest diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 86173118..acf22ef7 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -44,7 +44,7 @@ paragraph in the Docker contribution guidelines. Before we can review your pull request, please ensure that nothing has been broken by your changes by running the test suite. You can do so simply by running `make test` in the project root. This also includes coding style using -`flake8` +`ruff` ### 3. Write clear, self-contained commits diff --git a/Makefile b/Makefile index ae6ae34e..79486e3e 100644 --- a/Makefile +++ b/Makefile @@ -46,7 +46,7 @@ build-dind-certs: docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs . 
.PHONY: test -test: flake8 unit-test-py3 integration-dind integration-dind-ssl +test: ruff unit-test-py3 integration-dind integration-dind-ssl .PHONY: unit-test-py3 unit-test-py3: build-py3 @@ -163,9 +163,9 @@ integration-dind-ssl: build-dind-certs build-py3 setup-network docker rm -vf dpy-dind-ssl dpy-dind-certs -.PHONY: flake8 -flake8: build-py3 - docker run -t --rm docker-sdk-python3 flake8 docker tests +.PHONY: ruff +ruff: build-py3 + docker run -t --rm docker-sdk-python3 ruff docker tests .PHONY: docs docs: build-docs diff --git a/docker/__init__.py b/docker/__init__.py index 46beb532..c1c518c5 100644 --- a/docker/__init__.py +++ b/docker/__init__.py @@ -1,4 +1,3 @@ -# flake8: noqa from .api import APIClient from .client import DockerClient, from_env from .context import Context diff --git a/docker/api/__init__.py b/docker/api/__init__.py index ff518441..7260e953 100644 --- a/docker/api/__init__.py +++ b/docker/api/__init__.py @@ -1,2 +1 @@ -# flake8: noqa from .client import APIClient diff --git a/docker/api/build.py b/docker/api/build.py index 3a1a3d96..439f4dc3 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -314,9 +314,8 @@ class BuildApiMixin: auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) log.debug( - 'Sending auth config ({})'.format( - ', '.join(repr(k) for k in auth_data.keys()) - ) + "Sending auth config (%s)", + ', '.join(repr(k) for k in auth_data), ) if auth_data: @@ -336,12 +335,9 @@ def process_dockerfile(dockerfile, path): abs_dockerfile = os.path.join(path, dockerfile) if constants.IS_WINDOWS_PLATFORM and path.startswith( constants.WINDOWS_LONGPATH_PREFIX): - abs_dockerfile = '{}{}'.format( - constants.WINDOWS_LONGPATH_PREFIX, - os.path.normpath( - abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):] - ) - ) + normpath = os.path.normpath( + abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]) + abs_dockerfile = f'{constants.WINDOWS_LONGPATH_PREFIX}{normpath}' if (os.path.splitdrive(path)[0] != 
os.path.splitdrive(abs_dockerfile)[0] or os.path.relpath(abs_dockerfile, path).startswith('..')): # Dockerfile not in context - read data to insert into tar later diff --git a/docker/api/client.py b/docker/api/client.py index 65b9d9d1..a2cb459d 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -160,10 +160,10 @@ class APIClient( base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size ) - except NameError: + except NameError as err: raise DockerException( 'Install pypiwin32 package to enable npipe:// support' - ) + ) from err self.mount('http+docker://', self._custom_adapter) self.base_url = 'http+docker://localnpipe' elif base_url.startswith('ssh://'): @@ -172,10 +172,10 @@ class APIClient( base_url, timeout, pool_connections=num_pools, max_pool_size=max_pool_size, shell_out=use_ssh_client ) - except NameError: + except NameError as err: raise DockerException( 'Install paramiko package to enable ssh:// support' - ) + ) from err self.mount('http+docker://ssh', self._custom_adapter) self._unmount('http://', 'https://') self.base_url = 'http+docker://ssh' @@ -199,28 +199,27 @@ class APIClient( self._version = version if not isinstance(self._version, str): raise DockerException( - 'Version parameter must be a string or None. Found {}'.format( - type(version).__name__ - ) + 'Version parameter must be a string or None. ' + f'Found {type(version).__name__}' ) if utils.version_lt(self._version, MINIMUM_DOCKER_API_VERSION): raise InvalidVersion( - 'API versions below {} are no longer supported by this ' - 'library.'.format(MINIMUM_DOCKER_API_VERSION) + f'API versions below {MINIMUM_DOCKER_API_VERSION} are ' + 'no longer supported by this library.' ) def _retrieve_server_version(self): try: return self.version(api_version=False)["ApiVersion"] - except KeyError: + except KeyError as ke: raise DockerException( 'Invalid response from docker daemon: key "ApiVersion"' ' is missing.' 
- ) + ) from ke except Exception as e: raise DockerException( f'Error while fetching server API version: {e}' - ) + ) from e def _set_request_timeout(self, kwargs): """Prepare the kwargs for an HTTP request by inserting the timeout @@ -248,19 +247,17 @@ class APIClient( for arg in args: if not isinstance(arg, str): raise ValueError( - 'Expected a string but found {} ({}) ' - 'instead'.format(arg, type(arg)) + f'Expected a string but found {arg} ({type(arg)}) instead' ) quote_f = partial(urllib.parse.quote, safe="/:") args = map(quote_f, args) + formatted_path = pathfmt.format(*args) if kwargs.get('versioned_api', True): - return '{}/v{}{}'.format( - self.base_url, self._version, pathfmt.format(*args) - ) + return f'{self.base_url}/v{self._version}{formatted_path}' else: - return f'{self.base_url}{pathfmt.format(*args)}' + return f'{self.base_url}{formatted_path}' def _raise_for_status(self, response): """Raises stored :class:`APIError`, if one occurred.""" @@ -479,7 +476,7 @@ class APIClient( return self._multiplexed_response_stream_helper(res) else: return sep.join( - [x for x in self._multiplexed_buffer_helper(res)] + list(self._multiplexed_buffer_helper(res)) ) def _unmount(self, *args): diff --git a/docker/api/container.py b/docker/api/container.py index d5001fad..5a267d13 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -869,8 +869,8 @@ class ContainerApiMixin: params['since'] = since else: raise errors.InvalidArgument( - 'since value should be datetime or positive int/float, ' - 'not {}'.format(type(since)) + 'since value should be datetime or positive int/float,' + f' not {type(since)}' ) if until is not None: @@ -886,8 +886,8 @@ class ContainerApiMixin: params['until'] = until else: raise errors.InvalidArgument( - 'until value should be datetime or positive int/float, ' - 'not {}'.format(type(until)) + 'until value should be datetime or positive int/float, ' + f'not {type(until)}' ) url = self._url("/containers/{0}/logs", container) @@ 
-959,7 +959,7 @@ class ContainerApiMixin: return port_settings.get(private_port) for protocol in ['tcp', 'udp', 'sctp']: - h_ports = port_settings.get(private_port + '/' + protocol) + h_ports = port_settings.get(f"{private_port}/{protocol}") if h_ports: break diff --git a/docker/api/service.py b/docker/api/service.py index 652b7c24..3aed0651 100644 --- a/docker/api/service.py +++ b/docker/api/service.py @@ -7,9 +7,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec, def raise_version_error(param, min_version): raise errors.InvalidVersion( - '{} is not supported in API version < {}'.format( - param, min_version - ) + f'{param} is not supported in API version < {min_version}' ) if update_config is not None: diff --git a/docker/auth.py b/docker/auth.py index cb388554..7a301ba4 100644 --- a/docker/auth.py +++ b/docker/auth.py @@ -22,15 +22,15 @@ def resolve_repository_name(repo_name): index_name, remote_name = split_repo_name(repo_name) if index_name[0] == '-' or index_name[-1] == '-': raise errors.InvalidRepository( - 'Invalid index name ({}). Cannot begin or end with a' - ' hyphen.'.format(index_name) + f'Invalid index name ({index_name}). ' + 'Cannot begin or end with a hyphen.' ) return resolve_index_name(index_name), remote_name def resolve_index_name(index_name): index_name = convert_to_hostname(index_name) - if index_name == 'index.' + INDEX_NAME: + if index_name == f"index.{INDEX_NAME}": index_name = INDEX_NAME return index_name @@ -99,9 +99,7 @@ class AuthConfig(dict): for registry, entry in entries.items(): if not isinstance(entry, dict): log.debug( - 'Config entry for key {} is not auth config'.format( - registry - ) + f'Config entry for key {registry} is not auth config' ) # We sometimes fall back to parsing the whole config as if it # was the auth config by itself, for legacy purposes. In that @@ -109,17 +107,11 @@ class AuthConfig(dict): # keys is not formatted properly. 
if raise_on_error: raise errors.InvalidConfigFile( - 'Invalid configuration for registry {}'.format( - registry - ) + f'Invalid configuration for registry {registry}' ) return {} if 'identitytoken' in entry: - log.debug( - 'Found an IdentityToken entry for registry {}'.format( - registry - ) - ) + log.debug(f'Found an IdentityToken entry for registry {registry}') conf[registry] = { 'IdentityToken': entry['identitytoken'] } @@ -130,16 +122,15 @@ class AuthConfig(dict): # a valid value in the auths config. # https://github.com/docker/compose/issues/3265 log.debug( - 'Auth data for {} is absent. Client might be using a ' - 'credentials store instead.'.format(registry) + f'Auth data for {registry} is absent. ' + 'Client might be using a credentials store instead.' ) conf[registry] = {} continue username, password = decode_auth(entry['auth']) log.debug( - 'Found entry (registry={}, username={})' - .format(repr(registry), repr(username)) + f'Found entry (registry={registry!r}, username={username!r})' ) conf[registry] = { @@ -277,7 +268,7 @@ class AuthConfig(dict): except credentials.StoreError as e: raise errors.DockerException( f'Credentials store error: {repr(e)}' - ) + ) from e def _get_store_instance(self, name): if name not in self._stores: diff --git a/docker/context/__init__.py b/docker/context/__init__.py index 0a6707f9..dbf172fd 100644 --- a/docker/context/__init__.py +++ b/docker/context/__init__.py @@ -1,3 +1,2 @@ -# flake8: noqa from .context import Context from .api import ContextAPI diff --git a/docker/context/api.py b/docker/context/api.py index 380e8c4c..493f470e 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -113,8 +113,8 @@ class ContextAPI: names.append(data["Name"]) except Exception as e: raise errors.ContextException( - "Failed to load metafile {}: {}".format( - filename, e)) + f"Failed to load metafile {filename}: {e}", + ) from e contexts = [cls.DEFAULT_CONTEXT] for name in names: diff --git a/docker/context/config.py 
b/docker/context/config.py index d761aef1..8c3fe250 100644 --- a/docker/context/config.py +++ b/docker/context/config.py @@ -77,5 +77,6 @@ def get_context_host(path=None, tls=False): host = utils.parse_host(path, IS_WINDOWS_PLATFORM, tls) if host == DEFAULT_UNIX_SOCKET: # remove http+ from default docker socket url - return host.strip("http+") + if host.startswith("http+"): + host = host[5:] return host diff --git a/docker/context/context.py b/docker/context/context.py index dbaa01cb..4faf8e70 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -42,8 +42,9 @@ class Context: for k, v in endpoints.items(): if not isinstance(v, dict): # unknown format - raise ContextException("""Unknown endpoint format for - context {}: {}""".format(name, v)) + raise ContextException( + f"Unknown endpoint format for context {name}: {v}", + ) self.endpoints[k] = v if k != "docker": @@ -96,8 +97,9 @@ class Context: metadata = json.load(f) except (OSError, KeyError, ValueError) as e: # unknown format - raise Exception("""Detected corrupted meta file for - context {} : {}""".format(name, e)) + raise Exception( + f"Detected corrupted meta file for context {name} : {e}" + ) from e # for docker endpoints, set defaults for # Host and SkipTLSVerify fields diff --git a/docker/credentials/__init__.py b/docker/credentials/__init__.py index 31ad28e3..a1247700 100644 --- a/docker/credentials/__init__.py +++ b/docker/credentials/__init__.py @@ -1,4 +1,8 @@ -# flake8: noqa from .store import Store from .errors import StoreError, CredentialsNotFound -from .constants import * +from .constants import ( + DEFAULT_LINUX_STORE, + DEFAULT_OSX_STORE, + DEFAULT_WIN32_STORE, + PROGRAM_PREFIX, +) diff --git a/docker/credentials/errors.py b/docker/credentials/errors.py index 42a1bc1a..d059fd9f 100644 --- a/docker/credentials/errors.py +++ b/docker/credentials/errors.py @@ -13,13 +13,5 @@ class InitializationError(StoreError): def process_store_error(cpe, program): message = 
cpe.output.decode('utf-8') if 'credentials not found in native keychain' in message: - return CredentialsNotFound( - 'No matching credentials in {}'.format( - program - ) - ) - return StoreError( - 'Credentials store {} exited with "{}".'.format( - program, cpe.output.decode('utf-8').strip() - ) - ) + return CredentialsNotFound(f'No matching credentials in {program}') + return StoreError(f'Credentials store {program} exited with "{message}".') diff --git a/docker/credentials/store.py b/docker/credentials/store.py index b7ab53fb..4e63a5ba 100644 --- a/docker/credentials/store.py +++ b/docker/credentials/store.py @@ -20,9 +20,8 @@ class Store: self.environment = environment if self.exe is None: warnings.warn( - '{} not installed or not available in PATH'.format( - self.program - ) + f'{self.program} not installed or not available in PATH', + stacklevel=1, ) def get(self, server): @@ -73,10 +72,8 @@ class Store: def _execute(self, subcmd, data_input): if self.exe is None: raise errors.StoreError( - '{} not installed or not available in PATH'.format( - self.program - ) - ) + f'{self.program} not installed or not available in PATH' + ) output = None env = create_environment_dict(self.environment) try: @@ -84,18 +81,14 @@ class Store: [self.exe, subcmd], input=data_input, env=env, ) except subprocess.CalledProcessError as e: - raise errors.process_store_error(e, self.program) + raise errors.process_store_error(e, self.program) from e except OSError as e: if e.errno == errno.ENOENT: raise errors.StoreError( - '{} not installed or not available in PATH'.format( - self.program - ) - ) + f'{self.program} not installed or not available in PATH' + ) from e else: raise errors.StoreError( - 'Unexpected OS error "{}", errno={}'.format( - e.strerror, e.errno - ) - ) + f'Unexpected OS error "{e.strerror}", errno={e.errno}' + ) from e return output diff --git a/docker/errors.py b/docker/errors.py index 75e30a8c..d03e10f6 100644 --- a/docker/errors.py +++ b/docker/errors.py @@ -54,14 
+54,16 @@ class APIError(requests.exceptions.HTTPError, DockerException): message = super().__str__() if self.is_client_error(): - message = '{} Client Error for {}: {}'.format( - self.response.status_code, self.response.url, - self.response.reason) + message = ( + f'{self.response.status_code} Client Error for ' + f'{self.response.url}: {self.response.reason}' + ) elif self.is_server_error(): - message = '{} Server Error for {}: {}'.format( - self.response.status_code, self.response.url, - self.response.reason) + message = ( + f'{self.response.status_code} Server Error for ' + f'{self.response.url}: {self.response.reason}' + ) if self.explanation: message = f'{message} ("{self.explanation}")' @@ -142,10 +144,10 @@ class ContainerError(DockerException): self.stderr = stderr err = f": {stderr}" if stderr is not None else "" - msg = ("Command '{}' in image '{}' returned non-zero exit " - "status {}{}").format(command, image, exit_status, err) - - super().__init__(msg) + super().__init__( + f"Command '{command}' in image '{image}' " + f"returned non-zero exit status {exit_status}{err}" + ) class StreamParseError(RuntimeError): diff --git a/docker/models/containers.py b/docker/models/containers.py index 64838397..44bb92a0 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -47,11 +47,11 @@ class Container(Model): try: result = self.attrs['Config'].get('Labels') return result or {} - except KeyError: + except KeyError as ke: raise DockerException( 'Label data is not available for sparse objects. Call reload()' ' to retrieve all information' - ) + ) from ke @property def status(self): diff --git a/docker/models/images.py b/docker/models/images.py index e3ec39d2..b4777d8d 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -15,10 +15,8 @@ class Image(Model): An image on the server. 
""" def __repr__(self): - return "<{}: '{}'>".format( - self.__class__.__name__, - "', '".join(self.tags), - ) + tag_str = "', '".join(self.tags) + return f"<{self.__class__.__name__}: '{tag_str}'>" @property def labels(self): @@ -458,7 +456,8 @@ class ImageCollection(Collection): if 'stream' in kwargs: warnings.warn( '`stream` is not a valid parameter for this method' - ' and will be overridden' + ' and will be overridden', + stacklevel=1, ) del kwargs['stream'] @@ -471,9 +470,8 @@ class ImageCollection(Collection): # to be pulled. pass if not all_tags: - return self.get('{0}{2}{1}'.format( - repository, tag, '@' if tag.startswith('sha256:') else ':' - )) + sep = '@' if tag.startswith('sha256:') else ':' + return self.get(f'{repository}{sep}{tag}') return self.list(repository) def push(self, repository, tag=None, **kwargs): diff --git a/docker/models/plugins.py b/docker/models/plugins.py index 16f5245e..85d768c9 100644 --- a/docker/models/plugins.py +++ b/docker/models/plugins.py @@ -187,7 +187,7 @@ class PluginCollection(Collection): """ privileges = self.client.api.plugin_privileges(remote_name) it = self.client.api.pull_plugin(remote_name, privileges, local_name) - for data in it: + for _data in it: pass return self.get(local_name or remote_name) diff --git a/docker/models/resource.py b/docker/models/resource.py index 89030e59..d3a35e84 100644 --- a/docker/models/resource.py +++ b/docker/models/resource.py @@ -64,9 +64,10 @@ class Collection: def __call__(self, *args, **kwargs): raise TypeError( - "'{}' object is not callable. You might be trying to use the old " - "(pre-2.0) API - use docker.APIClient if so." - .format(self.__class__.__name__)) + f"'{self.__class__.__name__}' object is not callable. " + "You might be trying to use the old (pre-2.0) API - " + "use docker.APIClient if so." 
+ ) def list(self): raise NotImplementedError @@ -88,5 +89,4 @@ class Collection: elif isinstance(attrs, dict): return self.model(attrs=attrs, client=self.client, collection=self) else: - raise Exception("Can't create %s from %s" % - (self.model.__name__, attrs)) + raise Exception(f"Can't create {self.model.__name__} from {attrs}") diff --git a/docker/tls.py b/docker/tls.py index f4dffb2e..a4dd0020 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -55,7 +55,7 @@ class TLSConfig: raise errors.TLSParameterError( 'client_cert must be a tuple of' ' (client certificate, key file)' - ) + ) from None if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or not os.path.isfile(tls_key)): diff --git a/docker/transport/__init__.py b/docker/transport/__init__.py index e37fc3ba..54492c11 100644 --- a/docker/transport/__init__.py +++ b/docker/transport/__init__.py @@ -1,4 +1,3 @@ -# flake8: noqa from .unixconn import UnixHTTPAdapter from .ssladapter import SSLHTTPAdapter try: diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py index 45988b2d..d335d871 100644 --- a/docker/transport/npipeconn.py +++ b/docker/transport/npipeconn.py @@ -46,9 +46,8 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): conn = None try: conn = self.pool.get(block=self.block, timeout=timeout) - - except AttributeError: # self.pool is None - raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") + except AttributeError as ae: # self.pool is None + raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae except queue.Empty: if self.block: @@ -56,7 +55,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): self, "Pool reached maximum size and no more " "connections are allowed." 
- ) + ) from None # Oh well, we'll create a new connection then return conn or self._new_conn() diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index a92beb62..6e1d0ee7 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -141,8 +141,8 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): try: conn = self.pool.get(block=self.block, timeout=timeout) - except AttributeError: # self.pool is None - raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") + except AttributeError as ae: # self.pool is None + raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") from ae except queue.Empty: if self.block: @@ -150,7 +150,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): self, "Pool reached maximum size and no more " "connections are allowed." - ) + ) from None # Oh well, we'll create a new connection then return conn or self._new_conn() diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index fae10f26..09d373dd 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -55,7 +55,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter): max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): socket_path = socket_url.replace('http+unix://', '') if not socket_path.startswith('/'): - socket_path = '/' + socket_path + socket_path = f"/{socket_path}" self.socket_path = socket_path self.timeout = timeout self.max_pool_size = max_pool_size diff --git a/docker/types/__init__.py b/docker/types/__init__.py index b425746e..89f22389 100644 --- a/docker/types/__init__.py +++ b/docker/types/__init__.py @@ -1,4 +1,3 @@ -# flake8: noqa from .containers import ( ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest ) diff --git a/docker/types/containers.py b/docker/types/containers.py index 84df0f7e..a2806138 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -48,8 +48,11 @@ class LogConfig(DictType): >>> container = 
client.create_container('busybox', 'true', ... host_config=hc) >>> client.inspect_container(container)['HostConfig']['LogConfig'] - {'Type': 'json-file', 'Config': {'labels': 'production_status,geo', 'max-size': '1g'}} - """ # noqa: E501 + { + 'Type': 'json-file', + 'Config': {'labels': 'production_status,geo', 'max-size': '1g'} + } + """ types = LogConfigTypesEnum def __init__(self, **kwargs): @@ -652,25 +655,25 @@ class HostConfig(dict): def host_config_type_error(param, param_value, expected): - error_msg = 'Invalid type for {0} param: expected {1} but found {2}' - return TypeError(error_msg.format(param, expected, type(param_value))) + return TypeError( + f'Invalid type for {param} param: expected {expected} ' + f'but found {type(param_value)}' + ) def host_config_version_error(param, version, less_than=True): operator = '<' if less_than else '>' - error_msg = '{0} param is not supported in API versions {1} {2}' - return errors.InvalidVersion(error_msg.format(param, operator, version)) - + return errors.InvalidVersion( + f'{param} param is not supported in API versions {operator} {version}', + ) def host_config_value_error(param, param_value): - error_msg = 'Invalid value for {0} param: {1}' - return ValueError(error_msg.format(param, param_value)) + return ValueError(f'Invalid value for {param} param: {param_value}') def host_config_incompatible_error(param, param_value, incompatible_param): - error_msg = '\"{1}\" {0} is incompatible with {2}' return errors.InvalidArgument( - error_msg.format(param, param_value, incompatible_param) + f'\"{param_value}\" {param} is incompatible with {incompatible_param}' ) diff --git a/docker/types/daemon.py b/docker/types/daemon.py index 096b2cc1..04e6ccb2 100644 --- a/docker/types/daemon.py +++ b/docker/types/daemon.py @@ -28,9 +28,9 @@ class CancellableStream: try: return next(self._stream) except urllib3.exceptions.ProtocolError: - raise StopIteration + raise StopIteration from None except OSError: - raise StopIteration + 
raise StopIteration from None next = __next__ diff --git a/docker/types/services.py b/docker/types/services.py index a3383ef7..0b07c350 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -370,8 +370,8 @@ def _convert_generic_resources_dict(generic_resources): return generic_resources if not isinstance(generic_resources, dict): raise errors.InvalidArgument( - 'generic_resources must be a dict or a list' - ' (found {})'.format(type(generic_resources)) + 'generic_resources must be a dict or a list ' + f'(found {type(generic_resources)})' ) resources = [] for kind, value in generic_resources.items(): @@ -381,9 +381,9 @@ def _convert_generic_resources_dict(generic_resources): elif isinstance(value, str): resource_type = 'NamedResourceSpec' else: + kv = {kind: value} raise errors.InvalidArgument( - 'Unsupported generic resource reservation ' - 'type: {}'.format({kind: value}) + f'Unsupported generic resource reservation type: {kv}' ) resources.append({ resource_type: {'Kind': kind, 'Value': value} @@ -764,8 +764,8 @@ class PlacementPreference(dict): def __init__(self, strategy, descriptor): if strategy != 'spread': raise errors.InvalidArgument( - 'PlacementPreference strategy value is invalid ({}):' - ' must be "spread".'.format(strategy) + f'PlacementPreference strategy value is invalid ({strategy}): ' + 'must be "spread".' 
) self['Spread'] = {'SpreadDescriptor': descriptor} diff --git a/docker/utils/__init__.py b/docker/utils/__init__.py index 81c8186c..944c6e65 100644 --- a/docker/utils/__init__.py +++ b/docker/utils/__init__.py @@ -1,4 +1,4 @@ -# flake8: noqa + from .build import create_archive, exclude_paths, mkbuildcontext, tar from .decorators import check_resource, minimum_version, update_headers from .utils import ( diff --git a/docker/utils/build.py b/docker/utils/build.py index 59564c4c..8d18c2be 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -42,7 +42,7 @@ def exclude_paths(root, patterns, dockerfile=None): if dockerfile is None: dockerfile = 'Dockerfile' - patterns.append('!' + dockerfile) + patterns.append(f"!{dockerfile}") pm = PatternMatcher(patterns) return set(pm.walk(root)) @@ -93,10 +93,10 @@ def create_archive(root, files=None, fileobj=None, gzip=False, try: with open(full_path, 'rb') as f: t.addfile(i, f) - except OSError: + except OSError as oe: raise OSError( f'Can not read file in context: {full_path}' - ) + ) from oe else: # Directories, FIFOs, symlinks... don't need to be read. t.addfile(i, None) @@ -180,7 +180,7 @@ class PatternMatcher: fpath = os.path.join( os.path.relpath(current_dir, root), f ) - if fpath.startswith('.' 
+ os.path.sep): + if fpath.startswith(f".{os.path.sep}"): fpath = fpath[2:] match = self.matches(fpath) if not match: diff --git a/docker/utils/decorators.py b/docker/utils/decorators.py index cf1baf49..5aab98cd 100644 --- a/docker/utils/decorators.py +++ b/docker/utils/decorators.py @@ -27,9 +27,7 @@ def minimum_version(version): def wrapper(self, *args, **kwargs): if utils.version_lt(self._version, version): raise errors.InvalidVersion( - '{} is not available for version < {}'.format( - f.__name__, version - ) + f'{f.__name__} is not available for version < {version}', ) return f(self, *args, **kwargs) return wrapper diff --git a/docker/utils/fnmatch.py b/docker/utils/fnmatch.py index 90e9f60f..be745381 100644 --- a/docker/utils/fnmatch.py +++ b/docker/utils/fnmatch.py @@ -79,18 +79,18 @@ def translate(pat): i = i + 1 if i >= n: # is "**EOF" - to align with .gitignore just accept all - res = res + '.*' + res = f"{res}.*" else: # is "**" # Note that this allows for any # of /'s (even 0) because # the .* will eat everything, even /'s - res = res + '(.*/)?' + res = f"{res}(.*/)?" else: # is "*" so map it to anything but "/" - res = res + '[^/]*' + res = f"{res}[^/]*" elif c == '?': # "?" 
is any char except "/" - res = res + '[^/]' + res = f"{res}[^/]" elif c == '[': j = i if j < n and pat[j] == '!': @@ -100,16 +100,16 @@ def translate(pat): while j < n and pat[j] != ']': j = j + 1 if j >= n: - res = res + '\\[' + res = f"{res}\\[" else: stuff = pat[i:j].replace('\\', '\\\\') i = j + 1 if stuff[0] == '!': - stuff = '^' + stuff[1:] + stuff = f"^{stuff[1:]}" elif stuff[0] == '^': - stuff = '\\' + stuff + stuff = f"\\{stuff}" res = f'{res}[{stuff}]' else: res = res + re.escape(c) - return res + '$' + return f"{res}$" diff --git a/docker/utils/json_stream.py b/docker/utils/json_stream.py index f384175f..266193e5 100644 --- a/docker/utils/json_stream.py +++ b/docker/utils/json_stream.py @@ -72,4 +72,4 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a): try: yield decoder(buffered) except Exception as e: - raise StreamParseError(e) + raise StreamParseError(e) from e diff --git a/docker/utils/ports.py b/docker/utils/ports.py index e8139366..9fd6e8f6 100644 --- a/docker/utils/ports.py +++ b/docker/utils/ports.py @@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False): if not end: return [start + proto] if randomly_available_port: - return [f'{start}-{end}' + proto] + return [f"{start}-{end}{proto}"] return [str(port) + proto for port in range(int(start), int(end) + 1)] diff --git a/docker/utils/proxy.py b/docker/utils/proxy.py index 49e98ed9..e7164b6c 100644 --- a/docker/utils/proxy.py +++ b/docker/utils/proxy.py @@ -69,5 +69,9 @@ class ProxyConfig(dict): return proxy_env + environment def __str__(self): - return 'ProxyConfig(http={}, https={}, ftp={}, no_proxy={})'.format( - self.http, self.https, self.ftp, self.no_proxy) + return ( + 'ProxyConfig(' + f'http={self.http}, https={self.https}, ' + f'ftp={self.ftp}, no_proxy={self.no_proxy}' + ')' + ) diff --git a/docker/utils/socket.py b/docker/utils/socket.py index cdc485ea..2306ed07 100644 --- a/docker/utils/socket.py +++ b/docker/utils/socket.py @@ -42,7 +42,7 @@ def 
read(socket, n=4096): try: if hasattr(socket, 'recv'): return socket.recv(n) - if isinstance(socket, getattr(pysocket, 'SocketIO')): + if isinstance(socket, pysocket.SocketIO): return socket.read(n) return os.read(socket.fileno(), n) except OSError as e: diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 301ee989..89492855 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -127,8 +127,7 @@ def convert_volume_binds(binds): if isinstance(v, dict): if 'ro' in v and 'mode' in v: raise ValueError( - 'Binding cannot contain both "ro" and "mode": {}' - .format(repr(v)) + f'Binding cannot contain both "ro" and "mode": {v!r}' ) bind = v['bind'] @@ -167,8 +166,8 @@ def convert_tmpfs_mounts(tmpfs): if not isinstance(tmpfs, list): raise ValueError( - 'Expected tmpfs value to be either a list or a dict, found: {}' - .format(type(tmpfs).__name__) + 'Expected tmpfs value to be either a list or a dict, ' + f'found: {type(tmpfs).__name__}' ) result = {} @@ -182,8 +181,8 @@ def convert_tmpfs_mounts(tmpfs): else: raise ValueError( - "Expected item in tmpfs list to be a string, found: {}" - .format(type(mount).__name__) + "Expected item in tmpfs list to be a string, " + f"found: {type(mount).__name__}" ) result[name] = options @@ -225,9 +224,9 @@ def parse_host(addr, is_win32=False, tls=False): parsed_url = urlparse(addr) proto = parsed_url.scheme - if not proto or any([x not in string.ascii_letters + '+' for x in proto]): + if not proto or any(x not in f"{string.ascii_letters}+" for x in proto): # https://bugs.python.org/issue754016 - parsed_url = urlparse('//' + addr, 'tcp') + parsed_url = urlparse(f"//{addr}", 'tcp') proto = 'tcp' if proto == 'fd': @@ -263,15 +262,14 @@ def parse_host(addr, is_win32=False, tls=False): if parsed_url.path and proto == 'ssh': raise errors.DockerException( - 'Invalid bind address format: no path allowed for this protocol:' - ' {}'.format(addr) + f'Invalid bind address format: no path allowed for this protocol: {addr}' ) 
else: path = parsed_url.path if proto == 'unix' and parsed_url.hostname is not None: # For legacy reasons, we consider unix://path # to be valid and equivalent to unix:///path - path = '/'.join((parsed_url.hostname, path)) + path = f"{parsed_url.hostname}/{path}" netloc = parsed_url.netloc if proto in ('tcp', 'ssh'): @@ -279,8 +277,7 @@ def parse_host(addr, is_win32=False, tls=False): if port <= 0: if proto != 'ssh': raise errors.DockerException( - 'Invalid bind address format: port is required:' - ' {}'.format(addr) + f'Invalid bind address format: port is required: {addr}' ) port = 22 netloc = f'{parsed_url.netloc}:{port}' @@ -290,7 +287,7 @@ def parse_host(addr, is_win32=False, tls=False): # Rewrite schemes to fit library internals (requests adapters) if proto == 'tcp': - proto = 'http{}'.format('s' if tls else '') + proto = f"http{'s' if tls else ''}" elif proto == 'unix': proto = 'http+unix' @@ -424,19 +421,18 @@ def parse_bytes(s): if suffix in units.keys() or suffix.isdigit(): try: digits = float(digits_part) - except ValueError: + except ValueError as ve: raise errors.DockerException( - 'Failed converting the string value for memory ({}) to' - ' an integer.'.format(digits_part) - ) + 'Failed converting the string value for memory ' + f'({digits_part}) to an integer.' + ) from ve # Reconvert to long for the final result s = int(digits * units[suffix]) else: raise errors.DockerException( - 'The specified value for memory ({}) should specify the' - ' units. The postfix should be one of the `b` `k` `m` `g`' - ' characters'.format(s) + f'The specified value for memory ({s}) should specify the units. 
' + 'The postfix should be one of the `b` `k` `m` `g` characters' ) return s @@ -472,8 +468,7 @@ def parse_env_file(env_file): environment[k] = v else: raise errors.DockerException( - 'Invalid line in environment file {}:\n{}'.format( - env_file, line)) + f'Invalid line in environment file {env_file}:\n{line}') return environment diff --git a/docs/conf.py b/docs/conf.py index dc3b37cc..a529f8be 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,6 +18,7 @@ import datetime import os import sys +from importlib.metadata import version sys.path.insert(0, os.path.abspath('..')) @@ -56,7 +57,7 @@ master_doc = 'index' # General information about the project. project = 'Docker SDK for Python' year = datetime.datetime.now().year -copyright = '%d Docker Inc' % year +copyright = f'{year} Docker Inc' author = 'Docker Inc' # The version info for the project you're documenting, acts as replacement for @@ -64,7 +65,6 @@ author = 'Docker Inc' # built documents. # # see https://github.com/pypa/setuptools_scm#usage-from-sphinx -from importlib.metadata import version release = version('docker') # for example take major/minor version = '.'.join(release.split('.')[:2]) diff --git a/pyproject.toml b/pyproject.toml index 9554358e..0a672796 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,3 +3,18 @@ requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"] [tool.setuptools_scm] write_to = 'docker/_version.py' + +[tool.ruff] +target-version = "py37" +extend-select = [ + "B", + "C", + "F", + "W", +] +ignore = [ + "C901", # too complex (there's a whole bunch of these) +] + +[tool.ruff.per-file-ignores] +"**/__init__.py" = ["F401"] diff --git a/setup.py b/setup.py index ff6da714..866aa23c 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ extras_require = { } with open('./test-requirements.txt') as test_reqs_txt: - test_requirements = [line for line in test_reqs_txt] + test_requirements = list(test_reqs_txt) long_description = '' @@ -48,7 +48,7 @@ setup( 
url='https://github.com/docker/docker-py', project_urls={ 'Documentation': 'https://docker-py.readthedocs.io', - 'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html', # noqa: E501 + 'Changelog': 'https://docker-py.readthedocs.io/en/stable/change-log.html', 'Source': 'https://github.com/docker/docker-py', 'Tracker': 'https://github.com/docker/docker-py/issues', }, diff --git a/test-requirements.txt b/test-requirements.txt index b7457fa7..d98ec4db 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,6 @@ setuptools==65.5.1 coverage==6.4.2 -flake8==4.0.1 +ruff==0.0.265 pytest==7.1.2 pytest-cov==3.0.0 pytest-timeout==2.1.0 diff --git a/tests/helpers.py b/tests/helpers.py index bdb07f96..e0785774 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -80,7 +80,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40): start_time = time.time() while not condition(): if time.time() - start_time > timeout: - raise AssertionError("Timeout: %s" % condition) + raise AssertionError(f"Timeout: {condition}") time.sleep(delay) diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 606c3b7e..2add2d87 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -132,7 +132,7 @@ class BuildTest(BaseAPIIntegrationTest): path=base_dir, tag=tag, ) - for chunk in stream: + for _chunk in stream: pass c = self.client.create_container(tag, ['find', '/test', '-type', 'f']) @@ -142,7 +142,7 @@ class BuildTest(BaseAPIIntegrationTest): logs = logs.decode('utf-8') - assert sorted(list(filter(None, logs.split('\n')))) == sorted([ + assert sorted(filter(None, logs.split('\n'))) == sorted([ '/test/#file.txt', '/test/ignored/subdir/excepted-with-spaces', '/test/ignored/subdir/excepted-file', @@ -160,7 +160,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, tag='buildargs', buildargs={'test': 'OK'} ) self.tmp_imgs.append('buildargs') - for chunk in stream: + for _chunk in 
stream: pass info = self.client.inspect_image('buildargs') @@ -180,7 +180,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, tag=tag, shmsize=shmsize ) self.tmp_imgs.append(tag) - for chunk in stream: + for _chunk in stream: pass # There is currently no way to get the shmsize @@ -198,7 +198,7 @@ class BuildTest(BaseAPIIntegrationTest): isolation='default' ) - for chunk in stream: + for _chunk in stream: pass @requires_api_version('1.23') @@ -213,7 +213,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, tag='labels', labels=labels ) self.tmp_imgs.append('labels') - for chunk in stream: + for _chunk in stream: pass info = self.client.inspect_image('labels') @@ -230,7 +230,7 @@ class BuildTest(BaseAPIIntegrationTest): stream = self.client.build(fileobj=script, tag='build1') self.tmp_imgs.append('build1') - for chunk in stream: + for _chunk in stream: pass stream = self.client.build( @@ -271,7 +271,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, target='first', tag='build1' ) self.tmp_imgs.append('build1') - for chunk in stream: + for _chunk in stream: pass info = self.client.inspect_image('build1') @@ -300,7 +300,7 @@ class BuildTest(BaseAPIIntegrationTest): ) self.tmp_imgs.append('dockerpytest_customnetbuild') - for chunk in stream: + for _chunk in stream: pass assert self.client.inspect_image('dockerpytest_customnetbuild') @@ -312,7 +312,7 @@ class BuildTest(BaseAPIIntegrationTest): ) self.tmp_imgs.append('dockerpytest_nonebuild') - logs = [chunk for chunk in stream] + logs = list(stream) assert 'errorDetail' in logs[-1] assert logs[-1]['errorDetail']['code'] == 1 @@ -365,7 +365,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, tag=tag, squash=squash ) self.tmp_imgs.append(tag) - for chunk in stream: + for _chunk in stream: pass return self.client.inspect_image(tag) @@ -392,7 +392,7 @@ class BuildTest(BaseAPIIntegrationTest): expected = '{0}{2}\n{1}'.format( control_chars[0], control_chars[1], snippet ) - assert 
any([line == expected for line in lines]) + assert any(line == expected for line in lines) def test_build_gzip_encoding(self): base_dir = tempfile.mkdtemp() diff --git a/tests/integration/api_client_test.py b/tests/integration/api_client_test.py index d1622fa8..ae71a57b 100644 --- a/tests/integration/api_client_test.py +++ b/tests/integration/api_client_test.py @@ -47,7 +47,7 @@ class ConnectionTimeoutTest(unittest.TestCase): # This call isn't supposed to complete, and it should fail fast. try: res = self.client.inspect_container('id') - except: # noqa: E722 + except Exception: pass end = time.time() assert res is None @@ -72,6 +72,4 @@ class UnixconnTest(unittest.TestCase): client.close() del client - assert len(w) == 0, "No warnings produced: {}".format( - w[0].message - ) + assert len(w) == 0, f"No warnings produced: {w[0].message}" diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index f571d191..ecda1d65 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -702,9 +702,7 @@ class ArchiveTest(BaseAPIIntegrationTest): test_file.seek(0) ctnr = self.client.create_container( TEST_IMG, - 'cat {}'.format( - os.path.join('/vol1/', os.path.basename(test_file.name)) - ), + f"cat {os.path.join('/vol1/', os.path.basename(test_file.name))}", volumes=['/vol1'] ) self.tmp_containers.append(ctnr) @@ -862,7 +860,7 @@ class LogsTest(BaseAPIIntegrationTest): exitcode = self.client.wait(id)['StatusCode'] assert exitcode == 0 logs = self.client.logs(id) - assert logs == (snippet + '\n').encode(encoding='ascii') + assert logs == f"{snippet}\n".encode(encoding='ascii') def test_logs_tail_option(self): snippet = '''Line1 @@ -893,7 +891,7 @@ Line2''' exitcode = self.client.wait(id)['StatusCode'] assert exitcode == 0 - assert logs == (snippet + '\n').encode(encoding='ascii') + assert logs == f"{snippet}\n".encode(encoding='ascii') @pytest.mark.timeout(5) 
@pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), @@ -914,7 +912,7 @@ Line2''' for chunk in generator: logs += chunk - assert logs == (snippet + '\n').encode(encoding='ascii') + assert logs == f"{snippet}\n".encode(encoding='ascii') def test_logs_with_dict_instead_of_id(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' @@ -927,7 +925,7 @@ Line2''' exitcode = self.client.wait(id)['StatusCode'] assert exitcode == 0 logs = self.client.logs(container) - assert logs == (snippet + '\n').encode(encoding='ascii') + assert logs == f"{snippet}\n".encode(encoding='ascii') def test_logs_with_tail_0(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' @@ -956,7 +954,7 @@ Line2''' logs_until_1 = self.client.logs(container, until=1) assert logs_until_1 == b'' logs_until_now = self.client.logs(container, datetime.now()) - assert logs_until_now == (snippet + '\n').encode(encoding='ascii') + assert logs_until_now == f"{snippet}\n".encode(encoding='ascii') class DiffTest(BaseAPIIntegrationTest): @@ -1122,7 +1120,7 @@ class PortTest(BaseAPIIntegrationTest): ip, host_port = port_binding['HostIp'], port_binding['HostPort'] - port_binding = port if not protocol else port + "/" + protocol + port_binding = port if not protocol else f"{port}/{protocol}" assert ip == port_bindings[port_binding][0] assert host_port == port_bindings[port_binding][1] diff --git a/tests/integration/api_healthcheck_test.py b/tests/integration/api_healthcheck_test.py index c54583b0..9ecdcd86 100644 --- a/tests/integration/api_healthcheck_test.py +++ b/tests/integration/api_healthcheck_test.py @@ -16,7 +16,7 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.24') def test_healthcheck_shell_command(self): container = self.client.create_container( - TEST_IMG, 'top', healthcheck=dict(test='echo "hello world"')) + TEST_IMG, 'top', healthcheck={'test': 'echo "hello world"'}) self.tmp_containers.append(container) res = self.client.inspect_container(container) @@ 
-27,12 +27,12 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.24') def test_healthcheck_passes(self): container = self.client.create_container( - TEST_IMG, 'top', healthcheck=dict( - test="true", - interval=1 * SECOND, - timeout=1 * SECOND, - retries=1, - )) + TEST_IMG, 'top', healthcheck={ + 'test': "true", + 'interval': 1 * SECOND, + 'timeout': 1 * SECOND, + 'retries': 1, + }) self.tmp_containers.append(container) self.client.start(container) wait_on_health_status(self.client, container, "healthy") @@ -40,12 +40,12 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.24') def test_healthcheck_fails(self): container = self.client.create_container( - TEST_IMG, 'top', healthcheck=dict( - test="false", - interval=1 * SECOND, - timeout=1 * SECOND, - retries=1, - )) + TEST_IMG, 'top', healthcheck={ + 'test': "false", + 'interval': 1 * SECOND, + 'timeout': 1 * SECOND, + 'retries': 1, + }) self.tmp_containers.append(container) self.client.start(container) wait_on_health_status(self.client, container, "unhealthy") @@ -53,14 +53,14 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.29') def test_healthcheck_start_period(self): container = self.client.create_container( - TEST_IMG, 'top', healthcheck=dict( - test="echo 'x' >> /counter.txt && " + TEST_IMG, 'top', healthcheck={ + 'test': "echo 'x' >> /counter.txt && " "test `cat /counter.txt | wc -l` -ge 3", - interval=1 * SECOND, - timeout=1 * SECOND, - retries=1, - start_period=3 * SECOND - ) + 'interval': 1 * SECOND, + 'timeout': 1 * SECOND, + 'retries': 1, + 'start_period': 3 * SECOND + } ) self.tmp_containers.append(container) diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index cb3d6671..7081b53b 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -263,10 +263,8 @@ class ImportImageTest(BaseAPIIntegrationTest): data = 
self.client.get_image(test_img) assert data output = self.client.load_image(data) - assert any([ - line for line in output - if f'Loaded image: {test_img}' in line.get('stream', '') - ]) + assert any(line for line in output + if f'Loaded image: {test_img}' in line.get('stream', '')) @contextlib.contextmanager def temporary_http_file_server(self, stream): diff --git a/tests/integration/api_plugin_test.py b/tests/integration/api_plugin_test.py index 3ecb0283..3f163390 100644 --- a/tests/integration/api_plugin_test.py +++ b/tests/integration/api_plugin_test.py @@ -39,7 +39,7 @@ class PluginTest(BaseAPIIntegrationTest): return self.client.inspect_plugin(plugin_name) except docker.errors.NotFound: prv = self.client.plugin_privileges(plugin_name) - for d in self.client.pull_plugin(plugin_name, prv): + for _d in self.client.pull_plugin(plugin_name, prv): pass return self.client.inspect_plugin(plugin_name) @@ -118,7 +118,7 @@ class PluginTest(BaseAPIIntegrationTest): pass prv = self.client.plugin_privileges(SSHFS) - logs = [d for d in self.client.pull_plugin(SSHFS, prv)] + logs = list(self.client.pull_plugin(SSHFS, prv)) assert filter(lambda x: x['status'] == 'Download complete', logs) assert self.client.inspect_plugin(SSHFS) assert self.client.enable_plugin(SSHFS) @@ -128,7 +128,7 @@ class PluginTest(BaseAPIIntegrationTest): pl_data = self.ensure_plugin_installed(SSHFS) assert pl_data['Enabled'] is False prv = self.client.plugin_privileges(SSHFS) - logs = [d for d in self.client.upgrade_plugin(SSHFS, SSHFS, prv)] + logs = list(self.client.upgrade_plugin(SSHFS, SSHFS, prv)) assert filter(lambda x: x['status'] == 'Download complete', logs) assert self.client.inspect_plugin(SSHFS) assert self.client.enable_plugin(SSHFS) diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index cffe12fc..b4125d24 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -127,11 +127,11 @@ class 
SwarmTest(BaseAPIIntegrationTest): assert self.init_swarm() with pytest.raises(docker.errors.APIError) as exc_info: self.client.leave_swarm() - exc_info.value.response.status_code == 500 + assert exc_info.value.response.status_code == 503 assert self.client.leave_swarm(force=True) with pytest.raises(docker.errors.APIError) as exc_info: self.client.inspect_swarm() - exc_info.value.response.status_code == 406 + assert exc_info.value.response.status_code == 503 assert self.client.leave_swarm(force=True) @requires_api_version('1.24') diff --git a/tests/integration/base.py b/tests/integration/base.py index 031079c9..e4073757 100644 --- a/tests/integration/base.py +++ b/tests/integration/base.py @@ -103,8 +103,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest): if exitcode != 0: output = self.client.logs(container) raise Exception( - "Container exited with code {}:\n{}" - .format(exitcode, output)) + f"Container exited with code {exitcode}:\n{output}") return container diff --git a/tests/integration/context_api_test.py b/tests/integration/context_api_test.py index a2a12a5c..1a13f281 100644 --- a/tests/integration/context_api_test.py +++ b/tests/integration/context_api_test.py @@ -29,7 +29,7 @@ class ContextLifecycleTest(BaseAPIIntegrationTest): "test", tls_cfg=docker_tls) # check for a context 'test' in the context store - assert any([ctx.Name == "test" for ctx in ContextAPI.contexts()]) + assert any(ctx.Name == "test" for ctx in ContextAPI.contexts()) # retrieve a context object for 'test' assert ContextAPI.get_context("test") # remove context diff --git a/tests/integration/credentials/store_test.py b/tests/integration/credentials/store_test.py index 16f4d60a..82ea8474 100644 --- a/tests/integration/credentials/store_test.py +++ b/tests/integration/credentials/store_test.py @@ -22,7 +22,7 @@ class TestStore: def setup_method(self): self.tmp_keys = [] if sys.platform.startswith('linux'): - if shutil.which('docker-credential-' + DEFAULT_LINUX_STORE): + if 
shutil.which(f"docker-credential-{DEFAULT_LINUX_STORE}"): self.store = Store(DEFAULT_LINUX_STORE) elif shutil.which('docker-credential-pass'): self.store = Store('pass') diff --git a/tests/integration/credentials/utils_test.py b/tests/integration/credentials/utils_test.py index acf018d2..46440397 100644 --- a/tests/integration/credentials/utils_test.py +++ b/tests/integration/credentials/utils_test.py @@ -7,7 +7,7 @@ from unittest import mock @mock.patch.dict(os.environ) def test_create_environment_dict(): base = {'FOO': 'bar', 'BAZ': 'foobar'} - os.environ = base + os.environ = base # noqa: B003 assert create_environment_dict({'FOO': 'baz'}) == { 'FOO': 'baz', 'BAZ': 'foobar', } diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index eac4c979..4d33e622 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -49,7 +49,7 @@ class ContainerCollectionTest(BaseIntegrationTest): container = client.containers.run( "alpine", "sh -c 'echo \"hello\" > /insidecontainer/test'", - volumes=["%s:/insidecontainer" % path], + volumes=[f"{path}:/insidecontainer"], detach=True ) self.tmp_containers.append(container.id) @@ -58,7 +58,7 @@ class ContainerCollectionTest(BaseIntegrationTest): name = "container_volume_test" out = client.containers.run( "alpine", "cat /insidecontainer/test", - volumes=["%s:/insidecontainer" % path], + volumes=[f"{path}:/insidecontainer"], name=name ) self.tmp_containers.append(name) @@ -109,7 +109,7 @@ class ContainerCollectionTest(BaseIntegrationTest): out = client.containers.run( "alpine", "echo hello", - log_config=dict(type='none') + log_config={"type": 'none'} ) assert out is None @@ -118,7 +118,7 @@ class ContainerCollectionTest(BaseIntegrationTest): out = client.containers.run( "alpine", "echo hello", - log_config=dict(type='json-file') + log_config={"type": 'json-file'} ) assert out == b'hello\n' @@ -150,7 +150,7 @@ class 
ContainerCollectionTest(BaseIntegrationTest): out = client.containers.run( 'alpine', 'sh -c "echo hello && echo world"', stream=True ) - logs = [line for line in out] + logs = list(out) assert logs[0] == b'hello\n' assert logs[1] == b'world\n' @@ -165,7 +165,7 @@ class ContainerCollectionTest(BaseIntegrationTest): threading.Timer(1, out.close).start() - logs = [line for line in out] + logs = list(out) assert len(logs) == 2 assert logs[0] == b'hello\n' @@ -221,7 +221,7 @@ class ContainerCollectionTest(BaseIntegrationTest): assert container.status == 'running' assert container.image == client.images.get('alpine') with pytest.raises(docker.errors.DockerException): - container.labels + _ = container.labels container.kill() container.remove() diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py index 94aa2010..d335da4a 100644 --- a/tests/integration/models_images_test.py +++ b/tests/integration/models_images_test.py @@ -88,9 +88,7 @@ class ImageCollectionTest(BaseIntegrationTest): client = docker.from_env(version=TEST_API_VERSION) images = client.images.pull('hello-world', all_tags=True) assert len(images) >= 1 - assert any([ - 'hello-world:latest' in img.attrs['RepoTags'] for img in images - ]) + assert any('hello-world:latest' in img.attrs['RepoTags'] for img in images) def test_load_error(self): client = docker.from_env(version=TEST_API_VERSION) diff --git a/tests/integration/models_networks_test.py b/tests/integration/models_networks_test.py index 08d7ad29..f4052e4b 100644 --- a/tests/integration/models_networks_test.py +++ b/tests/integration/models_networks_test.py @@ -59,11 +59,11 @@ class NetworkTest(BaseIntegrationTest): network.connect(container) container.start() assert client.networks.get(network.id).containers == [container] - network_containers = list( + network_containers = [ c for net in client.networks.list(ids=[network.id], greedy=True) for c in net.containers - ) + ] assert network_containers == [container] 
network.disconnect(container) assert network.containers == [] diff --git a/tests/integration/regression_test.py b/tests/integration/regression_test.py index 10313a63..7d2b228c 100644 --- a/tests/integration/regression_test.py +++ b/tests/integration/regression_test.py @@ -12,7 +12,7 @@ class TestRegressions(BaseAPIIntegrationTest): def test_443_handle_nonchunked_response_in_stream(self): dfile = io.BytesIO() with pytest.raises(docker.errors.APIError) as exc: - for line in self.client.build(fileobj=dfile, tag="a/b/c"): + for _line in self.client.build(fileobj=dfile, tag="a/b/c"): pass assert exc.value.is_error() dfile.close() diff --git a/tests/ssh/api_build_test.py b/tests/ssh/api_build_test.py index ef48e12e..d060f465 100644 --- a/tests/ssh/api_build_test.py +++ b/tests/ssh/api_build_test.py @@ -124,7 +124,7 @@ class BuildTest(BaseAPIIntegrationTest): path=base_dir, tag=tag, ) - for chunk in stream: + for _chunk in stream: pass c = self.client.create_container(tag, ['find', '/test', '-type', 'f']) @@ -134,7 +134,7 @@ class BuildTest(BaseAPIIntegrationTest): logs = logs.decode('utf-8') - assert sorted(list(filter(None, logs.split('\n')))) == sorted([ + assert sorted(filter(None, logs.split('\n'))) == sorted([ '/test/#file.txt', '/test/ignored/subdir/excepted-file', '/test/not-ignored' @@ -151,7 +151,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, tag='buildargs', buildargs={'test': 'OK'} ) self.tmp_imgs.append('buildargs') - for chunk in stream: + for _chunk in stream: pass info = self.client.inspect_image('buildargs') @@ -171,7 +171,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, tag=tag, shmsize=shmsize ) self.tmp_imgs.append(tag) - for chunk in stream: + for _chunk in stream: pass # There is currently no way to get the shmsize @@ -189,7 +189,7 @@ class BuildTest(BaseAPIIntegrationTest): isolation='default' ) - for chunk in stream: + for _chunk in stream: pass @requires_api_version('1.23') @@ -204,7 +204,7 @@ class 
BuildTest(BaseAPIIntegrationTest): fileobj=script, tag='labels', labels=labels ) self.tmp_imgs.append('labels') - for chunk in stream: + for _chunk in stream: pass info = self.client.inspect_image('labels') @@ -221,7 +221,7 @@ class BuildTest(BaseAPIIntegrationTest): stream = self.client.build(fileobj=script, tag='build1') self.tmp_imgs.append('build1') - for chunk in stream: + for _chunk in stream: pass stream = self.client.build( @@ -262,7 +262,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, target='first', tag='build1' ) self.tmp_imgs.append('build1') - for chunk in stream: + for _chunk in stream: pass info = self.client.inspect_image('build1') @@ -291,7 +291,7 @@ class BuildTest(BaseAPIIntegrationTest): ) self.tmp_imgs.append('dockerpytest_customnetbuild') - for chunk in stream: + for _chunk in stream: pass assert self.client.inspect_image('dockerpytest_customnetbuild') @@ -303,7 +303,7 @@ class BuildTest(BaseAPIIntegrationTest): ) self.tmp_imgs.append('dockerpytest_nonebuild') - logs = [chunk for chunk in stream] + logs = list(stream) assert 'errorDetail' in logs[-1] assert logs[-1]['errorDetail']['code'] == 1 @@ -356,7 +356,7 @@ class BuildTest(BaseAPIIntegrationTest): fileobj=script, tag=tag, squash=squash ) self.tmp_imgs.append(tag) - for chunk in stream: + for _chunk in stream: pass return self.client.inspect_image(tag) @@ -383,7 +383,7 @@ class BuildTest(BaseAPIIntegrationTest): expected = '{0}{2}\n{1}'.format( control_chars[0], control_chars[1], snippet ) - assert any([line == expected for line in lines]) + assert any(line == expected for line in lines) def test_build_gzip_encoding(self): base_dir = tempfile.mkdtemp() diff --git a/tests/ssh/base.py b/tests/ssh/base.py index 4b91add4..d6ff130a 100644 --- a/tests/ssh/base.py +++ b/tests/ssh/base.py @@ -110,8 +110,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest): if exitcode != 0: output = self.client.logs(container) raise Exception( - "Container exited with code {}:\n{}" - 
.format(exitcode, output)) + f"Container exited with code {exitcode}:\n{output}") return container diff --git a/tests/unit/api_build_test.py b/tests/unit/api_build_test.py index 7e07a269..cbecd1e5 100644 --- a/tests/unit/api_build_test.py +++ b/tests/unit/api_build_test.py @@ -89,7 +89,7 @@ class BuildTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'build', + f"{url_prefix}build", stream=True, data=None, headers=expected_headers, @@ -193,10 +193,10 @@ class BuildTest(BaseAPIClientTest): 'foo/Dockerfile.foo', None ) assert process_dockerfile( - '../Dockerfile', pre(base + '\\foo') + '../Dockerfile', pre(f"{base}\\foo") )[1] is not None assert process_dockerfile( - '../baz/Dockerfile.baz', pre(base + '/baz') + '../baz/Dockerfile.baz', pre(f"{base}/baz") ) == ('../baz/Dockerfile.baz', None) def test_process_dockerfile(self): @@ -218,8 +218,8 @@ class BuildTest(BaseAPIClientTest): 'foo/Dockerfile.foo', None ) assert process_dockerfile( - '../Dockerfile', base + '/foo' + '../Dockerfile', f"{base}/foo" )[1] is not None - assert process_dockerfile('../baz/Dockerfile.baz', base + '/baz') == ( + assert process_dockerfile('../baz/Dockerfile.baz', f"{base}/baz") == ( '../baz/Dockerfile.baz', None ) diff --git a/tests/unit/api_exec_test.py b/tests/unit/api_exec_test.py index 45042508..1760239f 100644 --- a/tests/unit/api_exec_test.py +++ b/tests/unit/api_exec_test.py @@ -32,9 +32,7 @@ class ExecTest(BaseAPIClientTest): self.client.exec_start(fake_api.FAKE_EXEC_ID) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{}/start'.format( - fake_api.FAKE_EXEC_ID - ) + assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start" assert json.loads(args[1]['data']) == { 'Tty': False, @@ -51,9 +49,7 @@ class ExecTest(BaseAPIClientTest): self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{}/start'.format( - fake_api.FAKE_EXEC_ID - ) + assert 
args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/start" assert json.loads(args[1]['data']) == { 'Tty': False, @@ -68,16 +64,14 @@ class ExecTest(BaseAPIClientTest): self.client.exec_inspect(fake_api.FAKE_EXEC_ID) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{}/json'.format( - fake_api.FAKE_EXEC_ID - ) + assert args[0][1] == f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/json" def test_exec_resize(self): self.client.exec_resize(fake_api.FAKE_EXEC_ID, height=20, width=60) fake_request.assert_called_with( 'POST', - url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize', + f"{url_prefix}exec/{fake_api.FAKE_EXEC_ID}/resize", params={'h': 20, 'w': 60}, timeout=DEFAULT_TIMEOUT_SECONDS ) diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py index b3428aa1..22b27fe0 100644 --- a/tests/unit/api_image_test.py +++ b/tests/unit/api_image_test.py @@ -12,7 +12,7 @@ from .api_test import ( class ImageTest(BaseAPIClientTest): def test_image_viz(self): - with pytest.raises(Exception): + with pytest.raises(Exception): # noqa: B017 self.client.images('busybox', viz=True) self.fail('Viz output should not be supported!') @@ -21,7 +21,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/json', + f"{url_prefix}images/json", params={'only_ids': 0, 'all': 1}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -31,7 +31,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/json', + f"{url_prefix}images/json", params={'only_ids': 0, 'all': 0, 'filters': '{"reference": ["foo:bar"]}'}, timeout=DEFAULT_TIMEOUT_SECONDS @@ -42,7 +42,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/json', + f"{url_prefix}images/json", params={'only_ids': 1, 'all': 1}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -52,7 +52,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/json', + 
f"{url_prefix}images/json", params={'only_ids': 1, 'all': 0}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -62,7 +62,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/json', + f"{url_prefix}images/json", params={'only_ids': 0, 'all': 0, 'filters': '{"dangling": ["true"]}'}, timeout=DEFAULT_TIMEOUT_SECONDS @@ -72,7 +72,7 @@ class ImageTest(BaseAPIClientTest): self.client.pull('joffrey/test001') args = fake_request.call_args - assert args[0][1] == url_prefix + 'images/create' + assert args[0][1] == f"{url_prefix}images/create" assert args[1]['params'] == { 'tag': 'latest', 'fromImage': 'joffrey/test001' } @@ -82,7 +82,7 @@ class ImageTest(BaseAPIClientTest): self.client.pull('joffrey/test001', stream=True) args = fake_request.call_args - assert args[0][1] == url_prefix + 'images/create' + assert args[0][1] == f"{url_prefix}images/create" assert args[1]['params'] == { 'tag': 'latest', 'fromImage': 'joffrey/test001' } @@ -93,7 +93,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'commit', + f"{url_prefix}commit", data='{}', headers={'Content-Type': 'application/json'}, params={ @@ -113,7 +113,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'DELETE', - url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID, + f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}", params={'force': False, 'noprune': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -123,7 +123,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/test_image/history', + f"{url_prefix}images/test_image/history", timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -136,7 +136,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/create', + f"{url_prefix}images/create", params={ 'repo': fake_api.FAKE_REPO_NAME, 'tag': fake_api.FAKE_TAG_NAME, @@ -157,7 +157,7 @@ class ImageTest(BaseAPIClientTest): 
fake_request.assert_called_with( 'POST', - url_prefix + 'images/create', + f"{url_prefix}images/create", params={ 'repo': fake_api.FAKE_REPO_NAME, 'tag': fake_api.FAKE_TAG_NAME, @@ -179,7 +179,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/create', + f"{url_prefix}images/create", params={ 'repo': fake_api.FAKE_REPO_NAME, 'tag': fake_api.FAKE_TAG_NAME, @@ -194,7 +194,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/test_image/json', + f"{url_prefix}images/test_image/json", timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -212,7 +212,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/test_image/push', + f"{url_prefix}images/test_image/push", params={ 'tag': None }, @@ -231,7 +231,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/test_image/push', + f"{url_prefix}images/test_image/push", params={ 'tag': fake_api.FAKE_TAG_NAME, }, @@ -255,7 +255,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/test_image/push', + f"{url_prefix}images/test_image/push", params={ 'tag': fake_api.FAKE_TAG_NAME, }, @@ -273,7 +273,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/test_image/push', + f"{url_prefix}images/test_image/push", params={ 'tag': None }, @@ -288,7 +288,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', + f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag", params={ 'tag': None, 'repo': 'repo', @@ -306,7 +306,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', + f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag", params={ 'tag': 'tag', 'repo': 'repo', @@ -321,7 +321,7 @@ class 
ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', + f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/tag", params={ 'tag': None, 'repo': 'repo', @@ -335,7 +335,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/get', + f"{url_prefix}images/{fake_api.FAKE_IMAGE_ID}/get", stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -345,7 +345,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/load', + f"{url_prefix}images/load", data='Byte Stream....', stream=True, params={}, @@ -357,7 +357,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/load', + f"{url_prefix}images/load", data='Byte Stream....', stream=True, params={'quiet': True}, diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py index 8afab737..d3daa44c 100644 --- a/tests/unit/api_network_test.py +++ b/tests/unit/api_network_test.py @@ -28,7 +28,7 @@ class NetworkTest(BaseAPIClientTest): with mock.patch('docker.api.client.APIClient.get', get): assert self.client.networks() == networks - assert get.call_args[0][0] == url_prefix + 'networks' + assert get.call_args[0][0] == f"{url_prefix}networks" filters = json.loads(get.call_args[1]['params']['filters']) assert not filters @@ -54,7 +54,7 @@ class NetworkTest(BaseAPIClientTest): result = self.client.create_network('foo') assert result == network_data - assert post.call_args[0][0] == url_prefix + 'networks/create' + assert post.call_args[0][0] == f"{url_prefix}networks/create" assert json.loads(post.call_args[1]['data']) == {"Name": "foo"} @@ -97,7 +97,7 @@ class NetworkTest(BaseAPIClientTest): self.client.remove_network(network_id) args = delete.call_args - assert args[0][0] == url_prefix + f'networks/{network_id}' + assert args[0][0] == f"{url_prefix}networks/{network_id}" def 
test_inspect_network(self): network_id = 'abc12345' @@ -117,7 +117,7 @@ class NetworkTest(BaseAPIClientTest): assert result == network_data args = get.call_args - assert args[0][0] == url_prefix + f'networks/{network_id}' + assert args[0][0] == f"{url_prefix}networks/{network_id}" def test_connect_container_to_network(self): network_id = 'abc12345' @@ -135,7 +135,7 @@ class NetworkTest(BaseAPIClientTest): ) assert post.call_args[0][0] == ( - url_prefix + f'networks/{network_id}/connect' + f"{url_prefix}networks/{network_id}/connect" ) assert json.loads(post.call_args[1]['data']) == { @@ -158,7 +158,7 @@ class NetworkTest(BaseAPIClientTest): container={'Id': container_id}, net_id=network_id) assert post.call_args[0][0] == ( - url_prefix + f'networks/{network_id}/disconnect' + f"{url_prefix}networks/{network_id}/disconnect" ) assert json.loads(post.call_args[1]['data']) == { 'Container': container_id diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index 4b6099c9..7bc2ea8c 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -86,9 +86,7 @@ def fake_read_from_socket(self, response, stream, tty=False, demux=False): url_base = f'{fake_api.prefix}/' -url_prefix = '{}v{}/'.format( - url_base, - docker.constants.DEFAULT_DOCKER_API_VERSION) +url_prefix = f'{url_base}v{docker.constants.DEFAULT_DOCKER_API_VERSION}/' class BaseAPIClientTest(unittest.TestCase): @@ -130,22 +128,18 @@ class DockerApiTest(BaseAPIClientTest): def test_url_valid_resource(self): url = self.client._url('/hello/{0}/world', 'somename') - assert url == '{}{}'.format(url_prefix, 'hello/somename/world') + assert url == f"{url_prefix}hello/somename/world" url = self.client._url( '/hello/{0}/world/{1}', 'somename', 'someothername' ) - assert url == '{}{}'.format( - url_prefix, 'hello/somename/world/someothername' - ) + assert url == f"{url_prefix}hello/somename/world/someothername" url = self.client._url('/hello/{0}/world', 'some?name') - assert url == '{}{}'.format(url_prefix, 
'hello/some%3Fname/world') + assert url == f"{url_prefix}hello/some%3Fname/world" url = self.client._url("/images/{0}/push", "localhost:5000/image") - assert url == '{}{}'.format( - url_prefix, 'images/localhost:5000/image/push' - ) + assert url == f"{url_prefix}images/localhost:5000/image/push" def test_url_invalid_resource(self): with pytest.raises(ValueError): @@ -153,20 +147,20 @@ class DockerApiTest(BaseAPIClientTest): def test_url_no_resource(self): url = self.client._url('/simple') - assert url == '{}{}'.format(url_prefix, 'simple') + assert url == f"{url_prefix}simple" def test_url_unversioned_api(self): url = self.client._url( '/hello/{0}/world', 'somename', versioned_api=False ) - assert url == '{}{}'.format(url_base, 'hello/somename/world') + assert url == f"{url_base}hello/somename/world" def test_version(self): self.client.version() fake_request.assert_called_with( 'GET', - url_prefix + 'version', + f"{url_prefix}version", timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -175,7 +169,7 @@ class DockerApiTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_base + 'version', + f"{url_base}version", timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -194,7 +188,7 @@ class DockerApiTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'info', + f"{url_prefix}info", timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -203,7 +197,7 @@ class DockerApiTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/search', + f"{url_prefix}images/search", params={'term': 'busybox'}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -212,7 +206,7 @@ class DockerApiTest(BaseAPIClientTest): self.client.login('sakuya', 'izayoi') args = fake_request.call_args assert args[0][0] == 'POST' - assert args[0][1] == url_prefix + 'auth' + assert args[0][1] == f"{url_prefix}auth" assert json.loads(args[1]['data']) == { 'username': 'sakuya', 'password': 'izayoi' } @@ -229,7 +223,7 @@ class DockerApiTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', 
- url_prefix + 'events', + f"{url_prefix}events", params={'since': None, 'until': None, 'filters': None}, stream=True, timeout=None @@ -245,7 +239,7 @@ class DockerApiTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'events', + f"{url_prefix}events", params={ 'since': ts - 10, 'until': ts + 10, @@ -264,7 +258,7 @@ class DockerApiTest(BaseAPIClientTest): expected_filters = docker.utils.convert_filters(filters) fake_request.assert_called_with( 'GET', - url_prefix + 'events', + f"{url_prefix}events", params={ 'since': None, 'until': None, @@ -318,7 +312,7 @@ class DockerApiTest(BaseAPIClientTest): fake_request.assert_called_with( 'DELETE', - url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID, + f"{url_prefix}containers/{fake_api.FAKE_CONTAINER_ID}", params={'v': False, 'link': True, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -332,15 +326,15 @@ class DockerApiTest(BaseAPIClientTest): self.client.create_host_config(security_opt='wrong') def test_stream_helper_decoding(self): - status_code, content = fake_api.fake_responses[url_prefix + 'events']() + status_code, content = fake_api.fake_responses[f"{url_prefix}events"]() content_str = json.dumps(content) content_str = content_str.encode('utf-8') body = io.BytesIO(content_str) # mock a stream interface raw_resp = urllib3.HTTPResponse(body=body) - setattr(raw_resp._fp, 'chunked', True) - setattr(raw_resp._fp, 'chunk_left', len(body.getvalue()) - 1) + raw_resp._fp.chunked = True + raw_resp._fp.chunk_left = len(body.getvalue()) - 1 # pass `decode=False` to the helper raw_resp._fp.seek(0) @@ -355,7 +349,7 @@ class DockerApiTest(BaseAPIClientTest): assert result == content # non-chunked response, pass `decode=False` to the helper - setattr(raw_resp._fp, 'chunked', False) + raw_resp._fp.chunked = False raw_resp._fp.seek(0) resp = response(status_code=status_code, content=content, raw=raw_resp) result = next(self.client._stream_helper(resp)) @@ -443,7 +437,7 @@ class 
UnixSocketStreamTest(unittest.TestCase): lines = [] for i in range(0, 50): line = str(i).encode() - lines += [('%x' % len(line)).encode(), line] + lines += [f'{len(line):x}'.encode(), line] lines.append(b'0') lines.append(b'') @@ -454,7 +448,7 @@ class UnixSocketStreamTest(unittest.TestCase): ) + b'\r\n'.join(lines) with APIClient( - base_url="http+unix://" + self.socket_file, + base_url=f"http+unix://{self.socket_file}", version=DEFAULT_DOCKER_API_VERSION) as client: for i in range(5): try: @@ -490,8 +484,7 @@ class TCPSocketStreamTest(unittest.TestCase): cls.thread = threading.Thread(target=cls.server.serve_forever) cls.thread.daemon = True cls.thread.start() - cls.address = 'http://{}:{}'.format( - socket.gethostname(), cls.server.server_address[1]) + cls.address = f'http://{socket.gethostname()}:{cls.server.server_address[1]}' @classmethod def teardown_class(cls): @@ -588,7 +581,7 @@ class TCPSocketStreamTest(unittest.TestCase): def test_read_from_socket_no_stream_no_tty(self): res = self.request(stream=False, tty=False, demux=False) - res == self.stdout_data + self.stderr_data + assert res == self.stdout_data + self.stderr_data def test_read_from_socket_no_stream_no_tty_demux(self): res = self.request(stream=False, tty=False, demux=True) @@ -600,7 +593,7 @@ class UserAgentTest(unittest.TestCase): self.patcher = mock.patch.object( APIClient, 'send', - return_value=fake_resp("GET", "%s/version" % fake_api.prefix) + return_value=fake_resp("GET", f"{fake_api.prefix}/version") ) self.mock_send = self.patcher.start() @@ -613,7 +606,7 @@ class UserAgentTest(unittest.TestCase): assert self.mock_send.call_count == 1 headers = self.mock_send.call_args[0][0].headers - expected = 'docker-sdk-python/%s' % docker.__version__ + expected = f'docker-sdk-python/{docker.__version__}' assert headers['User-Agent'] == expected def test_custom_user_agent(self): diff --git a/tests/unit/api_volume_test.py b/tests/unit/api_volume_test.py index a8d9193f..0a97ca51 100644 --- 
a/tests/unit/api_volume_test.py +++ b/tests/unit/api_volume_test.py @@ -14,7 +14,7 @@ class VolumeTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][0] == 'GET' - assert args[0][1] == url_prefix + 'volumes' + assert args[0][1] == f"{url_prefix}volumes" def test_list_volumes_and_filters(self): volumes = self.client.volumes(filters={'dangling': True}) @@ -23,7 +23,7 @@ class VolumeTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][0] == 'GET' - assert args[0][1] == url_prefix + 'volumes' + assert args[0][1] == f"{url_prefix}volumes" assert args[1] == {'params': {'filters': '{"dangling": ["true"]}'}, 'timeout': 60} @@ -37,7 +37,7 @@ class VolumeTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][0] == 'POST' - assert args[0][1] == url_prefix + 'volumes/create' + assert args[0][1] == f"{url_prefix}volumes/create" assert json.loads(args[1]['data']) == {'Name': name} @requires_api_version('1.23') @@ -63,7 +63,7 @@ class VolumeTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][0] == 'POST' - assert args[0][1] == url_prefix + 'volumes/create' + assert args[0][1] == f"{url_prefix}volumes/create" data = json.loads(args[1]['data']) assert 'Driver' in data assert data['Driver'] == driver_name diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index dd5b5f8b..0ed890fd 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -290,9 +290,10 @@ class LoadConfigTest(unittest.TestCase): folder = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, folder) - dockercfg_path = os.path.join(folder, - '.{}.dockercfg'.format( - random.randrange(100000))) + dockercfg_path = os.path.join( + folder, + f'.{random.randrange(100000)}.dockercfg', + ) registry = 'https://your.private.registry.io' auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') config = { @@ -777,8 +778,8 @@ class InMemoryStore(credentials.Store): def get(self, server): try: return self.__store[server] - except KeyError: - 
raise credentials.errors.CredentialsNotFound() + except KeyError as ke: + raise credentials.errors.CredentialsNotFound() from ke def store(self, server, username, secret): self.__store[server] = { diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index e7c7eec8..7012b212 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -85,10 +85,7 @@ class ClientTest(unittest.TestCase): mock_obj.return_value.urlopen.return_value.status = 200 client.ping() - base_url = "{base_url}/v{version}/_ping".format( - base_url=client.api.base_url, - version=client.api._version - ) + base_url = f"{client.api.base_url}/v{client.api._version}/_ping" mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", @@ -124,10 +121,7 @@ class ClientTest(unittest.TestCase): mock_obj.return_value.urlopen.return_value.status = 200 client.ping() - base_url = "{base_url}/v{version}/_ping".format( - base_url=client.api.base_url, - version=client.api._version - ) + base_url = f"{client.api.base_url}/v{client.api._version}/_ping" mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", @@ -159,7 +153,8 @@ class FromEnvTest(unittest.TestCase): self.os_environ = os.environ.copy() def tearDown(self): - os.environ = self.os_environ + os.environ.clear() + os.environ.update(self.os_environ) def test_from_env(self): """Test that environment variables are passed through to @@ -198,10 +193,7 @@ class FromEnvTest(unittest.TestCase): mock_obj.return_value.urlopen.return_value.status = 200 client.ping() - base_url = "{base_url}/v{version}/_ping".format( - base_url=client.api.base_url, - version=client.api._version - ) + base_url = f"{client.api.base_url}/v{client.api._version}/_ping" mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", @@ -235,10 +227,7 @@ class FromEnvTest(unittest.TestCase): mock_obj.return_value.urlopen.return_value.status = 200 client.ping() - base_url = "{base_url}/v{version}/_ping".format( - base_url=client.api.base_url, - 
version=client.api._version - ) + base_url = f"{client.api.base_url}/v{client.api._version}/_ping" mock_obj.assert_called_once_with(base_url, "/var/run/docker.sock", diff --git a/tests/unit/context_test.py b/tests/unit/context_test.py index 6d6d6726..25f0d8c6 100644 --- a/tests/unit/context_test.py +++ b/tests/unit/context_test.py @@ -13,7 +13,7 @@ class BaseContextTest(unittest.TestCase): ) def test_url_compatibility_on_linux(self): c = Context("test") - assert c.Host == DEFAULT_UNIX_SOCKET.strip("http+") + assert c.Host == DEFAULT_UNIX_SOCKET[5:] @pytest.mark.skipif( not IS_WINDOWS_PLATFORM, reason='Windows specific path check' @@ -45,5 +45,7 @@ class BaseContextTest(unittest.TestCase): ctx = ContextAPI.inspect_context() assert ctx["Name"] == "default" assert ctx["Metadata"]["StackOrchestrator"] == "swarm" - assert ctx["Endpoints"]["docker"]["Host"] in [ - DEFAULT_NPIPE, DEFAULT_UNIX_SOCKET.strip("http+")] + assert ctx["Endpoints"]["docker"]["Host"] in ( + DEFAULT_NPIPE, + DEFAULT_UNIX_SOCKET[5:], + ) diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py index 6acfb64b..87d89275 100644 --- a/tests/unit/fake_api.py +++ b/tests/unit/fake_api.py @@ -4,10 +4,10 @@ from . 
import fake_stat CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}' -FAKE_CONTAINER_ID = '81cf499cc928ce3fedc250a080d2b9b978df20e4517304c45211e8a68b33e254' # noqa: E501 +FAKE_CONTAINER_ID = '81cf499cc928ce3fedc250a080d2b9b978df20e4517304c45211e8a68b33e254' FAKE_IMAGE_ID = 'sha256:fe7a8fc91d3f17835cbb3b86a1c60287500ab01a53bc79c4497d09f07a3f0688' # noqa: E501 -FAKE_EXEC_ID = 'b098ec855f10434b5c7c973c78484208223a83f663ddaefb0f02a242840cb1c7' # noqa: E501 -FAKE_NETWORK_ID = '1999cfb42e414483841a125ade3c276c3cb80cb3269b14e339354ac63a31b02c' # noqa: E501 +FAKE_EXEC_ID = 'b098ec855f10434b5c7c973c78484208223a83f663ddaefb0f02a242840cb1c7' +FAKE_NETWORK_ID = '1999cfb42e414483841a125ade3c276c3cb80cb3269b14e339354ac63a31b02c' FAKE_IMAGE_NAME = 'test_image' FAKE_TARBALL_PATH = '/path/to/tarball' FAKE_REPO_NAME = 'repo' @@ -617,17 +617,11 @@ fake_responses = { get_fake_volume_list, (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'): get_fake_volume, - ('{1}/{0}/volumes/{2}'.format( - CURRENT_VERSION, prefix, FAKE_VOLUME_NAME - ), 'GET'): + (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'GET'): get_fake_volume, - ('{1}/{0}/volumes/{2}'.format( - CURRENT_VERSION, prefix, FAKE_VOLUME_NAME - ), 'DELETE'): + (f'{prefix}/{CURRENT_VERSION}/volumes/{FAKE_VOLUME_NAME}', 'DELETE'): fake_remove_volume, - ('{1}/{0}/nodes/{2}/update?version=1'.format( - CURRENT_VERSION, prefix, FAKE_NODE_ID - ), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/nodes/{FAKE_NODE_ID}/update?version=1', 'POST'): post_fake_update_node, (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'): post_fake_join_swarm, @@ -635,21 +629,13 @@ fake_responses = { get_fake_network_list, (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'): post_fake_network, - ('{1}/{0}/networks/{2}'.format( - CURRENT_VERSION, prefix, FAKE_NETWORK_ID - ), 'GET'): + (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'GET'): get_fake_network, - ('{1}/{0}/networks/{2}'.format( - CURRENT_VERSION, prefix, FAKE_NETWORK_ID 
- ), 'DELETE'): + (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}', 'DELETE'): delete_fake_network, - ('{1}/{0}/networks/{2}/connect'.format( - CURRENT_VERSION, prefix, FAKE_NETWORK_ID - ), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/connect', 'POST'): post_fake_network_connect, - ('{1}/{0}/networks/{2}/disconnect'.format( - CURRENT_VERSION, prefix, FAKE_NETWORK_ID - ), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/networks/{FAKE_NETWORK_ID}/disconnect', 'POST'): post_fake_network_disconnect, f'{prefix}/{CURRENT_VERSION}/secrets/create': post_fake_secret, diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index 0592af5e..2eabd268 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -31,77 +31,77 @@ class ContainerCollectionTest(unittest.TestCase): ) def test_create_container_args(self): - create_kwargs = _create_container_args(dict( - image='alpine', - command='echo hello world', - blkio_weight_device=[{'Path': 'foo', 'Weight': 3}], - blkio_weight=2, - cap_add=['foo'], - cap_drop=['bar'], - cgroup_parent='foobar', - cgroupns='host', - cpu_period=1, - cpu_quota=2, - cpu_shares=5, - cpuset_cpus='0-3', - detach=False, - device_read_bps=[{'Path': 'foo', 'Rate': 3}], - device_read_iops=[{'Path': 'foo', 'Rate': 3}], - device_write_bps=[{'Path': 'foo', 'Rate': 3}], - device_write_iops=[{'Path': 'foo', 'Rate': 3}], - devices=['/dev/sda:/dev/xvda:rwm'], - dns=['8.8.8.8'], - domainname='example.com', - dns_opt=['foo'], - dns_search=['example.com'], - entrypoint='/bin/sh', - environment={'FOO': 'BAR'}, - extra_hosts={'foo': '1.2.3.4'}, - group_add=['blah'], - ipc_mode='foo', - kernel_memory=123, - labels={'key': 'value'}, - links={'foo': 'bar'}, - log_config={'Type': 'json-file', 'Config': {}}, - lxc_conf={'foo': 'bar'}, - healthcheck={'test': 'true'}, - hostname='somehost', - mac_address='abc123', - mem_limit=123, - mem_reservation=123, - mem_swappiness=2, - 
memswap_limit=456, - name='somename', - network_disabled=False, - network='foo', - network_driver_opt={'key1': 'a'}, - oom_kill_disable=True, - oom_score_adj=5, - pid_mode='host', - pids_limit=500, - platform='linux', - ports={ + create_kwargs = _create_container_args({ + "image": 'alpine', + "command": 'echo hello world', + "blkio_weight_device": [{'Path': 'foo', 'Weight': 3}], + "blkio_weight": 2, + "cap_add": ['foo'], + "cap_drop": ['bar'], + "cgroup_parent": 'foobar', + "cgroupns": 'host', + "cpu_period": 1, + "cpu_quota": 2, + "cpu_shares": 5, + "cpuset_cpus": '0-3', + "detach": False, + "device_read_bps": [{'Path': 'foo', 'Rate': 3}], + "device_read_iops": [{'Path': 'foo', 'Rate': 3}], + "device_write_bps": [{'Path': 'foo', 'Rate': 3}], + "device_write_iops": [{'Path': 'foo', 'Rate': 3}], + "devices": ['/dev/sda:/dev/xvda:rwm'], + "dns": ['8.8.8.8'], + "domainname": 'example.com', + "dns_opt": ['foo'], + "dns_search": ['example.com'], + "entrypoint": '/bin/sh', + "environment": {'FOO': 'BAR'}, + "extra_hosts": {'foo': '1.2.3.4'}, + "group_add": ['blah'], + "ipc_mode": 'foo', + "kernel_memory": 123, + "labels": {'key': 'value'}, + "links": {'foo': 'bar'}, + "log_config": {'Type': 'json-file', 'Config': {}}, + "lxc_conf": {'foo': 'bar'}, + "healthcheck": {'test': 'true'}, + "hostname": 'somehost', + "mac_address": 'abc123', + "mem_limit": 123, + "mem_reservation": 123, + "mem_swappiness": 2, + "memswap_limit": 456, + "name": 'somename', + "network_disabled": False, + "network": 'foo', + "network_driver_opt": {'key1': 'a'}, + "oom_kill_disable": True, + "oom_score_adj": 5, + "pid_mode": 'host', + "pids_limit": 500, + "platform": 'linux', + "ports": { 1111: 4567, 2222: None }, - privileged=True, - publish_all_ports=True, - read_only=True, - restart_policy={'Name': 'always'}, - security_opt=['blah'], - shm_size=123, - stdin_open=True, - stop_signal=9, - sysctls={'foo': 'bar'}, - tmpfs={'/blah': ''}, - tty=True, - ulimits=[{"Name": "nofile", "Soft": 1024, "Hard": 
2048}], - user='bob', - userns_mode='host', - uts_mode='host', - version='1.23', - volume_driver='some_driver', - volumes=[ + "privileged": True, + "publish_all_ports": True, + "read_only": True, + "restart_policy": {'Name': 'always'}, + "security_opt": ['blah'], + "shm_size": 123, + "stdin_open": True, + "stop_signal": 9, + "sysctls": {'foo': 'bar'}, + "tmpfs": {'/blah': ''}, + "tty": True, + "ulimits": [{"Name": "nofile", "Soft": 1024, "Hard": 2048}], + "user": 'bob', + "userns_mode": 'host', + "uts_mode": 'host', + "version": '1.23', + "volume_driver": 'some_driver', + "volumes": [ '/home/user1/:/mnt/vol2', '/var/www:/mnt/vol1:ro', 'volumename:/mnt/vol3r', @@ -109,18 +109,18 @@ class ContainerCollectionTest(unittest.TestCase): '/anothervolumewithnohostpath:ro', 'C:\\windows\\path:D:\\hello\\world:rw' ], - volumes_from=['container'], - working_dir='/code' - )) + "volumes_from": ['container'], + "working_dir": '/code' + }) - expected = dict( - image='alpine', - command='echo hello world', - domainname='example.com', - detach=False, - entrypoint='/bin/sh', - environment={'FOO': 'BAR'}, - host_config={ + expected = { + "image": 'alpine', + "command": 'echo hello world', + "domainname": 'example.com', + "detach": False, + "entrypoint": '/bin/sh', + "environment": {'FOO': 'BAR'}, + "host_config": { 'Binds': [ '/home/user1/:/mnt/vol2', '/var/www:/mnt/vol1:ro', @@ -183,20 +183,20 @@ class ContainerCollectionTest(unittest.TestCase): 'VolumeDriver': 'some_driver', 'VolumesFrom': ['container'], }, - healthcheck={'test': 'true'}, - hostname='somehost', - labels={'key': 'value'}, - mac_address='abc123', - name='somename', - network_disabled=False, - networking_config={'foo': {'driver_opt': {'key1': 'a'}}}, - platform='linux', - ports=[('1111', 'tcp'), ('2222', 'tcp')], - stdin_open=True, - stop_signal=9, - tty=True, - user='bob', - volumes=[ + "healthcheck": {'test': 'true'}, + "hostname": 'somehost', + "labels": {'key': 'value'}, + "mac_address": 'abc123', + "name": 
'somename', + "network_disabled": False, + "networking_config": {'foo': {'driver_opt': {'key1': 'a'}}}, + "platform": 'linux', + "ports": [('1111', 'tcp'), ('2222', 'tcp')], + "stdin_open": True, + "stop_signal": 9, + "tty": True, + "user": 'bob', + "volumes": [ '/mnt/vol2', '/mnt/vol1', '/mnt/vol3r', @@ -204,8 +204,8 @@ class ContainerCollectionTest(unittest.TestCase): '/anothervolumewithnohostpath', 'D:\\hello\\world' ], - working_dir='/code' - ) + "working_dir": '/code' + } assert create_kwargs == expected diff --git a/tests/unit/swarm_test.py b/tests/unit/swarm_test.py index aee1b9e8..3fc7c68c 100644 --- a/tests/unit/swarm_test.py +++ b/tests/unit/swarm_test.py @@ -20,7 +20,7 @@ class SwarmTest(BaseAPIClientTest): ) args = fake_request.call_args assert args[0][1] == ( - url_prefix + 'nodes/24ifsmvkjbyhk/update?version=1' + f"{url_prefix}nodes/24ifsmvkjbyhk/update?version=1" ) assert json.loads(args[1]['data']) == node_spec assert args[1]['headers']['Content-Type'] == 'application/json' @@ -45,7 +45,7 @@ class SwarmTest(BaseAPIClientTest): args = fake_request.call_args - assert (args[0][1] == url_prefix + 'swarm/join') + assert (args[0][1] == f"{url_prefix}swarm/join") assert (json.loads(args[1]['data']) == data) assert (args[1]['headers']['Content-Type'] == 'application/json') @@ -64,6 +64,6 @@ class SwarmTest(BaseAPIClientTest): args = fake_request.call_args - assert (args[0][1] == url_prefix + 'swarm/join') + assert (args[0][1] == f"{url_prefix}swarm/join") assert (json.loads(args[1]['data']) == data) assert (args[1]['headers']['Content-Type'] == 'application/json') diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 12cb7bd6..b47cb0c6 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -59,7 +59,8 @@ class KwargsFromEnvTest(unittest.TestCase): self.os_environ = os.environ.copy() def tearDown(self): - os.environ = self.os_environ + os.environ.clear() + os.environ.update(self.os_environ) def 
test_kwargs_from_env_empty(self): os.environ.update(DOCKER_HOST='', @@ -486,9 +487,9 @@ class PortsTest(unittest.TestCase): def test_split_port_with_protocol(self): for protocol in ['tcp', 'udp', 'sctp']: internal_port, external_port = split_port( - "127.0.0.1:1000:2000/" + protocol + f"127.0.0.1:1000:2000/{protocol}" ) - assert internal_port == ["2000/" + protocol] + assert internal_port == [f"2000/{protocol}"] assert external_port == [("127.0.0.1", "1000")] def test_split_port_with_host_ip_no_port(self): diff --git a/tox.ini b/tox.ini index 9edc15c5..2028dd39 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{37,38,39,310,311}, flake8 +envlist = py{37,38,39,310,311}, ruff skipsdist=True [testenv] @@ -10,7 +10,7 @@ deps = -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt -[testenv:flake8] -commands = flake8 docker tests setup.py +[testenv:ruff] +commands = ruff docker tests setup.py deps = -r{toxinidir}/test-requirements.txt