From 33b8fd6eecae3a5dc2c9476a409cf894354bf994 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 26 Mar 2019 15:15:40 +0100 Subject: [PATCH 001/211] Xfail test_attach_stream_and_cancel on TLS This test is quite flaky on ssl integration test Signed-off-by: Ulysses Souza --- tests/integration/api_container_test.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 83df3424..558441e0 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1080,7 +1080,6 @@ class KillTest(BaseAPIIntegrationTest): class PortTest(BaseAPIIntegrationTest): def test_port(self): - port_bindings = { '1111': ('127.0.0.1', '4567'), '2222': ('127.0.0.1', '4568') @@ -1260,6 +1259,9 @@ class AttachContainerTest(BaseAPIIntegrationTest): @pytest.mark.timeout(5) @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), reason='No cancellable streams over SSH') + @pytest.mark.xfail(condition=os.environ.get('DOCKER_TLS_VERIFY') or + os.environ.get('DOCKER_CERT_PATH'), + reason='Flaky test on TLS') def test_attach_stream_and_cancel(self): container = self.client.create_container( BUSYBOX, 'sh -c "echo hello && sleep 60"', From b05bfd7b22dd23e425cbe9838957e0f3460d2417 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 26 Mar 2019 12:07:25 +0100 Subject: [PATCH 002/211] Fix base_url to keep TCP protocol This fix lets the responsability of changing the protocol to `parse_host` afterwards, letting `base_url` with the original value. 
Signed-off-by: Ulysses Souza --- docker/utils/utils.py | 4 +--- tests/unit/utils_test.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 61e307ad..7819ace4 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -352,9 +352,7 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None): params = {} if host: - params['base_url'] = ( - host.replace('tcp://', 'https://') if enable_tls else host - ) + params['base_url'] = host if not enable_tls: return params diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index a4e9c9c5..ee660a36 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -11,6 +11,7 @@ import unittest from docker.api.client import APIClient +from docker.constants import IS_WINDOWS_PLATFORM from docker.errors import DockerException from docker.utils import ( convert_filters, convert_volume_binds, decode_json_header, kwargs_from_env, @@ -83,15 +84,17 @@ class KwargsFromEnvTest(unittest.TestCase): DOCKER_CERT_PATH=TEST_CERT_DIR, DOCKER_TLS_VERIFY='1') kwargs = kwargs_from_env(assert_hostname=False) - assert 'https://192.168.59.103:2376' == kwargs['base_url'] + assert 'tcp://192.168.59.103:2376' == kwargs['base_url'] assert 'ca.pem' in kwargs['tls'].ca_cert assert 'cert.pem' in kwargs['tls'].cert[0] assert 'key.pem' in kwargs['tls'].cert[1] assert kwargs['tls'].assert_hostname is False assert kwargs['tls'].verify + + parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True) try: client = APIClient(**kwargs) - assert kwargs['base_url'] == client.base_url + assert parsed_host == client.base_url assert kwargs['tls'].ca_cert == client.verify assert kwargs['tls'].cert == client.cert except TypeError as e: @@ -102,15 +105,16 @@ class KwargsFromEnvTest(unittest.TestCase): DOCKER_CERT_PATH=TEST_CERT_DIR, DOCKER_TLS_VERIFY='') kwargs = kwargs_from_env(assert_hostname=True) - assert 
'https://192.168.59.103:2376' == kwargs['base_url'] + assert 'tcp://192.168.59.103:2376' == kwargs['base_url'] assert 'ca.pem' in kwargs['tls'].ca_cert assert 'cert.pem' in kwargs['tls'].cert[0] assert 'key.pem' in kwargs['tls'].cert[1] assert kwargs['tls'].assert_hostname is True assert kwargs['tls'].verify is False + parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True) try: client = APIClient(**kwargs) - assert kwargs['base_url'] == client.base_url + assert parsed_host == client.base_url assert kwargs['tls'].cert == client.cert assert not kwargs['tls'].verify except TypeError as e: From cd59491b9a595a70e9a5b33cd0e09d540c738ee2 Mon Sep 17 00:00:00 2001 From: Ian Campbell Date: Thu, 28 Mar 2019 11:42:02 +0000 Subject: [PATCH 003/211] scripts/version.py: Use regex grouping to extract the version The `lstrip` and `rstrip` functions take a set of characters to remove, not a prefix/suffix. Thus `rstrip('-x86_64')` will remove any trailing characters in the string `'-x86_64'` in any order (in effect it strips the suffix matching the regex `[-_x468]*`). So with `18.09.4` it removes the `4` suffix resulting in trying to `int('')` later on: Traceback (most recent call last): File "/src/scripts/versions.py", line 80, in main() File "/src/scripts/versions.py", line 73, in main versions, reverse=True, key=operator.attrgetter('order') File "/src/scripts/versions.py", line 52, in order return (int(self.major), int(self.minor), int(self.patch)) + stage ValueError: invalid literal for int() with base 10: '' Since we no longer need to check for the arch suffix (since it no longer appears in the URLs we are traversing) we could just drop the `rstrip` and invent a local prefix stripping helper to replace `lstrip('docker-')`. Instead lets take advantage of the behaviour of `re.findall` which is that if the regex contains a single `()` match that will be returned. This lets us match exactly the sub-section of the regex we require. 
While editing the regex, also ensure that the suffix is precisely `.tgz` and not merely `tgz` by adding an explicit `\.`, previously the literal `.` would be swallowed by the `.*` instead. Signed-off-by: Ian Campbell --- scripts/versions.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/scripts/versions.py b/scripts/versions.py index 7ad1d56a..93fe0d7f 100644 --- a/scripts/versions.py +++ b/scripts/versions.py @@ -62,13 +62,9 @@ def main(): for url in [base_url.format(cat) for cat in categories]: res = requests.get(url) content = res.text - versions = [ - Version.parse( - v.strip('"').lstrip('docker-').rstrip('.tgz').rstrip('-x86_64') - ) for v in re.findall( - r'"docker-[0-9]+\.[0-9]+\.[0-9]+-?.*tgz"', content - ) - ] + versions = [Version.parse(v) for v in re.findall( + r'"docker-([0-9]+\.[0-9]+\.[0-9]+)-?.*tgz"', content + )] sorted_versions = sorted( versions, reverse=True, key=operator.attrgetter('order') ) From 8f2d9a5687c2358cea364a1d4d64fb1c8d0af7c4 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 28 Mar 2019 14:23:19 +0100 Subject: [PATCH 004/211] Bump 3.7.2 Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- docs/change-log.md | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 249475f4..8f81f0d5 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "3.7.1" +version = "3.7.2" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index 9edfee2f..d7c33611 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,17 @@ Change log ========== +3.7.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/59?closed=1) + +### Bugfixes + +* Fix base_url to keep TCP protocol on utils.py by letting the responsability of changing the +protocol to `parse_host` afterwards, letting `base_url` with the original 
value. +* XFAIL test_attach_stream_and_cancel on TLS + 3.7.1 ----- From 12d73c6d381760840256a4cbc768933555abb512 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 26 Mar 2019 15:15:40 +0100 Subject: [PATCH 005/211] Xfail test_attach_stream_and_cancel on TLS This test is quite flaky on ssl integration test Signed-off-by: Ulysses Souza --- tests/integration/api_container_test.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 1190d91e..26245c1f 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1252,6 +1252,9 @@ class AttachContainerTest(BaseAPIIntegrationTest): @pytest.mark.timeout(10) @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), reason='No cancellable streams over SSH') + @pytest.mark.xfail(condition=os.environ.get('DOCKER_TLS_VERIFY') or + os.environ.get('DOCKER_CERT_PATH'), + reason='Flaky test on TLS') def test_attach_stream_and_cancel(self): container = self.client.create_container( BUSYBOX, 'sh -c "sleep 2 && echo hello && sleep 60"', From d863f729398911e2f918a1bc822b8f4f32151783 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 28 Mar 2019 14:23:19 +0100 Subject: [PATCH 006/211] Bump 3.7.2 Signed-off-by: Ulysses Souza --- docs/change-log.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index 9edfee2f..d7c33611 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,17 @@ Change log ========== +3.7.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/59?closed=1) + +### Bugfixes + +* Fix base_url to keep TCP protocol on utils.py by letting the responsability of changing the +protocol to `parse_host` afterwards, letting `base_url` with the original value. 
+* XFAIL test_attach_stream_and_cancel on TLS + 3.7.1 ----- From f6781575c12a8a9aebe1e1ccee4716eaabf88b3d Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 19:58:18 -0700 Subject: [PATCH 007/211] Bump version 4.0.0 Signed-off-by: Joffrey F --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 93d068eb..68d64c8a 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.0.0-dev" +version = "4.0.0" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 589e76ea3c13d469f141bf89aba3f142789da0ba Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 20:38:33 -0700 Subject: [PATCH 008/211] Update changelog for 4.0.0 Signed-off-by: Joffrey F --- docs/change-log.md | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index d7c33611..53e9f207 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,43 @@ Change log ========== +4.0.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/57?closed=1) + +### Breaking changes + +- Support for Python 3.3 and Python 3.4 has been dropped +- `APIClient.update_service`, `APIClient.init_swarm`, and + `DockerClient.swarm.init` now return a `dict` from the API's response body +- In `APIClient.build` and `DockerClient.images.build`, the `use_config_proxy` + parameter now defaults to True +- `init_path` is no longer a valid parameter for `HostConfig` + +### Features + +- It is now possible to provide `SCTP` ports for port mappings +- `ContainerSpec`s now support the `init` parameter +- `DockerClient.swarm.init` and `APIClient.init_swarm` now support the + `data_path_addr` parameter +- `APIClient.update_swarm` and `DockerClient.swarm.update` now support the + `rotate_manager_unlock_key` parameter +- `APIClient.update_service` returns the API's response body as a 
`dict` +- `APIClient.init_swarm`, and `DockerClient.swarm.init` now return the API's + response body as a `dict` + +### Bugfixes + +- Fixed `PlacementPreference` instances to produce a valid API type +- Fixed a bug where not setting a value for `buildargs` in `build` could cause + the library to attempt accessing attributes of a `None` value +- Fixed a bug where setting the `volume_driver` parameter in + `DockerClient.containers.create` would result in an error +- `APIClient.inspect_distribution` now correctly sets the authentication + headers on the request, allowing it to be used with private repositories + This change also applies to `DockerClient.get_registry_data` + 3.7.2 ----- From 5de5af115563d2f9a647bdeb4234fa440b2da58c Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 14 May 2019 13:16:27 +0200 Subject: [PATCH 009/211] Bump urllib3 -> 1.24.3 Signed-off-by: Ulysses Souza --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index eb66c9f5..70f37e20 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,5 +15,5 @@ pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' requests==2.20.0 six==1.10.0 -urllib3==1.24.1 +urllib3==1.24.3 websocket-client==0.40.0 From 80f68c81cd86447d3a9ed01049f353c32bb6adb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Batuhan=20Ta=C5=9Fkaya?= Date: Thu, 9 May 2019 20:29:11 +0300 Subject: [PATCH 010/211] reference swarm page correctly MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Batuhan Taşkaya --- docs/user_guides/swarm_services.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/user_guides/swarm_services.md b/docs/user_guides/swarm_services.md index 369fbed0..5c3a80d2 100644 --- a/docs/user_guides/swarm_services.md +++ b/docs/user_guides/swarm_services.md @@ -6,7 +6,7 @@ Starting with Engine 
version 1.12 (API 1.24), it is possible to manage services using the Docker Engine API. Note that the engine needs to be part of a -[Swarm cluster](../swarm.rst) before you can use the service-related methods. +[Swarm cluster](../swarm.html) before you can use the service-related methods. ## Creating a service @@ -66,4 +66,4 @@ Either the service name or service ID can be used as argument. ```python client.remove_service('my_service_name') -``` \ No newline at end of file +``` From 4a8a86eed41926781a0409b743c59f1fc675a497 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Sat, 18 May 2019 18:57:30 -0700 Subject: [PATCH 011/211] Add readthedocs config Signed-off-by: Joffrey F --- .readthedocs.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .readthedocs.yml diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..7679f80a --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,10 @@ +version: 2 + +sphinx: + configuration: docs/conf.py + +python: + version: 3.5 + install: + - requirements: docs-requirements.txt + - requirements: requirements.txt From df182fd42d0919fe5812ffcce15c54499c594c44 Mon Sep 17 00:00:00 2001 From: Simon Gurcke Date: Sun, 19 May 2019 12:20:12 +1000 Subject: [PATCH 012/211] Change os.errno to errno for py3.7 compatibility Signed-off-by: Simon Gurcke --- docker/credentials/store.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/credentials/store.py b/docker/credentials/store.py index 3f51e4a7..00178889 100644 --- a/docker/credentials/store.py +++ b/docker/credentials/store.py @@ -1,5 +1,5 @@ +import errno import json -import os import subprocess import six @@ -84,7 +84,7 @@ class Store(object): [self.exe, subcmd], stdin=subprocess.PIPE, stdout=subprocess.PIPE, env=env, ) - output, err = process.communicate(data_input) + output, _ = process.communicate(data_input) if process.returncode != 0: raise subprocess.CalledProcessError( returncode=process.returncode, cmd='', 
output=output @@ -92,7 +92,7 @@ class Store(object): except subprocess.CalledProcessError as e: raise errors.process_store_error(e, self.program) except OSError as e: - if e.errno == os.errno.ENOENT: + if e.errno == errno.ENOENT: raise errors.StoreError( '{} not installed or not available in PATH'.format( self.program From fc0285c09b4285129a5273cc01356e6178f7fbff Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Sat, 18 May 2019 19:28:39 -0700 Subject: [PATCH 013/211] Version bump Signed-off-by: Joffrey F --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 68d64c8a..21249253 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.0.0" +version = "4.1.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 4d08f2c33d0cd15413c5dd0fd5a16e22436b3e81 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Sat, 18 May 2019 19:51:14 -0700 Subject: [PATCH 014/211] Bump 4.0.1 Signed-off-by: Joffrey F --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 21249253..24731263 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.1.0-dev" +version = "4.0.1" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 307e2b3eda7198cbc991a8fbc2bce25b08e9eb9f Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Sat, 18 May 2019 19:53:53 -0700 Subject: [PATCH 015/211] Changelog 4.0.1 Signed-off-by: Joffrey F --- docs/change-log.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index 53e9f207..4032249b 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,20 @@ Change log ========== +4.0.1 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/60?closed=1) + +### Bugfixes + +- Fixed an obsolete import in the `credentials` 
subpackage that caused import errors in + Python 3.7 + +### Miscellaneous + +- Docs building has been repaired + 4.0.0 ----- From 1ef822afee898b968476d0a8fff31e8455f4066d Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Sat, 18 May 2019 20:28:19 -0700 Subject: [PATCH 016/211] dev version Signed-off-by: Joffrey F --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 24731263..21249253 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.0.1" +version = "4.1.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From c5ca2ef85eac15ef38d98abaf28e97dd2f04822f Mon Sep 17 00:00:00 2001 From: Kajetan Champlewski Date: Fri, 24 May 2019 15:34:30 +0000 Subject: [PATCH 017/211] Fix documentation for inspect_secret referring to removal. Signed-off-by: Kajetan Champlewski --- docker/api/secret.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/api/secret.py b/docker/api/secret.py index fa4c2ab8..e57952b5 100644 --- a/docker/api/secret.py +++ b/docker/api/secret.py @@ -53,7 +53,7 @@ class SecretApiMixin(object): Retrieve secret metadata Args: - id (string): Full ID of the secret to remove + id (string): Full ID of the secret to inspect Returns (dict): A dictionary of metadata From 241aaaab238c54b07575c1c7bef8b321f4cd0fc3 Mon Sep 17 00:00:00 2001 From: Kajetan Champlewski Date: Fri, 31 May 2019 09:11:20 +0000 Subject: [PATCH 018/211] Handle str in setter for test. 
Signed-off-by: Kajetan Champlewski --- docker/types/healthcheck.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/types/healthcheck.py b/docker/types/healthcheck.py index 61857c21..919c4fd7 100644 --- a/docker/types/healthcheck.py +++ b/docker/types/healthcheck.py @@ -53,6 +53,8 @@ class Healthcheck(DictType): @test.setter def test(self, value): + if isinstance(value, six.string_types): + value = ["CMD-SHELL", value] self['Test'] = value @property From 1f38d270e0ba0219ad8400c8f02f678ed3b90b47 Mon Sep 17 00:00:00 2001 From: Kajetan Champlewski Date: Fri, 31 May 2019 09:13:30 +0000 Subject: [PATCH 019/211] Clean up healtcheck.py docs Signed-off-by: Kajetan Champlewski --- docker/types/healthcheck.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/types/healthcheck.py b/docker/types/healthcheck.py index 919c4fd7..9815018d 100644 --- a/docker/types/healthcheck.py +++ b/docker/types/healthcheck.py @@ -14,7 +14,7 @@ class Healthcheck(DictType): - Empty list: Inherit healthcheck from parent image - ``["NONE"]``: Disable healthcheck - ``["CMD", args...]``: exec arguments directly. - - ``["CMD-SHELL", command]``: RUn command in the system's + - ``["CMD-SHELL", command]``: Run command in the system's default shell. If a string is provided, it will be used as a ``CMD-SHELL`` @@ -23,9 +23,9 @@ class Healthcheck(DictType): should be 0 or at least 1000000 (1 ms). timeout (int): The time to wait before considering the check to have hung. It should be 0 or at least 1000000 (1 ms). - retries (integer): The number of consecutive failures needed to + retries (int): The number of consecutive failures needed to consider a container as unhealthy. - start_period (integer): Start period for the container to + start_period (int): Start period for the container to initialize before starting health-retries countdown in nanoseconds. It should be 0 or at least 1000000 (1 ms). 
""" From a821502b9ecf4dad26df0201a5c95111b00e42c3 Mon Sep 17 00:00:00 2001 From: Djordje Lukic Date: Wed, 19 Jun 2019 14:09:47 +0200 Subject: [PATCH 020/211] Bump websocket-client -> 0.56.0 Signed-off-by: Djordje Lukic --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 70f37e20..804a78a0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,4 +16,4 @@ pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' requests==2.20.0 six==1.10.0 urllib3==1.24.3 -websocket-client==0.40.0 +websocket-client==0.56.0 From 805f5f4b38cedce36b2037d66bf1a6f99982c017 Mon Sep 17 00:00:00 2001 From: Djordje Lukic Date: Thu, 20 Jun 2019 12:58:09 +0200 Subject: [PATCH 021/211] Bump 4.0.2 Signed-off-by: Djordje Lukic --- docker/version.py | 2 +- docs/change-log.md | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 21249253..25c92501 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.1.0-dev" +version = "4.0.2" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index 4032249b..b10cfd54 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,20 @@ Change log ========== +4.0.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/62?closed=1) + +### Bugfixes + +- Unified the way `HealthCheck` is created/configured + +### Miscellaneous + +- Bumped version of websocket-client + + 4.0.1 ----- From c2ed66552bf5ff49199670f920ec5034f0a4c6ae Mon Sep 17 00:00:00 2001 From: Michael Crosby Date: Tue, 25 Jun 2019 13:08:39 -0400 Subject: [PATCH 022/211] Remove exec detach test Forking off an exec process and detaching isn't a supported method Signed-off-by: Michael Crosby --- tests/integration/api_exec_test.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git 
a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index dda0ed90..53b7e22f 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -226,24 +226,6 @@ class ExecTest(BaseAPIIntegrationTest): assert_cat_socket_detached_with_keys(sock, [ctrl_with('p')]) - def test_detach_with_arg(self): - self.client._general_configs['detachKeys'] = 'ctrl-p' - container = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True - ) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - exec_id = self.client.exec_create( - id, 'cat', - stdin=True, tty=True, detach_keys='ctrl-x', stdout=True - ) - sock = self.client.exec_start(exec_id, tty=True, socket=True) - self.addCleanup(sock.close) - - assert_cat_socket_detached_with_keys(sock, [ctrl_with('x')]) - class ExecDemuxTest(BaseAPIIntegrationTest): cmd = 'sh -c "{}"'.format(' ; '.join([ From f3961244a0473305b56e8ffe27d53a6e7902e8bc Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 12 Jul 2019 01:28:41 +0200 Subject: [PATCH 023/211] Update credentials-helpers to v0.6.2 Signed-off-by: Sebastiaan van Stijn --- tests/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/Dockerfile b/tests/Dockerfile index 042fc703..8f49cd2c 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -10,7 +10,7 @@ RUN gpg2 --import gpg-keys/secret RUN gpg2 --import-ownertrust gpg-keys/ownertrust RUN yes | pass init $(gpg2 --no-auto-check-trustdb --list-secret-keys | grep ^sec | cut -d/ -f2 | cut -d" " -f1) RUN gpg2 --check-trustdb -ARG CREDSTORE_VERSION=v0.6.0 +ARG CREDSTORE_VERSION=v0.6.2 RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \ https://github.com/docker/docker-credential-helpers/releases/download/$CREDSTORE_VERSION/docker-credential-pass-$CREDSTORE_VERSION-amd64.tar.gz && \ tar -xf /opt/docker-credential-pass.tar.gz -O > /usr/local/bin/docker-credential-pass && \ From 
546bc63244941e8aa22a408635d0bff554b1702b Mon Sep 17 00:00:00 2001 From: Djordje Lukic Date: Thu, 20 Jun 2019 13:34:03 +0200 Subject: [PATCH 024/211] Bump dev Signed-off-by: Djordje Lukic --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 25c92501..21249253 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.0.2" +version = "4.1.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From ea4fbd7ddf5ff1b7a9b4a1900522d51537387156 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 12 Jul 2019 18:53:34 +0200 Subject: [PATCH 025/211] Update to python 3.7 (buster) and use build-args The build arg can be used to either test different versions, but also makes it easier to "grep" when upgrading versions. The output format of `gpg2 --list-secret-keys` changed in the version installed on Buster, so `grep` was replaced with `awk` to address the new output format; Debian Jessie: gpg2 --no-auto-check-trustdb --list-secret-keys /root/.gnupg/secring.gpg ------------------------ sec 1024D/A7B21401 2018-04-25 uid Sakuya Izayoi ssb 1024g/C235E4CE 2018-04-25 Debian Buster: gpg2 --no-auto-check-trustdb --list-secret-keys /root/.gnupg/pubring.kbx ------------------------ sec dsa1024 2018-04-25 [SCA] 9781B87DAB042E6FD51388A5464ED987A7B21401 uid [ultimate] Sakuya Izayoi ssb elg1024 2018-04-25 [E] Signed-off-by: Sebastiaan van Stijn --- Dockerfile | 4 +++- Dockerfile-docs | 4 +++- Dockerfile-py3 | 4 +++- tests/Dockerfile | 7 ++++--- tests/Dockerfile-dind-certs | 4 +++- 5 files changed, 16 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 82758daf..124f68cd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,6 @@ -FROM python:2.7 +ARG PYTHON_VERSION=2.7 + +FROM python:${PYTHON_VERSION} RUN mkdir /src WORKDIR /src diff --git a/Dockerfile-docs b/Dockerfile-docs index 105083e8..9d11312f 100644 --- a/Dockerfile-docs +++ 
b/Dockerfile-docs @@ -1,4 +1,6 @@ -FROM python:3.5 +ARG PYTHON_VERSION=3.7 + +FROM python:${PYTHON_VERSION} ARG uid=1000 ARG gid=1000 diff --git a/Dockerfile-py3 b/Dockerfile-py3 index d558ba3e..22732dec 100644 --- a/Dockerfile-py3 +++ b/Dockerfile-py3 @@ -1,4 +1,6 @@ -FROM python:3.6 +ARG PYTHON_VERSION=3.7 + +FROM python:${PYTHON_VERSION} RUN mkdir /src WORKDIR /src diff --git a/tests/Dockerfile b/tests/Dockerfile index 8f49cd2c..f2f36b44 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -1,5 +1,6 @@ -ARG PYTHON_VERSION=3.6 -FROM python:$PYTHON_VERSION-jessie +ARG PYTHON_VERSION=3.7 + +FROM python:${PYTHON_VERSION} RUN apt-get update && apt-get -y install \ gnupg2 \ pass \ @@ -8,7 +9,7 @@ RUN apt-get update && apt-get -y install \ COPY ./tests/gpg-keys /gpg-keys RUN gpg2 --import gpg-keys/secret RUN gpg2 --import-ownertrust gpg-keys/ownertrust -RUN yes | pass init $(gpg2 --no-auto-check-trustdb --list-secret-keys | grep ^sec | cut -d/ -f2 | cut -d" " -f1) +RUN yes | pass init $(gpg2 --no-auto-check-trustdb --list-secret-key | awk '/^sec/{getline; $1=$1; print}') RUN gpg2 --check-trustdb ARG CREDSTORE_VERSION=v0.6.2 RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \ diff --git a/tests/Dockerfile-dind-certs b/tests/Dockerfile-dind-certs index 9e8c042b..2ab87ef7 100644 --- a/tests/Dockerfile-dind-certs +++ b/tests/Dockerfile-dind-certs @@ -1,4 +1,6 @@ -FROM python:2.7 +ARG PYTHON_VERSION=2.7 + +FROM python:${PYTHON_VERSION} RUN mkdir /tmp/certs VOLUME /certs From a316e6a9274cc3e153305b443aa77ef531c4e4a9 Mon Sep 17 00:00:00 2001 From: Francis Laniel Date: Wed, 9 Jan 2019 19:31:56 +0100 Subject: [PATCH 026/211] Add documentation to argument 'mem_reservation'. The documentation was added for function ContainerCollection::run and ContainerApiMixin::create_host_config. Signed-off-by: Francis Laniel Add documentation to argument 'mem_reservation'. The documentation was added for function ContainerCollection::run and ContainerApiMixin::create_host_config. 
Signed-off-by: Francis Laniel --- docker/api/container.py | 1 + docker/models/containers.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/api/container.py b/docker/api/container.py index 2dca68a1..326e7679 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -502,6 +502,7 @@ class ContainerApiMixin(object): bytes) or a string with a units identification char (``100000b``, ``1000k``, ``128m``, ``1g``). If a string is specified without a units character, bytes are assumed as an + mem_reservation (int or str): Memory soft limit. mem_swappiness (int): Tune a container's memory swappiness behavior. Accepts number between 0 and 100. memswap_limit (str or int): Maximum amount of memory + swap a diff --git a/docker/models/containers.py b/docker/models/containers.py index d321a580..999851ec 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -618,7 +618,7 @@ class ContainerCollection(Collection): (``100000b``, ``1000k``, ``128m``, ``1g``). If a string is specified without a units character, bytes are assumed as an intended unit. - mem_reservation (int or str): Memory soft limit + mem_reservation (int or str): Memory soft limit. mem_swappiness (int): Tune a container's memory swappiness behavior. Accepts number between 0 and 100. 
memswap_limit (str or int): Maximum amount of memory + swap a From 38d18a2d1f53fd674447812b28fef3b3b5f81301 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Tue, 16 Jul 2019 16:04:38 +0200 Subject: [PATCH 027/211] Update credentials-helpers to v0.6.3 full diff: https://github.com/docker/docker-credential-helpers/compare/v0.6.2...v0.6.3 Signed-off-by: Sebastiaan van Stijn --- tests/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/Dockerfile b/tests/Dockerfile index f2f36b44..4bd98f87 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -11,7 +11,7 @@ RUN gpg2 --import gpg-keys/secret RUN gpg2 --import-ownertrust gpg-keys/ownertrust RUN yes | pass init $(gpg2 --no-auto-check-trustdb --list-secret-key | awk '/^sec/{getline; $1=$1; print}') RUN gpg2 --check-trustdb -ARG CREDSTORE_VERSION=v0.6.2 +ARG CREDSTORE_VERSION=v0.6.3 RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \ https://github.com/docker/docker-credential-helpers/releases/download/$CREDSTORE_VERSION/docker-credential-pass-$CREDSTORE_VERSION-amd64.tar.gz && \ tar -xf /opt/docker-credential-pass.tar.gz -O > /usr/local/bin/docker-credential-pass && \ From cd3a696603d2fa985415aeb45896ab8bceccb2b7 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 12 Jul 2019 22:50:35 +0200 Subject: [PATCH 028/211] xfail test_init_swarm_data_path_addr This test can fail if `eth0` has multiple IP addresses; E docker.errors.APIError: 400 Client Error: Bad Request ("interface eth0 has more than one IPv6 address (2001:db8:1::242:ac11:2 and fe80::42:acff:fe11:2)") Which is not a failiure, but depends on the environment that the test is run in. 
Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_swarm_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index bf809bd0..f1cbc264 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -250,5 +250,6 @@ class SwarmTest(BaseAPIIntegrationTest): assert key_1['UnlockKey'] != key_2['UnlockKey'] @requires_api_version('1.30') + @pytest.mark.xfail(reason='Can fail if eth0 has multiple IP addresses') def test_init_swarm_data_path_addr(self): assert self.init_swarm(data_path_addr='eth0') From 23635d43abe7ac0c3512e03292ee786a07e543a8 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Mon, 15 Jul 2019 15:04:31 +0200 Subject: [PATCH 029/211] Adjust `--platform` tests for changes in docker engine These tests started failing on recent versions of the engine because the error string changed, and due to a regression, the status code for one endpoint changed from a 400 to a 500. 
On Docker 18.03: The `docker build` case properly returns a 400, and "invalid platform" as error string; ```bash docker build --platform=foobar -<}] module=grpc INFO[2019-07-15T11:59:20.688270160Z] ClientConn switching balancer to "pick_first" module=grpc INFO[2019-07-15T11:59:20.688353083Z] pickfirstBalancer: HandleSubConnStateChange: 0xc4209b0630, CONNECTING module=grpc INFO[2019-07-15T11:59:20.688985698Z] pickfirstBalancer: HandleSubConnStateChange: 0xc4209b0630, READY module=grpc DEBU[2019-07-15T11:59:20.812700550Z] client is session enabled DEBU[2019-07-15T11:59:20.813139288Z] FIXME: Got an API for which error does not match any expected type!!!: invalid argument github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs.init /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs/errors.go:40 github.com/docker/docker/vendor/github.com/containerd/containerd/content.init :1 github.com/docker/docker/builder/builder-next.init :1 github.com/docker/docker/api/server/backend/build.init :1 main.init :1 runtime.main /usr/local/go/src/runtime/proc.go:186 runtime.goexit /usr/local/go/src/runtime/asm_amd64.s:2361 error_type="*errors.fundamental" module=api ERRO[2019-07-15T11:59:20.813210677Z] Handler for POST /v1.39/build returned error: "foobar": unknown operating system or architecture: invalid argument DEBU[2019-07-15T11:59:20.813276737Z] FIXME: Got an API for which error does not match any expected type!!!: invalid argument github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs.init /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs/errors.go:40 github.com/docker/docker/vendor/github.com/containerd/containerd/content.init :1 github.com/docker/docker/builder/builder-next.init :1 github.com/docker/docker/api/server/backend/build.init :1 main.init :1 runtime.main /usr/local/go/src/runtime/proc.go:186 runtime.goexit /usr/local/go/src/runtime/asm_amd64.s:2361 
error_type="*errors.fundamental" module=api ``` Same for the `docker pull --platform=foobar hello-world:latest` case: ```bash docker pull --platform=foobar hello-world:latest Error response from daemon: "foobar": unknown operating system or architecture: invalid argument ``` ``` DEBU[2019-07-15T12:00:18.812995330Z] Calling POST /v1.39/images/create?fromImage=hello-world&platform=foobar&tag=latest DEBU[2019-07-15T12:00:18.813229172Z] FIXME: Got an API for which error does not match any expected type!!!: invalid argument github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs.init /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs/errors.go:40 github.com/docker/docker/vendor/github.com/containerd/containerd/content.init :1 github.com/docker/docker/builder/builder-next.init :1 github.com/docker/docker/api/server/backend/build.init :1 main.init :1 runtime.main /usr/local/go/src/runtime/proc.go:186 runtime.goexit /usr/local/go/src/runtime/asm_amd64.s:2361 error_type="*errors.fundamental" module=api ERRO[2019-07-15T12:00:18.813365546Z] Handler for POST /v1.39/images/create returned error: "foobar": unknown operating system or architecture: invalid argument DEBU[2019-07-15T12:00:18.813461428Z] FIXME: Got an API for which error does not match any expected type!!!: invalid argument github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs.init /go/src/github.com/docker/docker/vendor/github.com/containerd/containerd/errdefs/errors.go:40 github.com/docker/docker/vendor/github.com/containerd/containerd/content.init :1 github.com/docker/docker/builder/builder-next.init :1 github.com/docker/docker/api/server/backend/build.init :1 main.init :1 runtime.main /usr/local/go/src/runtime/proc.go:186 runtime.goexit /usr/local/go/src/runtime/asm_amd64.s:2361 error_type="*errors.fundamental" module=api ``` Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_build_test.py | 6 ++++-- tests/integration/api_image_test.py 
| 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 8bfc7960..4776f453 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -448,8 +448,10 @@ class BuildTest(BaseAPIIntegrationTest): for _ in stream: pass - assert excinfo.value.status_code == 400 - assert 'invalid platform' in excinfo.exconly() + # Some API versions incorrectly returns 500 status; assert 4xx or 5xx + assert excinfo.value.is_error() + assert 'unknown operating system' in excinfo.exconly() \ + or 'invalid platform' in excinfo.exconly() def test_build_out_of_context_dockerfile(self): base_dir = tempfile.mkdtemp() diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index 050e7f33..56a76924 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -69,8 +69,10 @@ class PullImageTest(BaseAPIIntegrationTest): with pytest.raises(docker.errors.APIError) as excinfo: self.client.pull('hello-world', platform='foobar') - assert excinfo.value.status_code == 500 - assert 'invalid platform' in excinfo.exconly() + # Some API versions incorrectly returns 500 status; assert 4xx or 5xx + assert excinfo.value.is_error() + assert 'unknown operating system' in excinfo.exconly() \ + or 'invalid platform' in excinfo.exconly() class CommitTest(BaseAPIIntegrationTest): From 73ad8b8f1909a2f1191605f2204f44dcac90c104 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Sat, 10 Aug 2019 18:30:40 +0200 Subject: [PATCH 030/211] Update alpine version to 3.10, and rename BUSYBOX variable Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_build_test.py | 4 +- tests/integration/api_container_test.py | 190 +++++++++++----------- tests/integration/api_exec_test.py | 30 ++-- tests/integration/api_healthcheck_test.py | 10 +- tests/integration/api_image_test.py | 18 +- tests/integration/api_network_test.py | 18 +- 
tests/integration/api_service_test.py | 68 ++++---- tests/integration/base.py | 4 +- tests/integration/conftest.py | 10 +- tests/integration/errors_test.py | 4 +- tests/integration/models_images_test.py | 12 +- tests/integration/regression_test.py | 10 +- 12 files changed, 189 insertions(+), 189 deletions(-) diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 4776f453..57128124 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -9,7 +9,7 @@ from docker.utils.proxy import ProxyConfig import pytest import six -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BaseAPIIntegrationTest, TEST_IMG from ..helpers import random_name, requires_api_version, requires_experimental @@ -277,7 +277,7 @@ class BuildTest(BaseAPIIntegrationTest): # Set up pingable endpoint on custom network network = self.client.create_network(random_name())['Id'] self.tmp_networks.append(network) - container = self.client.create_container(BUSYBOX, 'top') + container = self.client.create_container(TEST_IMG, 'top') self.tmp_containers.append(container) self.client.start(container) self.client.connect_container_to_network( diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 26245c1f..1ba3eaa5 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -15,7 +15,7 @@ from ..helpers import assert_cat_socket_detached_with_keys from ..helpers import ctrl_with from ..helpers import requires_api_version from .base import BaseAPIIntegrationTest -from .base import BUSYBOX +from .base import TEST_IMG from docker.constants import IS_WINDOWS_PLATFORM from docker.utils.socket import next_frame_header from docker.utils.socket import read_exactly @@ -25,7 +25,7 @@ class ListContainersTest(BaseAPIIntegrationTest): def test_list_containers(self): res0 = self.client.containers(all=True) size = len(res0) - res1 = 
self.client.create_container(BUSYBOX, 'true') + res1 = self.client.create_container(TEST_IMG, 'true') assert 'Id' in res1 self.client.start(res1['Id']) self.tmp_containers.append(res1['Id']) @@ -44,13 +44,13 @@ class ListContainersTest(BaseAPIIntegrationTest): class CreateContainerTest(BaseAPIIntegrationTest): def test_create(self): - res = self.client.create_container(BUSYBOX, 'true') + res = self.client.create_container(TEST_IMG, 'true') assert 'Id' in res self.tmp_containers.append(res['Id']) def test_create_with_host_pid_mode(self): ctnr = self.client.create_container( - BUSYBOX, 'true', host_config=self.client.create_host_config( + TEST_IMG, 'true', host_config=self.client.create_host_config( pid_mode='host', network_mode='none' ) ) @@ -65,7 +65,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_links(self): res0 = self.client.create_container( - BUSYBOX, 'cat', + TEST_IMG, 'cat', detach=True, stdin_open=True, environment={'FOO': '1'}) @@ -75,7 +75,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): self.client.start(container1_id) res1 = self.client.create_container( - BUSYBOX, 'cat', + TEST_IMG, 'cat', detach=True, stdin_open=True, environment={'FOO': '1'}) @@ -94,7 +94,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): link_env_prefix2 = link_alias2.upper() res2 = self.client.create_container( - BUSYBOX, 'env', host_config=self.client.create_host_config( + TEST_IMG, 'env', host_config=self.client.create_host_config( links={link_path1: link_alias1, link_path2: link_alias2}, network_mode='bridge' ) @@ -114,7 +114,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_restart_policy(self): container = self.client.create_container( - BUSYBOX, ['sleep', '2'], + TEST_IMG, ['sleep', '2'], host_config=self.client.create_host_config( restart_policy={"Name": "always", "MaximumRetryCount": 0}, network_mode='none' @@ -133,21 +133,21 @@ class CreateContainerTest(BaseAPIIntegrationTest): vol_names = ['foobar_vol0', 
'foobar_vol1'] res0 = self.client.create_container( - BUSYBOX, 'true', name=vol_names[0] + TEST_IMG, 'true', name=vol_names[0] ) container1_id = res0['Id'] self.tmp_containers.append(container1_id) self.client.start(container1_id) res1 = self.client.create_container( - BUSYBOX, 'true', name=vol_names[1] + TEST_IMG, 'true', name=vol_names[1] ) container2_id = res1['Id'] self.tmp_containers.append(container2_id) self.client.start(container2_id) res = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True, + TEST_IMG, 'cat', detach=True, stdin_open=True, host_config=self.client.create_host_config( volumes_from=vol_names, network_mode='none' ) @@ -161,7 +161,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def create_container_readonly_fs(self): ctnr = self.client.create_container( - BUSYBOX, ['mkdir', '/shrine'], + TEST_IMG, ['mkdir', '/shrine'], host_config=self.client.create_host_config( read_only=True, network_mode='none' ) @@ -173,7 +173,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): assert res != 0 def create_container_with_name(self): - res = self.client.create_container(BUSYBOX, 'true', name='foobar') + res = self.client.create_container(TEST_IMG, 'true', name='foobar') assert 'Id' in res self.tmp_containers.append(res['Id']) inspect = self.client.inspect_container(res['Id']) @@ -182,7 +182,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def create_container_privileged(self): res = self.client.create_container( - BUSYBOX, 'true', host_config=self.client.create_host_config( + TEST_IMG, 'true', host_config=self.client.create_host_config( privileged=True, network_mode='none' ) ) @@ -208,7 +208,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_mac_address(self): mac_address_expected = "02:42:ac:11:00:0a" container = self.client.create_container( - BUSYBOX, ['sleep', '60'], mac_address=mac_address_expected) + TEST_IMG, ['sleep', '60'], mac_address=mac_address_expected) id = container['Id'] @@ 
-220,7 +220,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_group_id_ints(self): container = self.client.create_container( - BUSYBOX, 'id -G', + TEST_IMG, 'id -G', host_config=self.client.create_host_config(group_add=[1000, 1001]) ) self.tmp_containers.append(container) @@ -236,7 +236,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_group_id_strings(self): container = self.client.create_container( - BUSYBOX, 'id -G', host_config=self.client.create_host_config( + TEST_IMG, 'id -G', host_config=self.client.create_host_config( group_add=['1000', '1001'] ) ) @@ -259,7 +259,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): ) container = self.client.create_container( - BUSYBOX, ['true'], + TEST_IMG, ['true'], host_config=self.client.create_host_config(log_config=log_config) ) self.tmp_containers.append(container['Id']) @@ -281,7 +281,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): with pytest.raises(docker.errors.APIError) as excinfo: # raises an internal server error 500 container = self.client.create_container( - BUSYBOX, ['true'], host_config=self.client.create_host_config( + TEST_IMG, ['true'], host_config=self.client.create_host_config( log_config=log_config ) ) @@ -296,7 +296,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): ) container = self.client.create_container( - BUSYBOX, ['true'], + TEST_IMG, ['true'], host_config=self.client.create_host_config(log_config=log_config) ) self.tmp_containers.append(container['Id']) @@ -315,7 +315,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): ) container = self.client.create_container( - BUSYBOX, ['true'], + TEST_IMG, ['true'], host_config=self.client.create_host_config(log_config=log_config) ) self.tmp_containers.append(container['Id']) @@ -329,7 +329,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_memory_constraints_with_str(self): ctnr = self.client.create_container( - BUSYBOX, 'true', + TEST_IMG, 'true', 
host_config=self.client.create_host_config( memswap_limit='1G', mem_limit='700M' @@ -347,7 +347,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_memory_constraints_with_int(self): ctnr = self.client.create_container( - BUSYBOX, 'true', + TEST_IMG, 'true', host_config=self.client.create_host_config(mem_swappiness=40) ) assert 'Id' in ctnr @@ -361,7 +361,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_environment_variable_no_value(self): container = self.client.create_container( - BUSYBOX, + TEST_IMG, ['echo'], environment={'Foo': None, 'Other': 'one', 'Blank': ''}, ) @@ -378,7 +378,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): } container = self.client.create_container( - BUSYBOX, + TEST_IMG, ['echo'], host_config=self.client.create_host_config( tmpfs=tmpfs)) @@ -390,7 +390,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): @requires_api_version('1.24') def test_create_with_isolation(self): container = self.client.create_container( - BUSYBOX, ['echo'], host_config=self.client.create_host_config( + TEST_IMG, ['echo'], host_config=self.client.create_host_config( isolation='default' ) ) @@ -404,7 +404,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): auto_remove=True ) container = self.client.create_container( - BUSYBOX, ['echo', 'test'], host_config=host_config + TEST_IMG, ['echo', 'test'], host_config=host_config ) self.tmp_containers.append(container['Id']) config = self.client.inspect_container(container) @@ -413,7 +413,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): @requires_api_version('1.25') def test_create_with_stop_timeout(self): container = self.client.create_container( - BUSYBOX, ['echo', 'test'], stop_timeout=25 + TEST_IMG, ['echo', 'test'], stop_timeout=25 ) self.tmp_containers.append(container['Id']) config = self.client.inspect_container(container) @@ -426,7 +426,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): storage_opt={'size': '120G'} ) container = 
self.client.create_container( - BUSYBOX, ['echo', 'test'], host_config=host_config + TEST_IMG, ['echo', 'test'], host_config=host_config ) self.tmp_containers.append(container) config = self.client.inspect_container(container) @@ -437,7 +437,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): @requires_api_version('1.25') def test_create_with_init(self): ctnr = self.client.create_container( - BUSYBOX, 'true', + TEST_IMG, 'true', host_config=self.client.create_host_config( init=True ) @@ -451,7 +451,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): reason='CONFIG_RT_GROUP_SCHED isn\'t enabled') def test_create_with_cpu_rt_options(self): ctnr = self.client.create_container( - BUSYBOX, 'true', host_config=self.client.create_host_config( + TEST_IMG, 'true', host_config=self.client.create_host_config( cpu_rt_period=1000, cpu_rt_runtime=500 ) ) @@ -464,7 +464,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_device_cgroup_rules(self): rule = 'c 7:128 rwm' ctnr = self.client.create_container( - BUSYBOX, 'cat /sys/fs/cgroup/devices/devices.list', + TEST_IMG, 'cat /sys/fs/cgroup/devices/devices.list', host_config=self.client.create_host_config( device_cgroup_rules=[rule] ) @@ -477,7 +477,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_uts_mode(self): container = self.client.create_container( - BUSYBOX, ['echo'], host_config=self.client.create_host_config( + TEST_IMG, ['echo'], host_config=self.client.create_host_config( uts_mode='host' ) ) @@ -501,7 +501,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): self.run_with_volume( False, - BUSYBOX, + TEST_IMG, ['touch', os.path.join(self.mount_dest, self.filename)], ) @@ -509,7 +509,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): container = self.run_with_volume( False, - BUSYBOX, + TEST_IMG, ['ls', self.mount_dest], ) logs = self.client.logs(container) @@ -523,12 +523,12 @@ class VolumeBindTest(BaseAPIIntegrationTest): def test_create_with_binds_ro(self): 
self.run_with_volume( False, - BUSYBOX, + TEST_IMG, ['touch', os.path.join(self.mount_dest, self.filename)], ) container = self.run_with_volume( True, - BUSYBOX, + TEST_IMG, ['ls', self.mount_dest], ) logs = self.client.logs(container) @@ -547,7 +547,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): ) host_config = self.client.create_host_config(mounts=[mount]) container = self.run_container( - BUSYBOX, ['ls', self.mount_dest], + TEST_IMG, ['ls', self.mount_dest], host_config=host_config ) assert container @@ -566,7 +566,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): ) host_config = self.client.create_host_config(mounts=[mount]) container = self.run_container( - BUSYBOX, ['ls', self.mount_dest], + TEST_IMG, ['ls', self.mount_dest], host_config=host_config ) assert container @@ -585,7 +585,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): ) host_config = self.client.create_host_config(mounts=[mount]) container = self.client.create_container( - BUSYBOX, ['true'], host_config=host_config, + TEST_IMG, ['true'], host_config=host_config, ) assert container inspect_data = self.client.inspect_container(container) @@ -631,7 +631,7 @@ class ArchiveTest(BaseAPIIntegrationTest): def test_get_file_archive_from_container(self): data = 'The Maid and the Pocket Watch of Blood' ctnr = self.client.create_container( - BUSYBOX, 'sh -c "echo {0} > /vol1/data.txt"'.format(data), + TEST_IMG, 'sh -c "echo {0} > /vol1/data.txt"'.format(data), volumes=['/vol1'] ) self.tmp_containers.append(ctnr) @@ -650,7 +650,7 @@ class ArchiveTest(BaseAPIIntegrationTest): def test_get_file_stat_from_container(self): data = 'The Maid and the Pocket Watch of Blood' ctnr = self.client.create_container( - BUSYBOX, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data), + TEST_IMG, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data), volumes=['/vol1'] ) self.tmp_containers.append(ctnr) @@ -668,7 +668,7 @@ class ArchiveTest(BaseAPIIntegrationTest): test_file.write(data) test_file.seek(0) ctnr = 
self.client.create_container( - BUSYBOX, + TEST_IMG, 'cat {0}'.format( os.path.join('/vol1/', os.path.basename(test_file.name)) ), @@ -690,7 +690,7 @@ class ArchiveTest(BaseAPIIntegrationTest): dirs = ['foo', 'bar'] base = helpers.make_tree(dirs, files) ctnr = self.client.create_container( - BUSYBOX, 'ls -p /vol1', volumes=['/vol1'] + TEST_IMG, 'ls -p /vol1', volumes=['/vol1'] ) self.tmp_containers.append(ctnr) with docker.utils.tar(base) as test_tar: @@ -711,7 +711,7 @@ class RenameContainerTest(BaseAPIIntegrationTest): def test_rename_container(self): version = self.client.version()['Version'] name = 'hong_meiling' - res = self.client.create_container(BUSYBOX, 'true') + res = self.client.create_container(TEST_IMG, 'true') assert 'Id' in res self.tmp_containers.append(res['Id']) self.client.rename(res, name) @@ -725,7 +725,7 @@ class RenameContainerTest(BaseAPIIntegrationTest): class StartContainerTest(BaseAPIIntegrationTest): def test_start_container(self): - res = self.client.create_container(BUSYBOX, 'true') + res = self.client.create_container(TEST_IMG, 'true') assert 'Id' in res self.tmp_containers.append(res['Id']) self.client.start(res['Id']) @@ -741,7 +741,7 @@ class StartContainerTest(BaseAPIIntegrationTest): assert inspect['State']['ExitCode'] == 0 def test_start_container_with_dict_instead_of_id(self): - res = self.client.create_container(BUSYBOX, 'true') + res = self.client.create_container(TEST_IMG, 'true') assert 'Id' in res self.tmp_containers.append(res['Id']) self.client.start(res) @@ -769,7 +769,7 @@ class StartContainerTest(BaseAPIIntegrationTest): 'true && echo "Night of Nights"' ] for cmd in commands: - container = self.client.create_container(BUSYBOX, cmd) + container = self.client.create_container(TEST_IMG, cmd) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -779,7 +779,7 @@ class StartContainerTest(BaseAPIIntegrationTest): class WaitTest(BaseAPIIntegrationTest): def test_wait(self): - res = 
self.client.create_container(BUSYBOX, ['sleep', '3']) + res = self.client.create_container(TEST_IMG, ['sleep', '3']) id = res['Id'] self.tmp_containers.append(id) self.client.start(id) @@ -792,7 +792,7 @@ class WaitTest(BaseAPIIntegrationTest): assert inspect['State']['ExitCode'] == exitcode def test_wait_with_dict_instead_of_id(self): - res = self.client.create_container(BUSYBOX, ['sleep', '3']) + res = self.client.create_container(TEST_IMG, ['sleep', '3']) id = res['Id'] self.tmp_containers.append(id) self.client.start(res) @@ -806,13 +806,13 @@ class WaitTest(BaseAPIIntegrationTest): @requires_api_version('1.30') def test_wait_with_condition(self): - ctnr = self.client.create_container(BUSYBOX, 'true') + ctnr = self.client.create_container(TEST_IMG, 'true') self.tmp_containers.append(ctnr) with pytest.raises(requests.exceptions.ConnectionError): self.client.wait(ctnr, condition='removed', timeout=1) ctnr = self.client.create_container( - BUSYBOX, ['sleep', '3'], + TEST_IMG, ['sleep', '3'], host_config=self.client.create_host_config(auto_remove=True) ) self.tmp_containers.append(ctnr) @@ -826,7 +826,7 @@ class LogsTest(BaseAPIIntegrationTest): def test_logs(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - BUSYBOX, 'echo {0}'.format(snippet) + TEST_IMG, 'echo {0}'.format(snippet) ) id = container['Id'] self.tmp_containers.append(id) @@ -840,7 +840,7 @@ class LogsTest(BaseAPIIntegrationTest): snippet = '''Line1 Line2''' container = self.client.create_container( - BUSYBOX, 'echo "{0}"'.format(snippet) + TEST_IMG, 'echo "{0}"'.format(snippet) ) id = container['Id'] self.tmp_containers.append(id) @@ -853,7 +853,7 @@ Line2''' def test_logs_streaming_and_follow(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - BUSYBOX, 'echo {0}'.format(snippet) + TEST_IMG, 'echo {0}'.format(snippet) ) id = container['Id'] self.tmp_containers.append(id) @@ -873,7 +873,7 @@ Line2''' def 
test_logs_streaming_and_follow_and_cancel(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - BUSYBOX, 'sh -c "echo \\"{0}\\" && sleep 3"'.format(snippet) + TEST_IMG, 'sh -c "echo \\"{0}\\" && sleep 3"'.format(snippet) ) id = container['Id'] self.tmp_containers.append(id) @@ -891,7 +891,7 @@ Line2''' def test_logs_with_dict_instead_of_id(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - BUSYBOX, 'echo {0}'.format(snippet) + TEST_IMG, 'echo {0}'.format(snippet) ) id = container['Id'] self.tmp_containers.append(id) @@ -904,7 +904,7 @@ Line2''' def test_logs_with_tail_0(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - BUSYBOX, 'echo "{0}"'.format(snippet) + TEST_IMG, 'echo "{0}"'.format(snippet) ) id = container['Id'] self.tmp_containers.append(id) @@ -918,7 +918,7 @@ Line2''' def test_logs_with_until(self): snippet = 'Shanghai Teahouse (Hong Meiling)' container = self.client.create_container( - BUSYBOX, 'echo "{0}"'.format(snippet) + TEST_IMG, 'echo "{0}"'.format(snippet) ) self.tmp_containers.append(container) @@ -933,7 +933,7 @@ Line2''' class DiffTest(BaseAPIIntegrationTest): def test_diff(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) + container = self.client.create_container(TEST_IMG, ['touch', '/test']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -946,7 +946,7 @@ class DiffTest(BaseAPIIntegrationTest): assert test_diff[0]['Kind'] == 1 def test_diff_with_dict_instead_of_id(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) + container = self.client.create_container(TEST_IMG, ['touch', '/test']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -961,7 +961,7 @@ class DiffTest(BaseAPIIntegrationTest): class StopTest(BaseAPIIntegrationTest): def test_stop(self): - container = 
self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -973,7 +973,7 @@ class StopTest(BaseAPIIntegrationTest): assert state['Running'] is False def test_stop_with_dict_instead_of_id(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) assert 'Id' in container id = container['Id'] self.client.start(container) @@ -988,7 +988,7 @@ class StopTest(BaseAPIIntegrationTest): class KillTest(BaseAPIIntegrationTest): def test_kill(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -1002,7 +1002,7 @@ class KillTest(BaseAPIIntegrationTest): assert state['Running'] is False def test_kill_with_dict_instead_of_id(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -1016,7 +1016,7 @@ class KillTest(BaseAPIIntegrationTest): assert state['Running'] is False def test_kill_with_signal(self): - id = self.client.create_container(BUSYBOX, ['sleep', '60']) + id = self.client.create_container(TEST_IMG, ['sleep', '60']) self.tmp_containers.append(id) self.client.start(id) self.client.kill( @@ -1033,7 +1033,7 @@ class KillTest(BaseAPIIntegrationTest): assert state['Running'] is False, state def test_kill_with_signal_name(self): - id = self.client.create_container(BUSYBOX, ['sleep', '60']) + id = self.client.create_container(TEST_IMG, ['sleep', '60']) self.client.start(id) self.tmp_containers.append(id) self.client.kill(id, signal='SIGKILL') @@ -1048,7 +1048,7 @@ class KillTest(BaseAPIIntegrationTest): 
assert state['Running'] is False, state def test_kill_with_signal_integer(self): - id = self.client.create_container(BUSYBOX, ['sleep', '60']) + id = self.client.create_container(TEST_IMG, ['sleep', '60']) self.client.start(id) self.tmp_containers.append(id) self.client.kill(id, signal=9) @@ -1077,7 +1077,7 @@ class PortTest(BaseAPIIntegrationTest): ] container = self.client.create_container( - BUSYBOX, ['sleep', '60'], ports=ports, + TEST_IMG, ['sleep', '60'], ports=ports, host_config=self.client.create_host_config( port_bindings=port_bindings, network_mode='bridge' ) @@ -1104,7 +1104,7 @@ class PortTest(BaseAPIIntegrationTest): class ContainerTopTest(BaseAPIIntegrationTest): def test_top(self): container = self.client.create_container( - BUSYBOX, ['sleep', '60'] + TEST_IMG, ['sleep', '60'] ) self.tmp_containers.append(container) @@ -1124,7 +1124,7 @@ class ContainerTopTest(BaseAPIIntegrationTest): ) def test_top_with_psargs(self): container = self.client.create_container( - BUSYBOX, ['sleep', '60']) + TEST_IMG, ['sleep', '60']) self.tmp_containers.append(container) @@ -1140,7 +1140,7 @@ class ContainerTopTest(BaseAPIIntegrationTest): class RestartContainerTest(BaseAPIIntegrationTest): def test_restart(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -1159,7 +1159,7 @@ class RestartContainerTest(BaseAPIIntegrationTest): self.client.kill(id) def test_restart_with_low_timeout(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) self.client.start(container) self.client.timeout = 3 self.client.restart(container, timeout=1) @@ -1168,7 +1168,7 @@ class RestartContainerTest(BaseAPIIntegrationTest): self.client.kill(container) def test_restart_with_dict_instead_of_id(self): - container = 
self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) assert 'Id' in container id = container['Id'] self.client.start(container) @@ -1190,7 +1190,7 @@ class RestartContainerTest(BaseAPIIntegrationTest): class RemoveContainerTest(BaseAPIIntegrationTest): def test_remove(self): - container = self.client.create_container(BUSYBOX, ['true']) + container = self.client.create_container(TEST_IMG, ['true']) id = container['Id'] self.client.start(id) self.client.wait(id) @@ -1200,7 +1200,7 @@ class RemoveContainerTest(BaseAPIIntegrationTest): assert len(res) == 0 def test_remove_with_dict_instead_of_id(self): - container = self.client.create_container(BUSYBOX, ['true']) + container = self.client.create_container(TEST_IMG, ['true']) id = container['Id'] self.client.start(id) self.client.wait(id) @@ -1212,7 +1212,7 @@ class RemoveContainerTest(BaseAPIIntegrationTest): class AttachContainerTest(BaseAPIIntegrationTest): def test_run_container_streaming(self): - container = self.client.create_container(BUSYBOX, '/bin/sh', + container = self.client.create_container(TEST_IMG, '/bin/sh', detach=True, stdin_open=True) id = container['Id'] self.tmp_containers.append(id) @@ -1224,7 +1224,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): line = 'hi there and stuff and things, words!' 
# `echo` appends CRLF, `printf` doesn't command = "printf '{0}'".format(line) - container = self.client.create_container(BUSYBOX, command, + container = self.client.create_container(TEST_IMG, command, detach=True, tty=False) self.tmp_containers.append(container) @@ -1242,7 +1242,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): def test_attach_no_stream(self): container = self.client.create_container( - BUSYBOX, 'echo hello' + TEST_IMG, 'echo hello' ) self.tmp_containers.append(container) self.client.start(container) @@ -1257,7 +1257,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): reason='Flaky test on TLS') def test_attach_stream_and_cancel(self): container = self.client.create_container( - BUSYBOX, 'sh -c "sleep 2 && echo hello && sleep 60"', + TEST_IMG, 'sh -c "sleep 2 && echo hello && sleep 60"', tty=True ) self.tmp_containers.append(container) @@ -1275,7 +1275,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): def test_detach_with_default(self): container = self.client.create_container( - BUSYBOX, 'cat', + TEST_IMG, 'cat', detach=True, stdin_open=True, tty=True ) self.tmp_containers.append(container) @@ -1294,7 +1294,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): self.client._general_configs['detachKeys'] = 'ctrl-p' container = self.client.create_container( - BUSYBOX, 'cat', + TEST_IMG, 'cat', detach=True, stdin_open=True, tty=True ) self.tmp_containers.append(container) @@ -1311,7 +1311,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): self.client._general_configs['detachKeys'] = 'ctrl-p' container = self.client.create_container( - BUSYBOX, 'cat', + TEST_IMG, 'cat', detach=True, stdin_open=True, tty=True ) self.tmp_containers.append(container) @@ -1327,7 +1327,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): class PauseTest(BaseAPIIntegrationTest): def test_pause_unpause(self): - container = self.client.create_container(BUSYBOX, ['sleep', '9999']) + container = self.client.create_container(TEST_IMG, ['sleep', '9999']) 
id = container['Id'] self.tmp_containers.append(id) self.client.start(container) @@ -1358,9 +1358,9 @@ class PruneTest(BaseAPIIntegrationTest): @requires_api_version('1.25') def test_prune_containers(self): container1 = self.client.create_container( - BUSYBOX, ['sh', '-c', 'echo hello > /data.txt'] + TEST_IMG, ['sh', '-c', 'echo hello > /data.txt'] ) - container2 = self.client.create_container(BUSYBOX, ['sleep', '9999']) + container2 = self.client.create_container(TEST_IMG, ['sleep', '9999']) self.client.start(container1) self.client.start(container2) self.client.wait(container1) @@ -1373,7 +1373,7 @@ class PruneTest(BaseAPIIntegrationTest): class GetContainerStatsTest(BaseAPIIntegrationTest): def test_get_container_stats_no_stream(self): container = self.client.create_container( - BUSYBOX, ['sleep', '60'], + TEST_IMG, ['sleep', '60'], ) self.tmp_containers.append(container) self.client.start(container) @@ -1387,7 +1387,7 @@ class GetContainerStatsTest(BaseAPIIntegrationTest): def test_get_container_stats_stream(self): container = self.client.create_container( - BUSYBOX, ['sleep', '60'], + TEST_IMG, ['sleep', '60'], ) self.tmp_containers.append(container) self.client.start(container) @@ -1405,7 +1405,7 @@ class ContainerUpdateTest(BaseAPIIntegrationTest): old_mem_limit = 400 * 1024 * 1024 new_mem_limit = 300 * 1024 * 1024 container = self.client.create_container( - BUSYBOX, 'top', host_config=self.client.create_host_config( + TEST_IMG, 'top', host_config=self.client.create_host_config( mem_limit=old_mem_limit ) ) @@ -1426,7 +1426,7 @@ class ContainerUpdateTest(BaseAPIIntegrationTest): 'Name': 'on-failure' } container = self.client.create_container( - BUSYBOX, ['sleep', '60'], + TEST_IMG, ['sleep', '60'], host_config=self.client.create_host_config( restart_policy=old_restart_policy ) @@ -1450,7 +1450,7 @@ class ContainerCPUTest(BaseAPIIntegrationTest): def test_container_cpu_shares(self): cpu_shares = 512 container = self.client.create_container( - BUSYBOX, 'ls', 
host_config=self.client.create_host_config( + TEST_IMG, 'ls', host_config=self.client.create_host_config( cpu_shares=cpu_shares ) ) @@ -1462,7 +1462,7 @@ class ContainerCPUTest(BaseAPIIntegrationTest): def test_container_cpuset(self): cpuset_cpus = "0,1" container = self.client.create_container( - BUSYBOX, 'ls', host_config=self.client.create_host_config( + TEST_IMG, 'ls', host_config=self.client.create_host_config( cpuset_cpus=cpuset_cpus ) ) @@ -1474,7 +1474,7 @@ class ContainerCPUTest(BaseAPIIntegrationTest): @requires_api_version('1.25') def test_create_with_runtime(self): container = self.client.create_container( - BUSYBOX, ['echo', 'test'], runtime='runc' + TEST_IMG, ['echo', 'test'], runtime='runc' ) self.tmp_containers.append(container['Id']) config = self.client.inspect_container(container) @@ -1485,7 +1485,7 @@ class LinkTest(BaseAPIIntegrationTest): def test_remove_link(self): # Create containers container1 = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True + TEST_IMG, 'cat', detach=True, stdin_open=True ) container1_id = container1['Id'] self.tmp_containers.append(container1_id) @@ -1497,7 +1497,7 @@ class LinkTest(BaseAPIIntegrationTest): link_alias = 'mylink' container2 = self.client.create_container( - BUSYBOX, 'cat', host_config=self.client.create_host_config( + TEST_IMG, 'cat', host_config=self.client.create_host_config( links={link_path: link_alias} ) ) diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index 53b7e22f..554e8629 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -2,7 +2,7 @@ from ..helpers import assert_cat_socket_detached_with_keys from ..helpers import ctrl_with from ..helpers import requires_api_version from .base import BaseAPIIntegrationTest -from .base import BUSYBOX +from .base import TEST_IMG from docker.utils.proxy import ProxyConfig from docker.utils.socket import next_frame_header from docker.utils.socket import 
read_exactly @@ -16,7 +16,7 @@ class ExecTest(BaseAPIIntegrationTest): ) container = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True, + TEST_IMG, 'cat', detach=True, stdin_open=True, ) self.client.start(container) self.tmp_containers.append(container) @@ -48,7 +48,7 @@ class ExecTest(BaseAPIIntegrationTest): assert item in output def test_execute_command(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) id = container['Id'] self.client.start(id) @@ -61,7 +61,7 @@ class ExecTest(BaseAPIIntegrationTest): assert exec_log == b'hello\n' def test_exec_command_string(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) id = container['Id'] self.client.start(id) @@ -74,7 +74,7 @@ class ExecTest(BaseAPIIntegrationTest): assert exec_log == b'hello world\n' def test_exec_command_as_user(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) id = container['Id'] self.client.start(id) @@ -87,7 +87,7 @@ class ExecTest(BaseAPIIntegrationTest): assert exec_log == b'postgres\n' def test_exec_command_as_root(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) id = container['Id'] self.client.start(id) @@ -100,7 +100,7 @@ class ExecTest(BaseAPIIntegrationTest): assert exec_log == b'root\n' def test_exec_command_streaming(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) id = container['Id'] self.tmp_containers.append(id) @@ -115,7 +115,7 @@ class ExecTest(BaseAPIIntegrationTest): assert res == b'hello\nworld\n' def test_exec_start_socket(self): 
- container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) container_id = container['Id'] self.client.start(container_id) @@ -137,7 +137,7 @@ class ExecTest(BaseAPIIntegrationTest): assert data.decode('utf-8') == line def test_exec_start_detached(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) container_id = container['Id'] self.client.start(container_id) @@ -152,7 +152,7 @@ class ExecTest(BaseAPIIntegrationTest): assert response == "" def test_exec_inspect(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) id = container['Id'] self.client.start(id) @@ -167,7 +167,7 @@ class ExecTest(BaseAPIIntegrationTest): @requires_api_version('1.25') def test_exec_command_with_env(self): - container = self.client.create_container(BUSYBOX, 'cat', + container = self.client.create_container(TEST_IMG, 'cat', detach=True, stdin_open=True) id = container['Id'] self.client.start(id) @@ -182,7 +182,7 @@ class ExecTest(BaseAPIIntegrationTest): @requires_api_version('1.35') def test_exec_command_with_workdir(self): container = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True + TEST_IMG, 'cat', detach=True, stdin_open=True ) self.tmp_containers.append(container) self.client.start(container) @@ -193,7 +193,7 @@ class ExecTest(BaseAPIIntegrationTest): def test_detach_with_default(self): container = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True + TEST_IMG, 'cat', detach=True, stdin_open=True ) id = container['Id'] self.client.start(id) @@ -212,7 +212,7 @@ class ExecTest(BaseAPIIntegrationTest): def test_detach_with_config_file(self): self.client._general_configs['detachKeys'] = 'ctrl-p' container = self.client.create_container( - 
BUSYBOX, 'cat', detach=True, stdin_open=True + TEST_IMG, 'cat', detach=True, stdin_open=True ) id = container['Id'] self.client.start(id) @@ -241,7 +241,7 @@ class ExecDemuxTest(BaseAPIIntegrationTest): def setUp(self): super(ExecDemuxTest, self).setUp() self.container = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True + TEST_IMG, 'cat', detach=True, stdin_open=True ) self.client.start(self.container) self.tmp_containers.append(self.container) diff --git a/tests/integration/api_healthcheck_test.py b/tests/integration/api_healthcheck_test.py index 5dbac376..c54583b0 100644 --- a/tests/integration/api_healthcheck_test.py +++ b/tests/integration/api_healthcheck_test.py @@ -1,4 +1,4 @@ -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BaseAPIIntegrationTest, TEST_IMG from .. import helpers SECOND = 1000000000 @@ -16,7 +16,7 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.24') def test_healthcheck_shell_command(self): container = self.client.create_container( - BUSYBOX, 'top', healthcheck=dict(test='echo "hello world"')) + TEST_IMG, 'top', healthcheck=dict(test='echo "hello world"')) self.tmp_containers.append(container) res = self.client.inspect_container(container) @@ -27,7 +27,7 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.24') def test_healthcheck_passes(self): container = self.client.create_container( - BUSYBOX, 'top', healthcheck=dict( + TEST_IMG, 'top', healthcheck=dict( test="true", interval=1 * SECOND, timeout=1 * SECOND, @@ -40,7 +40,7 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.24') def test_healthcheck_fails(self): container = self.client.create_container( - BUSYBOX, 'top', healthcheck=dict( + TEST_IMG, 'top', healthcheck=dict( test="false", interval=1 * SECOND, timeout=1 * SECOND, @@ -53,7 +53,7 @@ class HealthcheckTest(BaseAPIIntegrationTest): @helpers.requires_api_version('1.29') def 
test_healthcheck_start_period(self): container = self.client.create_container( - BUSYBOX, 'top', healthcheck=dict( + TEST_IMG, 'top', healthcheck=dict( test="echo 'x' >> /counter.txt && " "test `cat /counter.txt | wc -l` -ge 3", interval=1 * SECOND, diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index 56a76924..2bc96abf 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -15,7 +15,7 @@ from six.moves import socketserver import docker from ..helpers import requires_api_version, requires_experimental -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BaseAPIIntegrationTest, TEST_IMG class ListImagesTest(BaseAPIIntegrationTest): @@ -77,7 +77,7 @@ class PullImageTest(BaseAPIIntegrationTest): class CommitTest(BaseAPIIntegrationTest): def test_commit(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) + container = self.client.create_container(TEST_IMG, ['touch', '/test']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -90,13 +90,13 @@ class CommitTest(BaseAPIIntegrationTest): assert img['Container'].startswith(id) assert 'ContainerConfig' in img assert 'Image' in img['ContainerConfig'] - assert BUSYBOX == img['ContainerConfig']['Image'] - busybox_id = self.client.inspect_image(BUSYBOX)['Id'] + assert TEST_IMG == img['ContainerConfig']['Image'] + busybox_id = self.client.inspect_image(TEST_IMG)['Id'] assert 'Parent' in img assert img['Parent'] == busybox_id def test_commit_with_changes(self): - cid = self.client.create_container(BUSYBOX, ['touch', '/test']) + cid = self.client.create_container(TEST_IMG, ['touch', '/test']) self.tmp_containers.append(cid) self.client.start(cid) img_id = self.client.commit( @@ -112,7 +112,7 @@ class CommitTest(BaseAPIIntegrationTest): class RemoveImageTest(BaseAPIIntegrationTest): def test_remove(self): - container = self.client.create_container(BUSYBOX, ['touch', '/test']) + 
container = self.client.create_container(TEST_IMG, ['touch', '/test']) id = container['Id'] self.client.start(id) self.tmp_containers.append(id) @@ -319,7 +319,7 @@ class PruneImagesTest(BaseAPIIntegrationTest): pass # Ensure busybox does not get pruned - ctnr = self.client.create_container(BUSYBOX, ['sleep', '9999']) + ctnr = self.client.create_container(TEST_IMG, ['sleep', '9999']) self.tmp_containers.append(ctnr) self.client.pull('hello-world', tag='latest') @@ -343,7 +343,7 @@ class SaveLoadImagesTest(BaseAPIIntegrationTest): @requires_api_version('1.23') def test_get_image_load_image(self): with tempfile.TemporaryFile() as f: - stream = self.client.get_image(BUSYBOX) + stream = self.client.get_image(TEST_IMG) for chunk in stream: f.write(chunk) @@ -351,7 +351,7 @@ class SaveLoadImagesTest(BaseAPIIntegrationTest): result = self.client.load_image(f.read()) success = False - result_line = 'Loaded image: {}\n'.format(BUSYBOX) + result_line = 'Loaded image: {}\n'.format(TEST_IMG) for data in result: print(data) if 'stream' in data: diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py index db37cbd9..0f26827b 100644 --- a/tests/integration/api_network_test.py +++ b/tests/integration/api_network_test.py @@ -3,7 +3,7 @@ from docker.types import IPAMConfig, IPAMPool import pytest from ..helpers import random_name, requires_api_version -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BaseAPIIntegrationTest, TEST_IMG class TestNetworks(BaseAPIIntegrationTest): @@ -92,7 +92,7 @@ class TestNetworks(BaseAPIIntegrationTest): def test_connect_and_disconnect_container(self): net_name, net_id = self.create_network() - container = self.client.create_container(BUSYBOX, 'top') + container = self.client.create_container(TEST_IMG, 'top') self.tmp_containers.append(container) self.client.start(container) @@ -119,7 +119,7 @@ class TestNetworks(BaseAPIIntegrationTest): def test_connect_and_force_disconnect_container(self): 
net_name, net_id = self.create_network() - container = self.client.create_container(BUSYBOX, 'top') + container = self.client.create_container(TEST_IMG, 'top') self.tmp_containers.append(container) self.client.start(container) @@ -144,7 +144,7 @@ class TestNetworks(BaseAPIIntegrationTest): def test_connect_with_aliases(self): net_name, net_id = self.create_network() - container = self.client.create_container(BUSYBOX, 'top') + container = self.client.create_container(TEST_IMG, 'top') self.tmp_containers.append(container) self.client.start(container) @@ -161,7 +161,7 @@ class TestNetworks(BaseAPIIntegrationTest): net_name, net_id = self.create_network() container = self.client.create_container( - image=BUSYBOX, + image=TEST_IMG, command='top', host_config=self.client.create_host_config(network_mode=net_name), ) @@ -181,7 +181,7 @@ class TestNetworks(BaseAPIIntegrationTest): net_name, net_id = self.create_network() container = self.client.create_container( - image=BUSYBOX, + image=TEST_IMG, command='top', host_config=self.client.create_host_config( network_mode=net_name, @@ -211,7 +211,7 @@ class TestNetworks(BaseAPIIntegrationTest): ), ) container = self.client.create_container( - image=BUSYBOX, command='top', + image=TEST_IMG, command='top', host_config=self.client.create_host_config(network_mode=net_name), networking_config=self.client.create_networking_config({ net_name: self.client.create_endpoint_config( @@ -237,7 +237,7 @@ class TestNetworks(BaseAPIIntegrationTest): ), ) container = self.client.create_container( - image=BUSYBOX, command='top', + image=TEST_IMG, command='top', host_config=self.client.create_host_config(network_mode=net_name), networking_config=self.client.create_networking_config({ net_name: self.client.create_endpoint_config( @@ -257,7 +257,7 @@ class TestNetworks(BaseAPIIntegrationTest): @requires_api_version('1.24') def test_create_with_linklocal_ips(self): container = self.client.create_container( - BUSYBOX, 'top', + TEST_IMG, 'top', 
networking_config=self.client.create_networking_config( { 'bridge': self.client.create_endpoint_config( diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index 71e0869e..c170a0a8 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -10,7 +10,7 @@ import six from ..helpers import ( force_leave_swarm, requires_api_version, requires_experimental ) -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BaseAPIIntegrationTest, TEST_IMG class ServiceTest(BaseAPIIntegrationTest): @@ -60,7 +60,7 @@ class ServiceTest(BaseAPIIntegrationTest): name = self.get_service_name() container_spec = docker.types.ContainerSpec( - BUSYBOX, ['echo', 'hello'] + TEST_IMG, ['echo', 'hello'] ) task_tmpl = docker.types.TaskTemplate(container_spec) return name, self.client.create_service( @@ -156,7 +156,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_custom_log_driver(self): container_spec = docker.types.ContainerSpec( - BUSYBOX, ['echo', 'hello'] + TEST_IMG, ['echo', 'hello'] ) log_cfg = docker.types.DriverConfig('none') task_tmpl = docker.types.TaskTemplate( @@ -174,7 +174,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_with_volume_mount(self): vol_name = self.get_service_name() container_spec = docker.types.ContainerSpec( - BUSYBOX, ['ls'], + TEST_IMG, ['ls'], mounts=[ docker.types.Mount(target='/test', source=vol_name) ] @@ -194,7 +194,7 @@ class ServiceTest(BaseAPIIntegrationTest): assert mount['Type'] == 'volume' def test_create_service_with_resources_constraints(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) resources = docker.types.Resources( cpu_limit=4000000, mem_limit=3 * 1024 * 1024 * 1024, cpu_reservation=3500000, mem_reservation=2 * 1024 * 1024 * 1024 @@ -214,7 +214,7 @@ class ServiceTest(BaseAPIIntegrationTest): ] def 
_create_service_with_generic_resources(self, generic_resources): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) resources = docker.types.Resources( generic_resources=generic_resources @@ -265,7 +265,7 @@ class ServiceTest(BaseAPIIntegrationTest): self._create_service_with_generic_resources(test_input) def test_create_service_with_update_config(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) update_config = docker.types.UpdateConfig( parallelism=10, delay=5, failure_action='pause' @@ -283,7 +283,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.28') def test_create_service_with_failure_action_rollback(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) update_config = docker.types.UpdateConfig(failure_action='rollback') name = self.get_service_name() @@ -314,7 +314,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.28') def test_create_service_with_rollback_config(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) rollback_cfg = docker.types.RollbackConfig( parallelism=10, delay=5, failure_action='pause', @@ -334,7 +334,7 @@ class ServiceTest(BaseAPIIntegrationTest): assert rollback_cfg['MaxFailureRatio'] == rc['MaxFailureRatio'] def test_create_service_with_restart_policy(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) policy = docker.types.RestartPolicy( docker.types.RestartPolicy.condition_types.ANY, delay=5, max_attempts=5 @@ 
-357,7 +357,7 @@ class ServiceTest(BaseAPIIntegrationTest): 'dockerpytest_2', driver='overlay', ipam={'Driver': 'default'} ) self.tmp_networks.append(net2['Id']) - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() svc_id = self.client.create_service( @@ -373,7 +373,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_with_placement(self): node_id = self.client.nodes()[0]['ID'] - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate( container_spec, placement=['node.id=={}'.format(node_id)] ) @@ -386,7 +386,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_with_placement_object(self): node_id = self.client.nodes()[0]['ID'] - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) placemt = docker.types.Placement( constraints=['node.id=={}'.format(node_id)] ) @@ -401,7 +401,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.30') def test_create_service_with_placement_platform(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) placemt = docker.types.Placement(platforms=[('x86_64', 'linux')]) task_tmpl = docker.types.TaskTemplate( container_spec, placement=placemt @@ -414,7 +414,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.27') def test_create_service_with_placement_preferences(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) placemt = docker.types.Placement(preferences=[ {'Spread': {'SpreadDescriptor': 'com.dockerpy.test'}} ]) @@ -429,7 +429,7 @@ class 
ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.27') def test_create_service_with_placement_preferences_tuple(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) placemt = docker.types.Placement(preferences=( ('spread', 'com.dockerpy.test'), )) @@ -443,7 +443,7 @@ class ServiceTest(BaseAPIIntegrationTest): assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt def test_create_service_with_endpoint_spec(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() endpoint_spec = docker.types.EndpointSpec(ports={ @@ -473,7 +473,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.32') def test_create_service_with_endpoint_spec_host_publish_mode(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() endpoint_spec = docker.types.EndpointSpec(ports={ @@ -493,7 +493,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_with_env(self): container_spec = docker.types.ContainerSpec( - BUSYBOX, ['true'], env={'DOCKER_PY_TEST': 1} + TEST_IMG, ['true'], env={'DOCKER_PY_TEST': 1} ) task_tmpl = docker.types.TaskTemplate( container_spec, @@ -509,7 +509,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.29') def test_create_service_with_update_order(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) update_config = docker.types.UpdateConfig( parallelism=10, delay=5, order='start-first' @@ -528,7 +528,7 @@ class ServiceTest(BaseAPIIntegrationTest): 
@requires_api_version('1.25') def test_create_service_with_tty(self): container_spec = docker.types.ContainerSpec( - BUSYBOX, ['true'], tty=True + TEST_IMG, ['true'], tty=True ) task_tmpl = docker.types.TaskTemplate( container_spec, @@ -545,7 +545,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.25') def test_create_service_with_tty_dict(self): container_spec = { - 'Image': BUSYBOX, + 'Image': TEST_IMG, 'Command': ['true'], 'TTY': True } @@ -561,7 +561,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_global_mode(self): container_spec = docker.types.ContainerSpec( - BUSYBOX, ['echo', 'hello'] + TEST_IMG, ['echo', 'hello'] ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -574,7 +574,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_replicated_mode(self): container_spec = docker.types.ContainerSpec( - BUSYBOX, ['echo', 'hello'] + TEST_IMG, ['echo', 'hello'] ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -767,7 +767,7 @@ class ServiceTest(BaseAPIIntegrationTest): search=['local'], options=['debug'] ) container_spec = docker.types.ContainerSpec( - BUSYBOX, ['sleep', '999'], dns_config=dns_config + TEST_IMG, ['sleep', '999'], dns_config=dns_config ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -787,7 +787,7 @@ class ServiceTest(BaseAPIIntegrationTest): start_period=3 * second, interval=int(second / 2), ) container_spec = docker.types.ContainerSpec( - BUSYBOX, ['sleep', '999'], healthcheck=hc + TEST_IMG, ['sleep', '999'], healthcheck=hc ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -804,7 +804,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.28') def test_create_service_with_readonly(self): container_spec = docker.types.ContainerSpec( - BUSYBOX, ['sleep', '999'], read_only=True + TEST_IMG, ['sleep', '999'], 
read_only=True ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -818,7 +818,7 @@ class ServiceTest(BaseAPIIntegrationTest): @requires_api_version('1.28') def test_create_service_with_stop_signal(self): container_spec = docker.types.ContainerSpec( - BUSYBOX, ['sleep', '999'], stop_signal='SIGINT' + TEST_IMG, ['sleep', '999'], stop_signal='SIGINT' ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -836,7 +836,7 @@ class ServiceTest(BaseAPIIntegrationTest): def test_create_service_with_privileges(self): priv = docker.types.Privileges(selinux_disable=True) container_spec = docker.types.ContainerSpec( - BUSYBOX, ['sleep', '999'], privileges=priv + TEST_IMG, ['sleep', '999'], privileges=priv ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -992,7 +992,7 @@ class ServiceTest(BaseAPIIntegrationTest): assert labels['container.label'] == 'SampleLabel' def test_update_service_with_defaults_update_config(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) update_config = docker.types.UpdateConfig( parallelism=10, delay=5, failure_action='pause' @@ -1031,7 +1031,7 @@ class ServiceTest(BaseAPIIntegrationTest): 'dockerpytest_2', driver='overlay', ipam={'Driver': 'default'} ) self.tmp_networks.append(net2['Id']) - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() svc_id = self.client.create_service( @@ -1070,7 +1070,7 @@ class ServiceTest(BaseAPIIntegrationTest): ] def test_update_service_with_defaults_endpoint_spec(self): - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = 
docker.types.TaskTemplate(container_spec) name = self.get_service_name() endpoint_spec = docker.types.EndpointSpec(ports={ @@ -1134,7 +1134,7 @@ class ServiceTest(BaseAPIIntegrationTest): start_period=3 * second, interval=int(second / 2), ) container_spec = docker.types.ContainerSpec( - BUSYBOX, ['sleep', '999'], healthcheck=hc + TEST_IMG, ['sleep', '999'], healthcheck=hc ) task_tmpl = docker.types.TaskTemplate(container_spec) name = self.get_service_name() @@ -1149,7 +1149,7 @@ class ServiceTest(BaseAPIIntegrationTest): ) container_spec = docker.types.ContainerSpec( - BUSYBOX, ['sleep', '999'], healthcheck={} + TEST_IMG, ['sleep', '999'], healthcheck={} ) task_tmpl = docker.types.TaskTemplate(container_spec) diff --git a/tests/integration/base.py b/tests/integration/base.py index 0ebf5b99..a7613f69 100644 --- a/tests/integration/base.py +++ b/tests/integration/base.py @@ -6,7 +6,7 @@ import docker from .. import helpers from docker.utils import kwargs_from_env -BUSYBOX = 'alpine:3.9.3' # FIXME: this should probably be renamed +TEST_IMG = 'alpine:3.10' TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION') @@ -108,7 +108,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest): return container - def create_and_start(self, image=BUSYBOX, command='top', **kwargs): + def create_and_start(self, image=TEST_IMG, command='top', **kwargs): container = self.client.create_container( image=image, command=command, **kwargs) self.tmp_containers.append(container) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 4e8d2683..ec48835d 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -7,7 +7,7 @@ import docker.errors from docker.utils import kwargs_from_env import pytest -from .base import BUSYBOX +from .base import TEST_IMG @pytest.fixture(autouse=True, scope='session') @@ -15,15 +15,15 @@ def setup_test_session(): warnings.simplefilter('error') c = docker.APIClient(version='auto', **kwargs_from_env()) try: - 
c.inspect_image(BUSYBOX) + c.inspect_image(TEST_IMG) except docker.errors.NotFound: - print("\npulling {0}".format(BUSYBOX), file=sys.stderr) - for data in c.pull(BUSYBOX, stream=True, decode=True): + print("\npulling {0}".format(TEST_IMG), file=sys.stderr) + for data in c.pull(TEST_IMG, stream=True, decode=True): status = data.get("status") progress = data.get("progress") detail = "{0} - {1}".format(status, progress) print(detail, file=sys.stderr) # Double make sure we now have busybox - c.inspect_image(BUSYBOX) + c.inspect_image(TEST_IMG) c.close() diff --git a/tests/integration/errors_test.py b/tests/integration/errors_test.py index ac74d721..7bf156af 100644 --- a/tests/integration/errors_test.py +++ b/tests/integration/errors_test.py @@ -1,11 +1,11 @@ from docker.errors import APIError -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BaseAPIIntegrationTest, TEST_IMG import pytest class ErrorsTest(BaseAPIIntegrationTest): def test_api_error_parses_json(self): - container = self.client.create_container(BUSYBOX, ['sleep', '10']) + container = self.client.create_container(TEST_IMG, ['sleep', '10']) self.client.start(container['Id']) with pytest.raises(APIError) as cm: self.client.remove_container(container['Id']) diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py index 31fab109..375d972d 100644 --- a/tests/integration/models_images_test.py +++ b/tests/integration/models_images_test.py @@ -4,7 +4,7 @@ import tempfile import docker import pytest -from .base import BaseIntegrationTest, BUSYBOX, TEST_API_VERSION +from .base import BaseIntegrationTest, TEST_IMG, TEST_API_VERSION from ..helpers import random_name @@ -72,8 +72,8 @@ class ImageCollectionTest(BaseIntegrationTest): def test_pull_with_tag(self): client = docker.from_env(version=TEST_API_VERSION) - image = client.images.pull('alpine', tag='3.3') - assert 'alpine:3.3' in image.attrs['RepoTags'] + image = client.images.pull('alpine', tag='3.10') + 
assert 'alpine:3.10' in image.attrs['RepoTags'] def test_pull_with_sha(self): image_ref = ( @@ -97,7 +97,7 @@ class ImageCollectionTest(BaseIntegrationTest): def test_save_and_load(self): client = docker.from_env(version=TEST_API_VERSION) - image = client.images.get(BUSYBOX) + image = client.images.get(TEST_IMG) with tempfile.TemporaryFile() as f: stream = image.save() for chunk in stream: @@ -111,7 +111,7 @@ class ImageCollectionTest(BaseIntegrationTest): def test_save_and_load_repo_name(self): client = docker.from_env(version=TEST_API_VERSION) - image = client.images.get(BUSYBOX) + image = client.images.get(TEST_IMG) additional_tag = random_name() image.tag(additional_tag) self.tmp_imgs.append(additional_tag) @@ -131,7 +131,7 @@ class ImageCollectionTest(BaseIntegrationTest): def test_save_name_error(self): client = docker.from_env(version=TEST_API_VERSION) - image = client.images.get(BUSYBOX) + image = client.images.get(TEST_IMG) with pytest.raises(docker.errors.InvalidArgument): image.save(named='sakuya/izayoi') diff --git a/tests/integration/regression_test.py b/tests/integration/regression_test.py index 9aab076e..a63883c4 100644 --- a/tests/integration/regression_test.py +++ b/tests/integration/regression_test.py @@ -4,7 +4,7 @@ import random import docker import six -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BaseAPIIntegrationTest, TEST_IMG import pytest @@ -19,7 +19,7 @@ class TestRegressions(BaseAPIIntegrationTest): def test_542_truncate_ids_client_side(self): self.client.start( - self.client.create_container(BUSYBOX, ['true']) + self.client.create_container(TEST_IMG, ['true']) ) result = self.client.containers(all=True, trunc=True) assert len(result[0]['Id']) == 12 @@ -30,12 +30,12 @@ class TestRegressions(BaseAPIIntegrationTest): def test_649_handle_timeout_value_none(self): self.client.timeout = None - ctnr = self.client.create_container(BUSYBOX, ['sleep', '2']) + ctnr = self.client.create_container(TEST_IMG, ['sleep', '2']) 
self.client.start(ctnr) self.client.stop(ctnr) def test_715_handle_user_param_as_int_value(self): - ctnr = self.client.create_container(BUSYBOX, ['id', '-u'], user=1000) + ctnr = self.client.create_container(TEST_IMG, ['id', '-u'], user=1000) self.client.start(ctnr) self.client.wait(ctnr) logs = self.client.logs(ctnr) @@ -47,7 +47,7 @@ class TestRegressions(BaseAPIIntegrationTest): tcp_port, udp_port = random.sample(range(9999, 32000), 2) ctnr = self.client.create_container( - BUSYBOX, ['sleep', '9999'], ports=[2000, (2000, 'udp')], + TEST_IMG, ['sleep', '9999'], ports=[2000, (2000, 'udp')], host_config=self.client.create_host_config( port_bindings={'2000/tcp': tcp_port, '2000/udp': udp_port} ) From cce095408927836712cf78346df8e2b5460e00ad Mon Sep 17 00:00:00 2001 From: Matt Fluet Date: Wed, 7 Aug 2019 17:32:41 -0400 Subject: [PATCH 031/211] Fix for empty auth keys in config.json Signed-off-by: Matt Fluet --- docker/auth.py | 2 + tests/unit/auth_test.py | 116 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 118 insertions(+) diff --git a/docker/auth.py b/docker/auth.py index 5f34ac08..6a07ea20 100644 --- a/docker/auth.py +++ b/docker/auth.py @@ -303,12 +303,14 @@ class AuthConfig(dict): auth_data[k] = self._resolve_authconfig_credstore( k, self.creds_store ) + auth_data[convert_to_hostname(k)] = auth_data[k] # credHelpers entries take priority over all others for reg, store_name in self.cred_helpers.items(): auth_data[reg] = self._resolve_authconfig_credstore( reg, store_name ) + auth_data[convert_to_hostname(reg)] = auth_data[reg] return auth_data diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index d46da503..aac89109 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -530,11 +530,21 @@ class CredstoreTest(unittest.TestCase): 'Password': 'izayoi', 'ServerAddress': 'https://gensokyo.jp/v2', }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, 
'https://default.com/v2': { 'Username': 'user', 'Password': 'hunter2', 'ServerAddress': 'https://default.com/v2', }, + 'default.com': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, } def test_get_all_credentials_with_empty_credhelper(self): @@ -548,11 +558,21 @@ class CredstoreTest(unittest.TestCase): 'Password': 'izayoi', 'ServerAddress': 'https://gensokyo.jp/v2', }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, 'https://default.com/v2': { 'Username': 'user', 'Password': 'hunter2', 'ServerAddress': 'https://default.com/v2', }, + 'default.com': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, 'registry1.io': None, } @@ -571,11 +591,21 @@ class CredstoreTest(unittest.TestCase): 'Password': 'izayoi', 'ServerAddress': 'https://gensokyo.jp/v2', }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, 'https://default.com/v2': { 'Username': 'user', 'Password': 'hunter2', 'ServerAddress': 'https://default.com/v2', }, + 'default.com': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, } def test_get_all_credentials_with_auths_entries(self): @@ -591,11 +621,21 @@ class CredstoreTest(unittest.TestCase): 'Password': 'izayoi', 'ServerAddress': 'https://gensokyo.jp/v2', }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, 'https://default.com/v2': { 'Username': 'user', 'Password': 'hunter2', 'ServerAddress': 'https://default.com/v2', }, + 'default.com': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, 'registry1.io': { 'ServerAddress': 'registry1.io', 'Username': 'reimu', @@ -603,6 +643,62 @@ class CredstoreTest(unittest.TestCase): }, } + def 
test_get_all_credentials_with_empty_auths_entry(self): + self.authconfig.add_auth('default.com', {}) + + assert self.authconfig.get_all_credentials() == { + 'https://gensokyo.jp/v2': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, + 'https://default.com/v2': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, + 'default.com': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, + } + + def test_get_all_credentials_credstore_overrides_auth_entry(self): + self.authconfig.add_auth('default.com', { + 'Username': 'shouldnotsee', + 'Password': 'thisentry', + 'ServerAddress': 'https://default.com/v2', + }) + + assert self.authconfig.get_all_credentials() == { + 'https://gensokyo.jp/v2': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, + 'https://default.com/v2': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, + 'default.com': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, + } + def test_get_all_credentials_helpers_override_default(self): self.authconfig['credHelpers'] = { 'https://default.com/v2': 'truesecret', @@ -616,11 +712,21 @@ class CredstoreTest(unittest.TestCase): 'Password': 'izayoi', 'ServerAddress': 'https://gensokyo.jp/v2', }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, 'https://default.com/v2': { 'Username': 'reimu', 'Password': 'hakurei', 'ServerAddress': 'https://default.com/v2', }, + 'default.com': { + 'Username': 'reimu', + 'Password': 'hakurei', + 
'ServerAddress': 'https://default.com/v2', + }, } def test_get_all_credentials_3_sources(self): @@ -642,11 +748,21 @@ class CredstoreTest(unittest.TestCase): 'Password': 'izayoi', 'ServerAddress': 'https://gensokyo.jp/v2', }, + 'gensokyo.jp': { + 'Username': 'sakuya', + 'Password': 'izayoi', + 'ServerAddress': 'https://gensokyo.jp/v2', + }, 'https://default.com/v2': { 'Username': 'user', 'Password': 'hunter2', 'ServerAddress': 'https://default.com/v2', }, + 'default.com': { + 'Username': 'user', + 'Password': 'hunter2', + 'ServerAddress': 'https://default.com/v2', + }, 'registry1.io': { 'ServerAddress': 'registry1.io', 'Username': 'reimu', From 2d327bf74304170d86ae63f18ed91a7fa3ac1a79 Mon Sep 17 00:00:00 2001 From: Ryan McCullagh Date: Fri, 23 Aug 2019 10:36:58 -0500 Subject: [PATCH 032/211] Fix typo in comment. networks => network Signed-off-by: Ryan McCullagh --- docker/api/network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/api/network.py b/docker/api/network.py index 57ed8d3b..c56a8d0b 100644 --- a/docker/api/network.py +++ b/docker/api/network.py @@ -7,7 +7,7 @@ from .. import utils class NetworkApiMixin(object): def networks(self, names=None, ids=None, filters=None): """ - List networks. Similar to the ``docker networks ls`` command. + List networks. Similar to the ``docker network ls`` command. 
Args: names (:py:class:`list`): List of names to filter by From 57c2193f6d0a6be6240d4fee9793e59ee7a9a2de Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Sat, 10 Aug 2019 18:50:36 +0200 Subject: [PATCH 033/211] pytest: set junitxml suite name to "docker-py" Signed-off-by: Sebastiaan van Stijn --- pytest.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pytest.ini b/pytest.ini index 21b47a6a..d233c56f 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,2 +1,4 @@ [pytest] addopts = --tb=short -rxs + +junit_suite_name = docker-py From c238315c64ba3a0b1d3252e7965a94bb3618f94f Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Sat, 10 Aug 2019 19:04:54 +0200 Subject: [PATCH 034/211] pytest: update to v4.2.1 - use xunit2 for compatibility with Jenkins - pytest-dev/pytest#3547: `--junitxml` can emit XML compatible with Jenkins xUnit. `junit_family` INI option accepts `legacy|xunit1`, which produces old style output, and `xunit2` that conforms more strictly to https://github.com/jenkinsci/xunit-plugin/blob/xunit-2.3.2/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd Signed-off-by: Sebastiaan van Stijn --- pytest.ini | 1 + test-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index d233c56f..d4f718e7 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,3 +2,4 @@ addopts = --tb=short -rxs junit_suite_name = docker-py +junit_family = xunit2 diff --git a/test-requirements.txt b/test-requirements.txt index b89f6462..bebfee86 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,6 @@ coverage==4.5.2 flake8==3.6.0 mock==1.0.1 -pytest==4.1.0 +pytest==4.2.1 pytest-cov==2.6.1 pytest-timeout==1.3.3 From 06c606300c5f6f16cc83e10edb261b81d9ab4133 Mon Sep 17 00:00:00 2001 From: Matt Fluet Date: Mon, 5 Aug 2019 18:31:56 -0400 Subject: [PATCH 035/211] Correct INDEX_URL logic in build.py _set_auth_headers Signed-off-by: Matt Fluet --- docker/api/build.py | 3 ++- 1 file 
changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/api/build.py b/docker/api/build.py index e0a4ac96..365129a0 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -308,7 +308,8 @@ class BuildApiMixin(object): auth_data = self._auth_configs.get_all_credentials() # See https://github.com/docker/docker-py/issues/1683 - if auth.INDEX_URL not in auth_data and auth.INDEX_URL in auth_data: + if (auth.INDEX_URL not in auth_data and + auth.INDEX_NAME in auth_data): auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) log.debug( From 63760b192228725ac6c2808c996ccf6f45aff7e4 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Tue, 16 Jul 2019 12:51:01 +0200 Subject: [PATCH 036/211] test/Dockerfile: allow using a mirror for the apt repository With this change applied, the default debian package repository can be replaced with a mirror; ``` make APT_MIRROR=cdn-fastly.deb.debian.org build-py3 ... Step 5/19 : RUN apt-get update && apt-get -y install gnupg2 pass curl ---> Running in 01c1101a0bd0 Get:1 http://cdn-fastly.deb.debian.org/debian buster InRelease [118 kB] Get:2 http://cdn-fastly.deb.debian.org/debian-security buster/updates InRelease [39.1 kB] Get:3 http://cdn-fastly.deb.debian.org/debian buster-updates InRelease [46.8 kB] Get:4 http://cdn-fastly.deb.debian.org/debian buster/main amd64 Packages [7897 kB] Get:5 http://cdn-fastly.deb.debian.org/debian-security buster/updates/main amd64 Packages [22.8 kB] ``` Signed-off-by: Sebastiaan van Stijn --- Makefile | 4 ++-- tests/Dockerfile | 5 +++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index ad643e80..db103f5b 100644 --- a/Makefile +++ b/Makefile @@ -8,11 +8,11 @@ clean: .PHONY: build build: - docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 . + docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 --build-arg APT_MIRROR . 
.PHONY: build-py3 build-py3: - docker build -t docker-sdk-python3 -f tests/Dockerfile . + docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR . .PHONY: build-docs build-docs: diff --git a/tests/Dockerfile b/tests/Dockerfile index 4bd98f87..df8468ab 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -1,6 +1,11 @@ ARG PYTHON_VERSION=3.7 FROM python:${PYTHON_VERSION} + +ARG APT_MIRROR +RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \ + && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list + RUN apt-get update && apt-get -y install \ gnupg2 \ pass \ From c88205c5cea0e04d67b5604d130f22cc3c833c66 Mon Sep 17 00:00:00 2001 From: Frank Sachsenheim Date: Mon, 27 May 2019 22:07:24 +0200 Subject: [PATCH 037/211] Amends the docs concerning multiple label filters Closes #2338 Signed-off-by: Frank Sachsenheim --- docker/api/container.py | 3 ++- docker/api/image.py | 3 ++- docker/api/network.py | 3 ++- docker/models/containers.py | 3 ++- docker/models/images.py | 3 ++- docker/models/networks.py | 3 ++- 6 files changed, 12 insertions(+), 6 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 326e7679..45bd3528 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -174,7 +174,8 @@ class ContainerApiMixin(object): - `exited` (int): Only containers with specified exit code - `status` (str): One of ``restarting``, ``running``, ``paused``, ``exited`` - - `label` (str): format either ``"key"`` or ``"key=value"`` + - `label` (str|list): format either ``"key"``, ``"key=value"`` + or a list of such. - `id` (str): The id of the container. - `name` (str): The name of the container. - `ancestor` (str): Filter by container ancestor. 
Format of diff --git a/docker/api/image.py b/docker/api/image.py index b370b7d8..11c8cf75 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -70,7 +70,8 @@ class ImageApiMixin(object): filters (dict): Filters to be processed on the image list. Available filters: - ``dangling`` (bool) - - ``label`` (str): format either ``key`` or ``key=value`` + - `label` (str|list): format either ``"key"``, ``"key=value"`` + or a list of such. Returns: (dict or list): A list if ``quiet=True``, otherwise a dict. diff --git a/docker/api/network.py b/docker/api/network.py index c56a8d0b..750b91b2 100644 --- a/docker/api/network.py +++ b/docker/api/network.py @@ -15,7 +15,8 @@ class NetworkApiMixin(object): filters (dict): Filters to be processed on the network list. Available filters: - ``driver=[]`` Matches a network's driver. - - ``label=[]`` or ``label=[=]``. + - ``label=[]``, ``label=[=]`` or a list of + such. - ``type=["custom"|"builtin"]`` Filters networks by type. Returns: diff --git a/docker/models/containers.py b/docker/models/containers.py index 999851ec..d1f275f7 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -900,7 +900,8 @@ class ContainerCollection(Collection): - `exited` (int): Only containers with specified exit code - `status` (str): One of ``restarting``, ``running``, ``paused``, ``exited`` - - `label` (str): format either ``"key"`` or ``"key=value"`` + - `label` (str|list): format either ``"key"``, ``"key=value"`` + or a list of such. - `id` (str): The id of the container. - `name` (str): The name of the container. - `ancestor` (str): Filter by container ancestor. Format of diff --git a/docker/models/images.py b/docker/models/images.py index 54196829..757a5a47 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -350,7 +350,8 @@ class ImageCollection(Collection): filters (dict): Filters to be processed on the image list. 
Available filters: - ``dangling`` (bool) - - ``label`` (str): format either ``key`` or ``key=value`` + - `label` (str|list): format either ``"key"``, ``"key=value"`` + or a list of such. Returns: (list of :py:class:`Image`): The images. diff --git a/docker/models/networks.py b/docker/models/networks.py index be3291a4..f944c8e2 100644 --- a/docker/models/networks.py +++ b/docker/models/networks.py @@ -190,7 +190,8 @@ class NetworkCollection(Collection): filters (dict): Filters to be processed on the network list. Available filters: - ``driver=[]`` Matches a network's driver. - - ``label=[]`` or ``label=[=]``. + - `label` (str|list): format either ``"key"``, ``"key=value"`` + or a list of such. - ``type=["custom"|"builtin"]`` Filters networks by type. greedy (bool): Fetch more details for each network individually. You might want this to get the containers attached to them. From 38fe3983ba7dd22ef34c4612e3aacdab82fbe08a Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Sat, 10 Aug 2019 19:19:32 +0200 Subject: [PATCH 038/211] Jenkinsfile: update API version matrix; set default to v1.40 - Added new entry for Docker 19.03 - Removed obsolete engine versions that reached EOL (both as Community Edition and Enterprise Edition) - Set the fallback/default API version to v1.40, which corresponds with Docker 19.03 (current release) Signed-off-by: Sebastiaan van Stijn --- Jenkinsfile | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index e618c5dd..a0f983c2 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -46,12 +46,14 @@ def getDockerVersions = { -> def getAPIVersion = { engineVersion -> def versionMap = [ - '17.06': '1.30', '17.12': '1.35', '18.02': '1.36', '18.03': '1.37', - '18.06': '1.38', '18.09': '1.39' + '17.06': '1.30', + '18.03': '1.37', + '18.09': '1.39', + '19.03': '1.40' ] def result = versionMap[engineVersion.substring(0, 5)] if (!result) { - return '1.39' + return '1.40' } return result } From 
0be550dcf059efb28d27813b2a4486fc02d7b688 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Sat, 10 Aug 2019 19:22:52 +0200 Subject: [PATCH 039/211] Jenkinsfile: update python 3.6 -> 3.7 Signed-off-by: Sebastiaan van Stijn --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index a0f983c2..e879eb43 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -25,7 +25,7 @@ def buildImages = { -> imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}" buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7") - buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.6 .", "py3.6") + buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7") } } } From 934072a5e7fd73f395a788ac4200e42883ab327f Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Fri, 3 May 2019 21:53:36 +0200 Subject: [PATCH 040/211] Add NetworkAttachmentConfig type Signed-off-by: Hannes Ljungberg --- docker/api/service.py | 10 +++++---- docker/models/services.py | 5 +++-- docker/types/__init__.py | 2 +- docker/types/services.py | 22 ++++++++++++++++++-- docs/api.rst | 1 + tests/integration/api_service_test.py | 29 +++++++++++++++++++++++++++ 6 files changed, 60 insertions(+), 9 deletions(-) diff --git a/docker/api/service.py b/docker/api/service.py index 372dd10b..e9027bfa 100644 --- a/docker/api/service.py +++ b/docker/api/service.py @@ -135,8 +135,9 @@ class ServiceApiMixin(object): of the service. Default: ``None`` rollback_config (RollbackConfig): Specification for the rollback strategy of the service. Default: ``None`` - networks (:py:class:`list`): List of network names or IDs to attach - the service to. Default: ``None``. + networks (:py:class:`list`): List of network names or IDs or + :py:class:`~docker.types.NetworkAttachmentConfig` to attach the + service to. Default: ``None``. 
endpoint_spec (EndpointSpec): Properties that can be configured to access and load balance a service. Default: ``None``. @@ -383,8 +384,9 @@ class ServiceApiMixin(object): of the service. Default: ``None``. rollback_config (RollbackConfig): Specification for the rollback strategy of the service. Default: ``None`` - networks (:py:class:`list`): List of network names or IDs to attach - the service to. Default: ``None``. + networks (:py:class:`list`): List of network names or IDs or + :py:class:`~docker.types.NetworkAttachmentConfig` to attach the + service to. Default: ``None``. endpoint_spec (EndpointSpec): Properties that can be configured to access and load balance a service. Default: ``None``. fetch_current_spec (boolean): Use the undefined settings from the diff --git a/docker/models/services.py b/docker/models/services.py index 2b6479f2..5eff8c88 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -178,8 +178,9 @@ class ServiceCollection(Collection): ``source:target:options``, where options is either ``ro`` or ``rw``. name (str): Name to give to the service. - networks (list of str): List of network names or IDs to attach - the service to. Default: ``None``. + networks (:py:class:`list`): List of network names or IDs or + :py:class:`~docker.types.NetworkAttachmentConfig` to attach the + service to. Default: ``None``. resources (Resources): Resource limits and reservations. restart_policy (RestartPolicy): Restart policy for containers. 
secrets (list of :py:class:`docker.types.SecretReference`): List diff --git a/docker/types/__init__.py b/docker/types/__init__.py index f3cac1bc..5db330e2 100644 --- a/docker/types/__init__.py +++ b/docker/types/__init__.py @@ -7,6 +7,6 @@ from .services import ( ConfigReference, ContainerSpec, DNSConfig, DriverConfig, EndpointSpec, Mount, Placement, PlacementPreference, Privileges, Resources, RestartPolicy, RollbackConfig, SecretReference, ServiceMode, TaskTemplate, - UpdateConfig + UpdateConfig, NetworkAttachmentConfig ) from .swarm import SwarmSpec, SwarmExternalCA diff --git a/docker/types/services.py b/docker/types/services.py index 5722b0e3..05dda15d 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -26,8 +26,8 @@ class TaskTemplate(dict): placement (Placement): Placement instructions for the scheduler. If a list is passed instead, it is assumed to be a list of constraints as part of a :py:class:`Placement` object. - networks (:py:class:`list`): List of network names or IDs to attach - the containers to. + networks (:py:class:`list`): List of network names or IDs or + :py:class:`NetworkAttachmentConfig` to attach the service to. force_update (int): A counter that triggers an update even if no relevant parameters have been changed. """ @@ -770,3 +770,21 @@ class Privileges(dict): if len(selinux_context) > 0: self['SELinuxContext'] = selinux_context + + +class NetworkAttachmentConfig(dict): + """ + Network attachment options for a service. + + Args: + target (str): The target network for attachment. + Can be a network name or ID. + aliases (:py:class:`list`): A list of discoverable alternate names + for the service. + options (:py:class:`dict`): Driver attachment options for the + network target. 
+ """ + def __init__(self, target, aliases=None, options=None): + self['Target'] = target + self['Aliases'] = aliases + self['DriverOpts'] = options diff --git a/docs/api.rst b/docs/api.rst index edb8fffa..bd046614 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -142,6 +142,7 @@ Configuration types .. autoclass:: IPAMPool .. autoclass:: LogConfig .. autoclass:: Mount +.. autoclass:: NetworkAttachmentConfig .. autoclass:: Placement .. autoclass:: PlacementPreference .. autoclass:: Privileges diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index c170a0a8..784d1e37 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -371,6 +371,35 @@ class ServiceTest(BaseAPIIntegrationTest): {'Target': net1['Id']}, {'Target': net2['Id']} ] + def test_create_service_with_network_attachment_config(self): + network = self.client.create_network( + 'dockerpytest_1', driver='overlay', ipam={'Driver': 'default'} + ) + self.tmp_networks.append(network['Id']) + container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + network_config = docker.types.NetworkAttachmentConfig( + target='dockerpytest_1', + aliases=['dockerpytest_1_alias'], + options={ + 'foo': 'bar' + } + ) + task_tmpl = docker.types.TaskTemplate( + container_spec, + networks=[network_config] + ) + name = self.get_service_name() + svc_id = self.client.create_service( + task_tmpl, name=name + ) + svc_info = self.client.inspect_service(svc_id) + assert 'Networks' in svc_info['Spec']['TaskTemplate'] + service_networks_info = svc_info['Spec']['TaskTemplate']['Networks'] + assert len(service_networks_info) == 1 + assert service_networks_info[0]['Target'] == network['Id'] + assert service_networks_info[0]['Aliases'] == ['dockerpytest_1_alias'] + assert service_networks_info[0]['DriverOpts'] == {'foo': 'bar'} + def test_create_service_with_placement(self): node_id = self.client.nodes()[0]['ID'] container_spec = 
docker.types.ContainerSpec(TEST_IMG, ['true']) From ec63237da0edb2fd7180d14183c3f2bcfe12cc0c Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Fri, 3 May 2019 22:09:48 +0200 Subject: [PATCH 041/211] Correctly reference ConfigReference Signed-off-by: Hannes Ljungberg --- docker/models/services.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker/models/services.py b/docker/models/services.py index 5eff8c88..e866545d 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -206,8 +206,9 @@ class ServiceCollection(Collection): the container's `hosts` file. dns_config (DNSConfig): Specification for DNS related configurations in resolver configuration file. - configs (:py:class:`list`): List of :py:class:`ConfigReference` - that will be exposed to the service. + configs (:py:class:`list`): List of + :py:class:`~docker.types.ConfigReference` that will be exposed + to the service. privileges (Privileges): Security options for the service's containers. From 7c8264ce9629be771a8245ef702e8608fc528544 Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Fri, 3 May 2019 22:24:33 +0200 Subject: [PATCH 042/211] Correctly reference SecretReference Signed-off-by: Hannes Ljungberg --- docker/models/services.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/models/services.py b/docker/models/services.py index e866545d..a35687b3 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -183,7 +183,7 @@ class ServiceCollection(Collection): service to. Default: ``None``. resources (Resources): Resource limits and reservations. restart_policy (RestartPolicy): Restart policy for containers. - secrets (list of :py:class:`docker.types.SecretReference`): List + secrets (list of :py:class:`~docker.types.SecretReference`): List of secrets accessible to containers for this service. stop_grace_period (int): Amount of time to wait for containers to terminate before forcefully killing them. 
From bc89de6047a771b778d6e7d85fba401befd80675 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 30 Aug 2019 00:14:20 +0200 Subject: [PATCH 043/211] Fix broken test due to BUSYBOX -> TEST_IMG rename The BUSYBOX variable was renamed to TEST_IMG in 54b48a9b7ab59b4dcf49acf49ddf52035ba3ea08, however 0ddf428b6ce7accdac3506b45047df2cb72941ec got merged after that change, but was out of date, and therefore caused the tests to fail: ``` =================================== FAILURES =================================== ________ ServiceTest.test_create_service_with_network_attachment_config ________ tests/integration/api_service_test.py:379: in test_create_service_with_network_attachment_config container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) E NameError: global name 'BUSYBOX' is not defined ``` Fix the test by using the correct variable name. Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_service_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index 784d1e37..b6b7ec53 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -376,7 +376,7 @@ class ServiceTest(BaseAPIIntegrationTest): 'dockerpytest_1', driver='overlay', ipam={'Driver': 'default'} ) self.tmp_networks.append(network['Id']) - container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) network_config = docker.types.NetworkAttachmentConfig( target='dockerpytest_1', aliases=['dockerpytest_1_alias'], From 88219c682c9ba673a03197191233cb3a7c69167d Mon Sep 17 00:00:00 2001 From: Kir Kolyshkin Date: Tue, 1 Oct 2019 17:21:38 -0700 Subject: [PATCH 044/211] Bump pytest to 4.3.1 Pytest 4.3.1 includes the fix from https://github.com/pytest-dev/pytest/pull/4795 which should fix the following failure: > INFO: Building docker-sdk-python3:4.0.2... 
> sha256:c7a40413c985b6e75df324fae39b1c30cb78a25df71b7892f1a4a15449537fb3 > INFO: Starting docker-py tests... > Traceback (most recent call last): > File "/usr/local/bin/pytest", line 10, in > sys.exit(main()) > File "/usr/local/lib/python3.6/site-packages/_pytest/config/__init__.py", line 61, in main > config = _prepareconfig(args, plugins) > File "/usr/local/lib/python3.6/site-packages/_pytest/config/__init__.py", line 182, in _prepareconfig > config = get_config() > File "/usr/local/lib/python3.6/site-packages/_pytest/config/__init__.py", line 156, in get_config > pluginmanager.import_plugin(spec) > File "/usr/local/lib/python3.6/site-packages/_pytest/config/__init__.py", line 530, in import_plugin > __import__(importspec) > File "/usr/local/lib/python3.6/site-packages/_pytest/tmpdir.py", line 25, in > class TempPathFactory(object): > File "/usr/local/lib/python3.6/site-packages/_pytest/tmpdir.py", line 35, in TempPathFactory > lambda p: Path(os.path.abspath(six.text_type(p))) > TypeError: attrib() got an unexpected keyword argument 'convert' > Sending interrupt signal to process > Terminated > script returned exit code 143 Signed-off-by: Kir Kolyshkin --- test-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-requirements.txt b/test-requirements.txt index bebfee86..0b01e569 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,6 @@ coverage==4.5.2 flake8==3.6.0 mock==1.0.1 -pytest==4.2.1 +pytest==4.3.1 pytest-cov==2.6.1 pytest-timeout==1.3.3 From 2bb08b3985fbde794a75fbf321872c9c4d84abf9 Mon Sep 17 00:00:00 2001 From: Christopher Crone Date: Thu, 3 Oct 2019 15:44:27 +0200 Subject: [PATCH 045/211] Bump 4.1.0 Signed-off-by: Christopher Crone --- docker/version.py | 2 +- docs/change-log.md | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 21249253..99a8b424 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ 
-version = "4.1.0-dev" +version = "4.1.0" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index b10cfd54..7cc05068 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,27 @@ Change log ========== +4.1.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/61?closed=1) + +### Bugfixes + +- Correct `INDEX_URL` logic in build.py _set_auth_headers +- Fix for empty auth keys in config.json + +### Features + +- Add `NetworkAttachmentConfig` for service create/update + +### Miscellaneous + +- Bump pytest to 4.3.1 +- Adjust `--platform` tests for changes in docker engine +- Update credentials-helpers to v0.6.3 + + 4.0.2 ----- From 1e567223ef4bc23b85e0f19da89ea910aebf46ca Mon Sep 17 00:00:00 2001 From: Till Riedel Date: Sun, 14 Apr 2019 10:38:07 +0200 Subject: [PATCH 046/211] set logging level of paramiko to warn Signed-off-by: Till Riedel --- docker/transport/sshconn.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 5a8ceb08..6d2b1af6 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -1,6 +1,7 @@ import paramiko import requests.adapters import six +import logging from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants @@ -77,6 +78,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): def __init__(self, base_url, timeout=60, pool_connections=constants.DEFAULT_NUM_POOLS): + logging.getLogger("paramiko").setLevel(logging.WARNING) self.ssh_client = paramiko.SSHClient() self.ssh_client.load_system_host_keys() From bc6777eb01d34d5aacc38c6579ce6ae3a1569e8d Mon Sep 17 00:00:00 2001 From: Till Riedel Date: Sun, 14 Apr 2019 10:40:15 +0200 Subject: [PATCH 047/211] set host key policy for ssh transport to WarningPolicy() Signed-off-by: Till Riedel --- docker/transport/sshconn.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 6d2b1af6..79aefcec 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -81,6 +81,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): logging.getLogger("paramiko").setLevel(logging.WARNING) self.ssh_client = paramiko.SSHClient() self.ssh_client.load_system_host_keys() + self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) self.base_url = base_url self._connect() From ed9b208e156a2685b032d934c02621f54caf7608 Mon Sep 17 00:00:00 2001 From: Till Riedel Date: Sun, 14 Apr 2019 13:52:12 +0200 Subject: [PATCH 048/211] obey Hostname Username Port and ProxyCommand settings from .ssh/config Signed-off-by: Till Riedel --- docker/transport/sshconn.py | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 79aefcec..7de0e590 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -2,6 +2,7 @@ import paramiko import requests.adapters import six import logging +import os from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants @@ -73,17 +74,40 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): class SSHHTTPAdapter(BaseHTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [ - 'pools', 'timeout', 'ssh_client', + 'pools', 'timeout', 'ssh_client', 'ssh_params' ] def __init__(self, base_url, timeout=60, pool_connections=constants.DEFAULT_NUM_POOLS): logging.getLogger("paramiko").setLevel(logging.WARNING) self.ssh_client = paramiko.SSHClient() + base_url = six.moves.urllib_parse.urlparse(base_url) + self.ssh_params = { + "hostname": base_url.hostname, + "port": base_url.port, + "username": base_url.username + } + ssh_config_file = os.path.expanduser("~/.ssh/config") + if os.path.exists(ssh_config_file): + conf = paramiko.SSHConfig() + with open(ssh_config_file) as f: + conf.parse(f) + host_config = conf.lookup(base_url.hostname) + self.ssh_conf = host_config + if 'proxycommand' in host_config: + self.ssh_params["sock"] = paramiko.ProxyCommand( + self.ssh_conf['proxycommand'] + ) + if 'hostname' in host_config: + self.ssh_params['hostname'] = host_config['hostname'] + if base_url.port is None and 'port' in host_config: + self.ssh_params['port'] = self.ssh_conf['port'] + if base_url.username is None and 'user' in host_config: + self.ssh_params['username'] = self.ssh_conf['user'] + self.ssh_client.load_system_host_keys() self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) - self.base_url = base_url self._connect() self.timeout = timeout self.pools = RecentlyUsedContainer( @@ -92,10 +116,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): super(SSHHTTPAdapter, self).__init__() def _connect(self): - parsed = six.moves.urllib_parse.urlparse(self.base_url) - self.ssh_client.connect( - parsed.hostname, parsed.port, parsed.username, - ) + self.ssh_client.connect(**self.ssh_params) def get_connection(self, url, proxies=None): with self.pools.lock: From a67d180e2c4346aedc44a066ff9d95a0f59155c8 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof 
Date: Mon, 14 Oct 2019 12:24:55 +0200 Subject: [PATCH 049/211] Fix CI labels so we run on amd64 nodes Signed-off-by: Nicolas De Loof --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index e879eb43..7af23e9c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -32,7 +32,7 @@ def buildImages = { -> def getDockerVersions = { -> def dockerVersions = ["17.06.2-ce"] - wrappedNode(label: "ubuntu && !zfs") { + wrappedNode(label: "ubuntu && !zfs && amd64") { def result = sh(script: """docker run --rm \\ --entrypoint=python \\ ${imageNamePy3} \\ From 61e2d5f69bd189679306e8e3b52d9c109b339f6f Mon Sep 17 00:00:00 2001 From: rentu Date: Fri, 30 Aug 2019 09:35:46 +0100 Subject: [PATCH 050/211] Fix win32pipe.WaitNamedPipe throw exception in windows container. Signed-off-by: Renlong Tu --- docker/transport/npipesocket.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/docker/transport/npipesocket.py b/docker/transport/npipesocket.py index ef020316..176b5c87 100644 --- a/docker/transport/npipesocket.py +++ b/docker/transport/npipesocket.py @@ -1,4 +1,5 @@ import functools +import time import io import six @@ -9,7 +10,7 @@ cERROR_PIPE_BUSY = 0xe7 cSECURITY_SQOS_PRESENT = 0x100000 cSECURITY_ANONYMOUS = 0 -RETRY_WAIT_TIMEOUT = 10000 +MAXIMUM_RETRY_COUNT = 10 def check_closed(f): @@ -46,8 +47,7 @@ class NpipeSocket(object): self._closed = True @check_closed - def connect(self, address): - win32pipe.WaitNamedPipe(address, self._timeout) + def connect(self, address, retry_count=0): try: handle = win32file.CreateFile( address, @@ -65,8 +65,10 @@ class NpipeSocket(object): # Another program or thread has grabbed our pipe instance # before we got to it. Wait for availability and attempt to # connect again. 
- win32pipe.WaitNamedPipe(address, RETRY_WAIT_TIMEOUT) - return self.connect(address) + retry_count = retry_count + 1 + if (retry_count < MAXIMUM_RETRY_COUNT): + time.sleep(1) + return self.connect(address, retry_count) raise e self.flags = win32pipe.GetNamedPipeInfo(handle)[0] From 9b0d07f9a8ced5f5f0100fc579a25b82a585c20b Mon Sep 17 00:00:00 2001 From: Christopher Crone Date: Thu, 3 Oct 2019 16:40:23 +0200 Subject: [PATCH 051/211] Version bump Signed-off-by: Christopher Crone --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 99a8b424..0c9ec47c 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.1.0" +version = "4.2.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 6c29375fd13bcaccc3d9f88a1449d83aed322794 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 3 Jan 2020 15:20:20 +0100 Subject: [PATCH 052/211] Fix ImageCollectionTest.test_pull_multiple flakiness The ImageCollectionTest.test_pull_multiple test performs a `docker pull` (without a `:tag` specified) to pull all tags of the given repository (image). After pulling the image, the image(s) pulled are checked to verify if the list of images contains the `:latest` tag. However, the test assumes that all tags of the image are tags for the same version of the image (same digest), and thus a *single* image is returned, which is not always the case.
Currently, the `hello-world:latest` and `hello-world:linux` tags point to a different digest, therefore the `client.images.pull()` returns multiple images: one image for each digest, making the test fail: =================================== FAILURES =================================== ____________________ ImageCollectionTest.test_pull_multiple ____________________ tests/integration/models_images_test.py:90: in test_pull_multiple assert len(images) == 1 E AssertionError: assert 2 == 1 E + where 2 = len([, ]) This patch updates the test to not assume a single image is returned, and instead loop through the list of images and check if any of the images contains the `:latest` tag. Signed-off-by: Sebastiaan van Stijn --- tests/integration/models_images_test.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py index 375d972d..223d102f 100644 --- a/tests/integration/models_images_test.py +++ b/tests/integration/models_images_test.py @@ -87,8 +87,10 @@ class ImageCollectionTest(BaseIntegrationTest): def test_pull_multiple(self): client = docker.from_env(version=TEST_API_VERSION) images = client.images.pull('hello-world') - assert len(images) == 1 - assert 'hello-world:latest' in images[0].attrs['RepoTags'] + assert len(images) >= 1 + assert any([ + 'hello-world:latest' in img.attrs['RepoTags'] for img in images + ]) def test_load_error(self): client = docker.from_env(version=TEST_API_VERSION) From 6e44d8422c3bd74578787582fa73cba73184c7f5 Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Fri, 17 Jan 2020 19:25:55 +0100 Subject: [PATCH 053/211] Implement context management, lifecycle and unittests. 
Signed-off-by: Anca Iordache --- .travis.yml | 2 +- appveyor.yml | 3 +- docker/__init__.py | 3 + docker/constants.py | 12 ++ docker/context/__init__.py | 3 + docker/context/api.py | 205 +++++++++++++++++++++++++ docker/context/config.py | 81 ++++++++++ docker/context/context.py | 208 ++++++++++++++++++++++++++ docker/errors.py | 32 ++++ docker/utils/utils.py | 15 +- test-requirements.txt | 1 + tests/integration/context_api_test.py | 52 +++++++ tests/unit/context_test.py | 45 ++++++ tests/unit/errors_test.py | 20 +-- 14 files changed, 659 insertions(+), 23 deletions(-) create mode 100644 docker/context/__init__.py create mode 100644 docker/context/api.py create mode 100644 docker/context/config.py create mode 100644 docker/context/context.py create mode 100644 tests/integration/context_api_test.py create mode 100644 tests/unit/context_test.py diff --git a/.travis.yml b/.travis.yml index 577b893f..7b3d7248 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,6 +15,6 @@ matrix: - env: TOXENV=flake8 install: - - pip install tox + - pip install tox==2.9.1 script: - tox diff --git a/appveyor.yml b/appveyor.yml index d659b586..144ab352 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,8 +1,9 @@ version: '{branch}-{build}' install: - - "SET PATH=C:\\Python27-x64;C:\\Python27-x64\\Scripts;%PATH%" + - "SET PATH=C:\\Python37-x64;C:\\Python37-x64\\Scripts;%PATH%" - "python --version" + - "python -m pip install --upgrade pip" - "pip install tox==2.9.1" # Build the binary after tests diff --git a/docker/__init__.py b/docker/__init__.py index cf732e13..e5c1a8f6 100644 --- a/docker/__init__.py +++ b/docker/__init__.py @@ -1,6 +1,9 @@ # flake8: noqa from .api import APIClient from .client import DockerClient, from_env +from .context import Context +from .context import ContextAPI +from .tls import TLSConfig from .version import version, version_info __version__ = version diff --git a/docker/constants.py b/docker/constants.py index 4b96e1ce..e4daed5d 100644 --- a/docker/constants.py 
+++ b/docker/constants.py @@ -9,6 +9,18 @@ CONTAINER_LIMITS_KEYS = [ 'memory', 'memswap', 'cpushares', 'cpusetcpus' ] +DEFAULT_HTTP_HOST = "127.0.0.1" +DEFAULT_UNIX_SOCKET = "http+unix:///var/run/docker.sock" +DEFAULT_NPIPE = 'npipe:////./pipe/docker_engine' + +BYTE_UNITS = { + 'b': 1, + 'k': 1024, + 'm': 1024 * 1024, + 'g': 1024 * 1024 * 1024 +} + + INSECURE_REGISTRY_DEPRECATION_WARNING = \ 'The `insecure_registry` argument to {} ' \ 'is deprecated and non-functional. Please remove it.' diff --git a/docker/context/__init__.py b/docker/context/__init__.py new file mode 100644 index 00000000..0a6707f9 --- /dev/null +++ b/docker/context/__init__.py @@ -0,0 +1,3 @@ +# flake8: noqa +from .context import Context +from .api import ContextAPI diff --git a/docker/context/api.py b/docker/context/api.py new file mode 100644 index 00000000..fc7e8940 --- /dev/null +++ b/docker/context/api.py @@ -0,0 +1,205 @@ +import json +import os + +from docker import errors +from docker.context.config import get_meta_dir +from docker.context.config import METAFILE +from docker.context.config import get_current_context_name +from docker.context.config import write_context_name_to_docker_config +from docker.context import Context + + +class ContextAPI(object): + """Context API. + Contains methods for context management: + create, list, remove, get, inspect. + """ + DEFAULT_CONTEXT = Context("default") + + @classmethod + def create_context( + cls, name, orchestrator="swarm", host=None, tls_cfg=None, + default_namespace=None, skip_tls_verify=False): + """Creates a new context. + Returns: + (Context): a Context object. + Raises: + :py:class:`docker.errors.MissingContextParameter` + If a context name is not provided. + :py:class:`docker.errors.ContextAlreadyExists` + If a context with the name already exists. + :py:class:`docker.errors.ContextException` + If name is default. 
+ + Example: + + >>> from docker.context import ContextAPI + >>> ctx = ContextAPI.create_context(name='test') + >>> print(ctx.Metadata) + { + "Name": "test", + "Metadata": { + "StackOrchestrator": "swarm" + }, + "Endpoints": { + "docker": { + "Host": "unix:///var/run/docker.sock", + "SkipTLSVerify": false + } + } + } + """ + if not name: + raise errors.MissingContextParameter("name") + if name == "default": + raise errors.ContextException( + '"default" is a reserved context name') + ctx = Context.load_context(name) + if ctx: + raise errors.ContextAlreadyExists(name) + endpoint = "docker" if orchestrator == "swarm" else orchestrator + ctx = Context(name, orchestrator) + ctx.set_endpoint( + endpoint, host, tls_cfg, + skip_tls_verify=skip_tls_verify, + def_namespace=default_namespace) + ctx.save() + return ctx + + @classmethod + def get_context(cls, name=None): + """Retrieves a context object. + Args: + name (str): The name of the context + + Example: + + >>> from docker.context import ContextAPI + >>> ctx = ContextAPI.get_context(name='test') + >>> print(ctx.Metadata) + { + "Name": "test", + "Metadata": { + "StackOrchestrator": "swarm" + }, + "Endpoints": { + "docker": { + "Host": "unix:///var/run/docker.sock", + "SkipTLSVerify": false + } + } + } + """ + if not name: + name = get_current_context_name() + if name == "default": + return cls.DEFAULT_CONTEXT + return Context.load_context(name) + + @classmethod + def contexts(cls): + """Context list. + Returns: + (Context): List of context objects. + Raises: + :py:class:`docker.errors.APIError` + If the server returns an error. 
+ """ + names = [] + for dirname, dirnames, fnames in os.walk(get_meta_dir()): + for filename in fnames + dirnames: + if filename == METAFILE: + try: + data = json.load( + open(os.path.join(dirname, filename), "r")) + names.append(data["Name"]) + except Exception as e: + raise errors.ContextException( + "Failed to load metafile {}: {}".format( + filename, e)) + + contexts = [cls.DEFAULT_CONTEXT] + for name in names: + contexts.append(Context.load_context(name)) + return contexts + + @classmethod + def get_current_context(cls): + """Get current context. + Returns: + (Context): current context object. + """ + return cls.get_context() + + @classmethod + def set_current_context(cls, name="default"): + ctx = cls.get_context(name) + if not ctx: + raise errors.ContextNotFound(name) + + err = write_context_name_to_docker_config(name) + if err: + raise errors.ContextException( + 'Failed to set current context: {}'.format(err)) + + @classmethod + def remove_context(cls, name): + """Remove a context. Similar to the ``docker context rm`` command. + + Args: + name (str): The name of the context + + Raises: + :py:class:`docker.errors.MissingContextParameter` + If a context name is not provided. + :py:class:`docker.errors.ContextNotFound` + If a context with the name does not exist. + :py:class:`docker.errors.ContextException` + If name is default. + + Example: + + >>> from docker.context import ContextAPI + >>> ContextAPI.remove_context(name='test') + >>> + """ + if not name: + raise errors.MissingContextParameter("name") + if name == "default": + raise errors.ContextException( + 'context "default" cannot be removed') + ctx = Context.load_context(name) + if not ctx: + raise errors.ContextNotFound(name) + if name == get_current_context_name(): + write_context_name_to_docker_config(None) + ctx.remove() + + @classmethod + def inspect_context(cls, name="default"): + """Remove a context. Similar to the ``docker context inspect`` command. 
+ + Args: + name (str): The name of the context + + Raises: + :py:class:`docker.errors.MissingContextParameter` + If a context name is not provided. + :py:class:`docker.errors.ContextNotFound` + If a context with the name does not exist. + + Example: + + >>> from docker.context import ContextAPI + >>> ContextAPI.remove_context(name='test') + >>> + """ + if not name: + raise errors.MissingContextParameter("name") + if name == "default": + return cls.DEFAULT_CONTEXT() + ctx = Context.load_context(name) + if not ctx: + raise errors.ContextNotFound(name) + + return ctx() diff --git a/docker/context/config.py b/docker/context/config.py new file mode 100644 index 00000000..ac9a342e --- /dev/null +++ b/docker/context/config.py @@ -0,0 +1,81 @@ +import os +import json +import hashlib + +from docker import utils +from docker.constants import IS_WINDOWS_PLATFORM +from docker.constants import DEFAULT_UNIX_SOCKET +from docker.utils.config import find_config_file + +METAFILE = "meta.json" + + +def get_current_context_name(): + name = "default" + docker_cfg_path = find_config_file() + if docker_cfg_path: + try: + with open(docker_cfg_path, "r") as f: + name = json.load(f).get("currentContext", "default") + except Exception: + return "default" + return name + + +def write_context_name_to_docker_config(name=None): + if name == 'default': + name = None + docker_cfg_path = find_config_file() + config = {} + if docker_cfg_path: + try: + with open(docker_cfg_path, "r") as f: + config = json.load(f) + except Exception as e: + return e + current_context = config.get("currentContext", None) + if current_context and not name: + del config["currentContext"] + elif name: + config["currentContext"] = name + else: + return + try: + with open(docker_cfg_path, "w") as f: + json.dump(config, f, indent=4) + except Exception as e: + return e + + +def get_context_id(name): + return hashlib.sha256(name.encode('utf-8')).hexdigest() + + +def get_context_dir(): + return 
os.path.join(os.path.dirname(find_config_file() or ""), "contexts") + + +def get_meta_dir(name=None): + meta_dir = os.path.join(get_context_dir(), "meta") + if name: + return os.path.join(meta_dir, get_context_id(name)) + return meta_dir + + +def get_meta_file(name): + return os.path.join(get_meta_dir(name), METAFILE) + + +def get_tls_dir(name=None, endpoint=""): + context_dir = get_context_dir() + if name: + return os.path.join(context_dir, "tls", get_context_id(name), endpoint) + return os.path.join(context_dir, "tls") + + +def get_context_host(path=None): + host = utils.parse_host(path, IS_WINDOWS_PLATFORM) + if host == DEFAULT_UNIX_SOCKET: + # remove http+ from default docker socket url + return host.strip("http+") + return host diff --git a/docker/context/context.py b/docker/context/context.py new file mode 100644 index 00000000..4a0549ca --- /dev/null +++ b/docker/context/context.py @@ -0,0 +1,208 @@ +import os +import json +from shutil import copyfile, rmtree +from docker.tls import TLSConfig +from docker.errors import ContextException +from docker.context.config import get_meta_dir +from docker.context.config import get_meta_file +from docker.context.config import get_tls_dir +from docker.context.config import get_context_host + + +class Context: + """A context.""" + def __init__(self, name, orchestrator="swarm", host=None, endpoints=None): + if not name: + raise Exception("Name not provided") + self.name = name + self.orchestrator = orchestrator + if not endpoints: + default_endpoint = "docker" if ( + orchestrator == "swarm" + ) else orchestrator + self.endpoints = { + default_endpoint: { + "Host": get_context_host(host), + "SkipTLSVerify": False + } + } + else: + for k, v in endpoints.items(): + ekeys = v.keys() + for param in ["Host", "SkipTLSVerify"]: + if param not in ekeys: + raise ContextException( + "Missing parameter {} from endpoint {}".format( + param, k)) + self.endpoints = endpoints + + self.tls_cfg = {} + self.meta_path = "IN MEMORY" + 
self.tls_path = "IN MEMORY" + + def set_endpoint( + self, name="docker", host=None, tls_cfg=None, + skip_tls_verify=False, def_namespace=None): + self.endpoints[name] = { + "Host": get_context_host(host), + "SkipTLSVerify": skip_tls_verify + } + if def_namespace: + self.endpoints[name]["DefaultNamespace"] = def_namespace + + if tls_cfg: + self.tls_cfg[name] = tls_cfg + + def inspect(self): + return self.__call__() + + @classmethod + def load_context(cls, name): + name, orchestrator, endpoints = Context._load_meta(name) + if name: + instance = cls(name, orchestrator, endpoints=endpoints) + instance._load_certs() + instance.meta_path = get_meta_dir(name) + return instance + return None + + @classmethod + def _load_meta(cls, name): + metadata = {} + meta_file = get_meta_file(name) + if os.path.isfile(meta_file): + with open(meta_file) as f: + try: + with open(meta_file) as f: + metadata = json.load(f) + for k, v in metadata["Endpoints"].items(): + metadata["Endpoints"][k]["SkipTLSVerify"] = bool( + v["SkipTLSVerify"]) + except (IOError, KeyError, ValueError) as e: + # unknown format + raise Exception("""Detected corrupted meta file for + context {} : {}""".format(name, e)) + + return ( + metadata["Name"], metadata["Metadata"]["StackOrchestrator"], + metadata["Endpoints"]) + return None, None, None + + def _load_certs(self): + certs = {} + tls_dir = get_tls_dir(self.name) + for endpoint in self.endpoints.keys(): + if not os.path.isdir(os.path.join(tls_dir, endpoint)): + continue + ca_cert = None + cert = None + key = None + for filename in os.listdir(os.path.join(tls_dir, endpoint)): + if filename.startswith("ca"): + ca_cert = os.path.join(tls_dir, endpoint, filename) + elif filename.startswith("cert"): + cert = os.path.join(tls_dir, endpoint, filename) + elif filename.startswith("key"): + key = os.path.join(tls_dir, endpoint, filename) + if all([ca_cert, cert, key]): + certs[endpoint] = TLSConfig( + client_cert=(cert, key), ca_cert=ca_cert) + self.tls_cfg = certs + 
self.tls_path = tls_dir + + def save(self): + meta_dir = get_meta_dir(self.name) + if not os.path.isdir(meta_dir): + os.makedirs(meta_dir) + with open(get_meta_file(self.name), "w") as f: + f.write(json.dumps(self.Metadata)) + + tls_dir = get_tls_dir(self.name) + for endpoint, tls in self.tls_cfg.items(): + if not os.path.isdir(os.path.join(tls_dir, endpoint)): + os.makedirs(os.path.join(tls_dir, endpoint)) + + ca_file = tls.ca_cert + if ca_file: + copyfile(ca_file, os.path.join( + tls_dir, endpoint, os.path.basename(ca_file))) + + if tls.cert: + cert_file, key_file = tls.cert + copyfile(cert_file, os.path.join( + tls_dir, endpoint, os.path.basename(cert_file))) + copyfile(key_file, os.path.join( + tls_dir, endpoint, os.path.basename(key_file))) + + self.meta_path = get_meta_dir(self.name) + self.tls_path = get_tls_dir(self.name) + + def remove(self): + if os.path.isdir(self.meta_path): + rmtree(self.meta_path) + if os.path.isdir(self.tls_path): + rmtree(self.tls_path) + + def __repr__(self): + return "<%s: '%s'>" % (self.__class__.__name__, self.name) + + def __str__(self): + return json.dumps(self.__call__(), indent=2) + + def __call__(self): + result = self.Metadata + result.update(self.TLSMaterial) + result.update(self.Storage) + return result + + @property + def Name(self): + return self.name + + @property + def Host(self): + if self.orchestrator == "swarm": + return self.endpoints["docker"]["Host"] + return self.endpoints[self.orchestrator]["Host"] + + @property + def Orchestrator(self): + return self.orchestrator + + @property + def Metadata(self): + return { + "Name": self.name, + "Metadata": { + "StackOrchestrator": self.orchestrator + }, + "Endpoints": self.endpoints + } + + @property + def TLSConfig(self): + key = self.orchestrator + if key == "swarm": + key = "docker" + if key in self.tls_cfg.keys(): + return self.tls_cfg[key] + return None + + @property + def TLSMaterial(self): + certs = {} + for endpoint, tls in self.tls_cfg.items(): + cert, key = 
tls.cert + certs[endpoint] = list( + map(os.path.basename, [tls.ca_cert, cert, key])) + return { + "TLSMaterial": certs + } + + @property + def Storage(self): + return { + "Storage": { + "MetadataPath": self.meta_path, + "TLSPath": self.tls_path + }} diff --git a/docker/errors.py b/docker/errors.py index c340dcb1..e5d07a5b 100644 --- a/docker/errors.py +++ b/docker/errors.py @@ -163,3 +163,35 @@ def create_unexpected_kwargs_error(name, kwargs): text.append("got unexpected keyword arguments ") text.append(', '.join(quoted_kwargs)) return TypeError(''.join(text)) + + +class MissingContextParameter(DockerException): + def __init__(self, param): + self.param = param + + def __str__(self): + return ("missing parameter: {}".format(self.param)) + + +class ContextAlreadyExists(DockerException): + def __init__(self, name): + self.name = name + + def __str__(self): + return ("context {} already exists".format(self.name)) + + +class ContextException(DockerException): + def __init__(self, msg): + self.msg = msg + + def __str__(self): + return (self.msg) + + +class ContextNotFound(DockerException): + def __init__(self, name): + self.name = name + + def __str__(self): + return ("context '{}' not found".format(self.name)) diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 7819ace4..447760b4 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -11,6 +11,10 @@ import six from .. import errors from .. 
import tls +from ..constants import DEFAULT_HTTP_HOST +from ..constants import DEFAULT_UNIX_SOCKET +from ..constants import DEFAULT_NPIPE +from ..constants import BYTE_UNITS if six.PY2: from urllib import splitnport @@ -18,17 +22,6 @@ if six.PY2: else: from urllib.parse import splitnport, urlparse -DEFAULT_HTTP_HOST = "127.0.0.1" -DEFAULT_UNIX_SOCKET = "http+unix:///var/run/docker.sock" -DEFAULT_NPIPE = 'npipe:////./pipe/docker_engine' - -BYTE_UNITS = { - 'b': 1, - 'k': 1024, - 'm': 1024 * 1024, - 'g': 1024 * 1024 * 1024 -} - def create_ipam_pool(*args, **kwargs): raise errors.DeprecatedMethod( diff --git a/test-requirements.txt b/test-requirements.txt index 0b01e569..24078e27 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,3 +1,4 @@ +setuptools==44.0.0 # last version with python 2.7 support coverage==4.5.2 flake8==3.6.0 mock==1.0.1 diff --git a/tests/integration/context_api_test.py b/tests/integration/context_api_test.py new file mode 100644 index 00000000..60235ee7 --- /dev/null +++ b/tests/integration/context_api_test.py @@ -0,0 +1,52 @@ +import os +import tempfile +import pytest +from docker import errors +from docker.context import ContextAPI +from docker.tls import TLSConfig +from .base import BaseAPIIntegrationTest + + +class ContextLifecycleTest(BaseAPIIntegrationTest): + def test_lifecycle(self): + assert ContextAPI.get_context().Name == "default" + assert not ContextAPI.get_context("test") + assert ContextAPI.get_current_context().Name == "default" + + dirpath = tempfile.mkdtemp() + ca = tempfile.NamedTemporaryFile( + prefix=os.path.join(dirpath, "ca.pem"), mode="r") + cert = tempfile.NamedTemporaryFile( + prefix=os.path.join(dirpath, "cert.pem"), mode="r") + key = tempfile.NamedTemporaryFile( + prefix=os.path.join(dirpath, "key.pem"), mode="r") + + # create context 'test + docker_tls = TLSConfig( + client_cert=(cert.name, key.name), + ca_cert=ca.name) + ContextAPI.create_context( + "test", tls_cfg=docker_tls) + + # check for a 
context 'test' in the context store + assert any([ctx.Name == "test" for ctx in ContextAPI.contexts()]) + # retrieve a context object for 'test' + assert ContextAPI.get_context("test") + # remove context + ContextAPI.remove_context("test") + with pytest.raises(errors.ContextNotFound): + ContextAPI.inspect_context("test") + # check there is no 'test' context in store + assert not ContextAPI.get_context("test") + + ca.close() + key.close() + cert.close() + + def test_context_remove(self): + ContextAPI.create_context("test") + assert ContextAPI.inspect_context("test")["Name"] == "test" + + ContextAPI.remove_context("test") + with pytest.raises(errors.ContextNotFound): + ContextAPI.inspect_context("test") diff --git a/tests/unit/context_test.py b/tests/unit/context_test.py new file mode 100644 index 00000000..5e88c691 --- /dev/null +++ b/tests/unit/context_test.py @@ -0,0 +1,45 @@ +import unittest +import docker +import pytest +from docker.constants import DEFAULT_UNIX_SOCKET +from docker.constants import DEFAULT_NPIPE +from docker.constants import IS_WINDOWS_PLATFORM +from docker.context import ContextAPI, Context + + +class BaseContextTest(unittest.TestCase): + @pytest.mark.skipif( + IS_WINDOWS_PLATFORM, reason='Linux specific path check' + ) + def test_url_compatibility_on_linux(self): + c = Context("test") + assert c.Host == DEFAULT_UNIX_SOCKET.strip("http+") + + @pytest.mark.skipif( + not IS_WINDOWS_PLATFORM, reason='Windows specific path check' + ) + def test_url_compatibility_on_windows(self): + c = Context("test") + assert c.Host == DEFAULT_NPIPE + + def test_fail_on_default_context_create(self): + with pytest.raises(docker.errors.ContextException): + ContextAPI.create_context("default") + + def test_default_in_context_list(self): + found = False + ctx = ContextAPI.contexts() + for c in ctx: + if c.Name == "default": + found = True + assert found is True + + def test_get_current_context(self): + assert ContextAPI.get_current_context().Name == "default" + + def 
test_context_inspect_without_params(self): + ctx = ContextAPI.inspect_context() + assert ctx["Name"] == "default" + assert ctx["Metadata"]["StackOrchestrator"] == "swarm" + assert ctx["Endpoints"]["docker"]["Host"] in [ + DEFAULT_NPIPE, DEFAULT_UNIX_SOCKET.strip("http+")] diff --git a/tests/unit/errors_test.py b/tests/unit/errors_test.py index 2134f86f..54c2ba8f 100644 --- a/tests/unit/errors_test.py +++ b/tests/unit/errors_test.py @@ -101,17 +101,17 @@ class APIErrorTest(unittest.TestCase): assert err.is_error() is True def test_create_error_from_exception(self): - resp = requests.Response() - resp.status_code = 500 - err = APIError('') + resp = requests.Response() + resp.status_code = 500 + err = APIError('') + try: + resp.raise_for_status() + except requests.exceptions.HTTPError as e: try: - resp.raise_for_status() - except requests.exceptions.HTTPError as e: - try: - create_api_error_from_http_exception(e) - except APIError as e: - err = e - assert err.is_server_error() is True + create_api_error_from_http_exception(e) + except APIError as e: + err = e + assert err.is_server_error() is True class ContainerErrorTest(unittest.TestCase): From ab5678469c7d2dc73367a63b947aa84d16f36591 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 6 Feb 2020 10:23:58 +0100 Subject: [PATCH 054/211] Bump 4.2.0 Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- docs/change-log.md | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 0c9ec47c..f0a31709 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.2.0-dev" +version = "4.2.0" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index 7cc05068..2f0a9ed6 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,23 @@ Change log ========== +4.2.0 +----- + +[List of PRs / issues for this 
release](https://github.com/docker/docker-py/milestone/63?closed=1) + +### Bugfixes + +- Fix `win32pipe.WaitNamedPipe` throw exception in Windows containers +- Use `Hostname`, `Username`, `Port` and `ProxyCommand` settings from `.ssh/config` when on SSH +- Set host key policy for ssh transport to `paramiko.WarningPolicy()` +- Set logging level of `paramiko` to warn + +### Features + +- Add support for docker contexts through `docker.ContextAPI` + + 4.1.0 ----- From 51fd6dd1ced0b16486ca66a52baf64515e63131f Mon Sep 17 00:00:00 2001 From: Niklas Saari Date: Wed, 26 Feb 2020 22:34:40 +0200 Subject: [PATCH 055/211] Disable compression by default when using get_archive method Signed-off-by: Niklas Saari --- docker/api/container.py | 12 ++++++++++-- docker/models/containers.py | 8 ++++++-- tests/unit/models_containers_test.py | 2 +- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 45bd3528..391832af 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -694,7 +694,8 @@ class ContainerApiMixin(object): return self._stream_raw_result(res, chunk_size, False) @utils.check_resource('container') - def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE): + def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE, + encode_stream=False): """ Retrieve a file or folder from a container in the form of a tar archive. @@ -705,6 +706,8 @@ class ContainerApiMixin(object): chunk_size (int): The number of bytes returned by each iteration of the generator. If ``None``, data will be streamed as it is received. Default: 2 MB + encode_stream (bool): Determines if data should be encoded + (gzip-compressed) during transmission. Default: False Returns: (tuple): First element is a raw tar data stream. 
Second element is @@ -729,8 +732,13 @@ class ContainerApiMixin(object): params = { 'path': path } + headers = { + "Accept-Encoding": "gzip, deflate" + } if encode_stream else { + "Accept-Encoding": "identity" + } url = self._url('/containers/{0}/archive', container) - res = self._get(url, params=params, stream=True) + res = self._get(url, params=params, stream=True, headers=headers) self._raise_for_status(res) encoded_stat = res.headers.get('x-docker-container-path-stat') return ( diff --git a/docker/models/containers.py b/docker/models/containers.py index d1f275f7..f143d424 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -225,7 +225,8 @@ class Container(Model): """ return self.client.api.export(self.id, chunk_size) - def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE): + def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE, + encode_stream=False): """ Retrieve a file or folder from the container in the form of a tar archive. @@ -235,6 +236,8 @@ class Container(Model): chunk_size (int): The number of bytes returned by each iteration of the generator. If ``None``, data will be streamed as it is received. Default: 2 MB + encode_stream (bool): Determines if data should be encoded + (gzip-compressed) during transmission. Default: False Returns: (tuple): First element is a raw tar data stream. Second element is @@ -255,7 +258,8 @@ class Container(Model): ... 
f.write(chunk) >>> f.close() """ - return self.client.api.get_archive(self.id, path, chunk_size) + return self.client.api.get_archive(self.id, path, + chunk_size, encode_stream) def kill(self, signal=None): """ diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index da5f0ab9..c9f73f37 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -450,7 +450,7 @@ class ContainerTest(unittest.TestCase): container = client.containers.get(FAKE_CONTAINER_ID) container.get_archive('foo') client.api.get_archive.assert_called_with( - FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE + FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE, False ) def test_image(self): From dac038aca2fd47328846a7f98457b574d31b33ab Mon Sep 17 00:00:00 2001 From: Leo Hanisch <23164374+HaaLeo@users.noreply.github.com> Date: Fri, 20 Mar 2020 12:40:58 +0100 Subject: [PATCH 056/211] Fixes docker/docker-py#2533 Signed-off-by: Leo Hanisch <23164374+HaaLeo@users.noreply.github.com> --- docker/transport/sshconn.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 7de0e590..9cfd9980 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -100,6 +100,8 @@ class SSHHTTPAdapter(BaseHTTPAdapter): ) if 'hostname' in host_config: self.ssh_params['hostname'] = host_config['hostname'] + if 'identityfile' in host_config: + self.ssh_params['key_filename'] = host_config['identityfile'] if base_url.port is None and 'port' in host_config: self.ssh_params['port'] = self.ssh_conf['port'] if base_url.username is None and 'user' in host_config: From a07b5ee16c1368a5873cfa08e5f407cbe7d275f5 Mon Sep 17 00:00:00 2001 From: fengbaolong Date: Tue, 28 Apr 2020 16:37:02 +0800 Subject: [PATCH 057/211] fix docker build error when dockerfile contains unicode character. 
If the dockerfile contains a unicode character, len(contents) will return the character length; this length will be less than len(contents_encoded), so the contents data will be truncated. Signed-off-by: fengbaolong --- docker/utils/build.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker/utils/build.py b/docker/utils/build.py index 4fa57518..5787cab0 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -105,8 +105,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False, for name, contents in extra_files: info = tarfile.TarInfo(name) - info.size = len(contents) - t.addfile(info, io.BytesIO(contents.encode('utf-8'))) + contents_encoded = contents.encode('utf-8') + info.size = len(contents_encoded) + t.addfile(info, io.BytesIO(contents_encoded)) t.close() fileobj.seek(0) From df7bf5f5e0a5baafbbf5b88638c09abfd288f686 Mon Sep 17 00:00:00 2001 From: Mike Haboustak Date: Fri, 24 Apr 2020 06:42:59 -0400 Subject: [PATCH 058/211] Add support for DriverOpts in EndpointConfig Docker API 1.32 added support for providing options to a network driver via EndpointConfig when connecting a container to a network. Signed-off-by: Mike Haboustak --- docker/api/container.py | 2 ++ docker/api/network.py | 5 +++-- docker/models/networks.py | 2 ++ docker/types/networks.py | 11 ++++++++++- tests/integration/api_network_test.py | 21 +++++++++++++++++++++ tests/unit/api_network_test.py | 4 +++- 6 files changed, 41 insertions(+), 4 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 45bd3528..9df22a52 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -636,6 +636,8 @@ class ContainerApiMixin(object): network, using the IPv6 protocol. Defaults to ``None``. link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6) addresses. + driver_opt (dict): A dictionary of options to provide to the + network driver. Defaults to ``None``. Returns: (dict) An endpoint config. 
diff --git a/docker/api/network.py b/docker/api/network.py index 750b91b2..139c2d1a 100644 --- a/docker/api/network.py +++ b/docker/api/network.py @@ -216,7 +216,7 @@ class NetworkApiMixin(object): def connect_container_to_network(self, container, net_id, ipv4_address=None, ipv6_address=None, aliases=None, links=None, - link_local_ips=None): + link_local_ips=None, driver_opt=None): """ Connect a container to a network. @@ -240,7 +240,8 @@ class NetworkApiMixin(object): "Container": container, "EndpointConfig": self.create_endpoint_config( aliases=aliases, links=links, ipv4_address=ipv4_address, - ipv6_address=ipv6_address, link_local_ips=link_local_ips + ipv6_address=ipv6_address, link_local_ips=link_local_ips, + driver_opt=driver_opt ), } diff --git a/docker/models/networks.py b/docker/models/networks.py index f944c8e2..093deb7f 100644 --- a/docker/models/networks.py +++ b/docker/models/networks.py @@ -46,6 +46,8 @@ class Network(Model): network, using the IPv6 protocol. Defaults to ``None``. link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6) addresses. + driver_opt (dict): A dictionary of options to provide to the + network driver. Defaults to ``None``. 
Raises: :py:class:`docker.errors.APIError` diff --git a/docker/types/networks.py b/docker/types/networks.py index 1c7b2c9e..1370dc19 100644 --- a/docker/types/networks.py +++ b/docker/types/networks.py @@ -4,7 +4,7 @@ from ..utils import normalize_links, version_lt class EndpointConfig(dict): def __init__(self, version, aliases=None, links=None, ipv4_address=None, - ipv6_address=None, link_local_ips=None): + ipv6_address=None, link_local_ips=None, driver_opt=None): if version_lt(version, '1.22'): raise errors.InvalidVersion( 'Endpoint config is not supported for API version < 1.22' @@ -33,6 +33,15 @@ class EndpointConfig(dict): if ipam_config: self['IPAMConfig'] = ipam_config + if driver_opt: + if version_lt(version, '1.32'): + raise errors.InvalidVersion( + 'DriverOpts is not supported for API version < 1.32' + ) + if not isinstance(driver_opt, dict): + raise TypeError('driver_opt must be a dictionary') + self['DriverOpts'] = driver_opt + class NetworkingConfig(dict): def __init__(self, endpoints_config=None): diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py index 0f26827b..af22da8d 100644 --- a/tests/integration/api_network_test.py +++ b/tests/integration/api_network_test.py @@ -275,6 +275,27 @@ class TestNetworks(BaseAPIIntegrationTest): assert 'LinkLocalIPs' in net_cfg['IPAMConfig'] assert net_cfg['IPAMConfig']['LinkLocalIPs'] == ['169.254.8.8'] + @requires_api_version('1.32') + def test_create_with_driveropt(self): + container = self.client.create_container( + TEST_IMG, 'top', + networking_config=self.client.create_networking_config( + { + 'bridge': self.client.create_endpoint_config( + driver_opt={'com.docker-py.setting': 'on'} + ) + } + ), + host_config=self.client.create_host_config(network_mode='bridge') + ) + self.tmp_containers.append(container) + self.client.start(container) + container_data = self.client.inspect_container(container) + net_cfg = container_data['NetworkSettings']['Networks']['bridge'] + assert 
'DriverOpts' in net_cfg + assert 'com.docker-py.setting' in net_cfg['DriverOpts'] + assert net_cfg['DriverOpts']['com.docker-py.setting'] == 'on' + @requires_api_version('1.22') def test_create_with_links(self): net_name, net_id = self.create_network() diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py index c78554da..758f0132 100644 --- a/tests/unit/api_network_test.py +++ b/tests/unit/api_network_test.py @@ -136,7 +136,8 @@ class NetworkTest(BaseAPIClientTest): container={'Id': container_id}, net_id=network_id, aliases=['foo', 'bar'], - links=[('baz', 'quux')] + links=[('baz', 'quux')], + driver_opt={'com.docker-py.setting': 'yes'}, ) assert post.call_args[0][0] == ( @@ -148,6 +149,7 @@ class NetworkTest(BaseAPIClientTest): 'EndpointConfig': { 'Aliases': ['foo', 'bar'], 'Links': ['baz:quux'], + 'DriverOpts': {'com.docker-py.setting': 'yes'}, }, } From 81eb5d42c99b8fc5cae975e8beeadc716caf349a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Mon, 25 May 2020 08:31:24 +0300 Subject: [PATCH 059/211] Fix parameter names in TLSConfig error messages and comments MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Ville Skyttä --- docker/tls.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/tls.py b/docker/tls.py index d4671d12..1b297ab6 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -32,7 +32,7 @@ class TLSConfig(object): # https://docs.docker.com/engine/articles/https/ # This diverges from the Docker CLI in that users can specify 'tls' # here, but also disable any public/default CA pool verification by - # leaving tls_verify=False + # leaving verify=False self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint @@ -62,7 +62,7 @@ class TLSConfig(object): # https://github.com/docker/docker-py/issues/963 self.ssl_version = ssl.PROTOCOL_TLSv1 - # "tls" and "tls_verify" must have both or neither cert/key files 
In + # "client_cert" must have both or neither cert/key files. In # either case, Alert the user when both are expected, but any are # missing. @@ -71,7 +71,7 @@ class TLSConfig(object): tls_cert, tls_key = client_cert except ValueError: raise errors.TLSParameterError( - 'client_config must be a tuple of' + 'client_cert must be a tuple of' ' (client certificate, key file)' ) @@ -79,7 +79,7 @@ class TLSConfig(object): not os.path.isfile(tls_key)): raise errors.TLSParameterError( 'Path to a certificate and key files must be provided' - ' through the client_config param' + ' through the client_cert param' ) self.cert = (tls_cert, tls_key) @@ -88,7 +88,7 @@ class TLSConfig(object): self.ca_cert = ca_cert if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert): raise errors.TLSParameterError( - 'Invalid CA certificate provided for `tls_ca_cert`.' + 'Invalid CA certificate provided for `ca_cert`.' ) def configure_client(self, client): From fd4526a7d34a08d55532dac34d0e94804176de10 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Mon, 17 Feb 2020 10:19:56 +0100 Subject: [PATCH 060/211] xfail "docker top" tests, and adjust for alpine image Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_container_test.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 1ba3eaa5..c503a367 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1102,6 +1102,8 @@ class PortTest(BaseAPIIntegrationTest): class ContainerTopTest(BaseAPIIntegrationTest): + @pytest.mark.xfail(reason='Output of docker top depends on host distro, ' + 'and is not formalized.') def test_top(self): container = self.client.create_container( TEST_IMG, ['sleep', '60'] @@ -1112,9 +1114,7 @@ class ContainerTopTest(BaseAPIIntegrationTest): self.client.start(container) res = self.client.top(container) if not IS_WINDOWS_PLATFORM: 
- assert res['Titles'] == [ - 'UID', 'PID', 'PPID', 'C', 'STIME', 'TTY', 'TIME', 'CMD' - ] + assert res['Titles'] == [u'PID', u'USER', u'TIME', u'COMMAND'] assert len(res['Processes']) == 1 assert res['Processes'][0][-1] == 'sleep 60' self.client.kill(container) @@ -1122,6 +1122,8 @@ class ContainerTopTest(BaseAPIIntegrationTest): @pytest.mark.skipif( IS_WINDOWS_PLATFORM, reason='No psargs support on windows' ) + @pytest.mark.xfail(reason='Output of docker top depends on host distro, ' + 'and is not formalized.') def test_top_with_psargs(self): container = self.client.create_container( TEST_IMG, ['sleep', '60']) @@ -1129,11 +1131,8 @@ class ContainerTopTest(BaseAPIIntegrationTest): self.tmp_containers.append(container) self.client.start(container) - res = self.client.top(container, 'waux') - assert res['Titles'] == [ - 'USER', 'PID', '%CPU', '%MEM', 'VSZ', 'RSS', - 'TTY', 'STAT', 'START', 'TIME', 'COMMAND' - ] + res = self.client.top(container, '-eopid,user') + assert res['Titles'] == [u'PID', u'USER'] assert len(res['Processes']) == 1 assert res['Processes'][0][10] == 'sleep 60' From db6a2471f527c69b33840ed1121114dd526a0134 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Sat, 15 Feb 2020 00:13:16 +0100 Subject: [PATCH 061/211] Use official docker:dind image instead of custom image This replaces the custom dockerswarm/dind image with the official dind images, which should provide the same functionality. 
Signed-off-by: Sebastiaan van Stijn --- Jenkinsfile | 2 +- Makefile | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 7af23e9c..28511b22 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -84,7 +84,7 @@ def runTests = { Map settings -> try { sh """docker network create ${testNetwork}""" sh """docker run -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ - dockerswarm/dind:${dockerVersion} dockerd -H tcp://0.0.0.0:2375 + docker:${dockerVersion}-dind dockerd -H tcp://0.0.0.0:2375 """ sh """docker run \\ --name ${testContainerName} \\ diff --git a/Makefile b/Makefile index db103f5b..f456283f 100644 --- a/Makefile +++ b/Makefile @@ -55,7 +55,7 @@ integration-dind: integration-dind-py2 integration-dind-py3 integration-dind-py2: build setup-network docker rm -vf dpy-dind-py2 || : docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\ - dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd -H tcp://0.0.0.0:2375 --experimental + docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py2:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ --network dpy-tests docker-sdk-python py.test tests/integration docker rm -vf dpy-dind-py2 @@ -64,7 +64,7 @@ integration-dind-py2: build setup-network integration-dind-py3: build-py3 setup-network docker rm -vf dpy-dind-py3 || : docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\ - dockerswarm/dind:${TEST_ENGINE_VERSION} dockerd -H tcp://0.0.0.0:2375 --experimental + docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ --network dpy-tests docker-sdk-python3 py.test tests/integration docker rm -vf dpy-dind-py3 @@ -76,7 +76,7 @@ integration-dind-ssl: build-dind-certs build build-py3 docker run -d 
--env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1"\ --env="DOCKER_CERT_PATH=/certs" --volumes-from dpy-dind-certs --name dpy-dind-ssl\ --network dpy-tests --network-alias docker -v /tmp --privileged\ - dockerswarm/dind:${TEST_ENGINE_VERSION}\ + docker:${TEST_ENGINE_VERSION}-dind\ dockerd --tlsverify --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\ --tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ From 9713227d7bca2ae37357a500a4e80e6bab152b16 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 14 Feb 2020 23:51:44 +0100 Subject: [PATCH 062/211] Jenkinsfile: remove obsolete engine versions Signed-off-by: Sebastiaan van Stijn --- Jenkinsfile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 28511b22..f905325c 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -31,7 +31,7 @@ def buildImages = { -> } def getDockerVersions = { -> - def dockerVersions = ["17.06.2-ce"] + def dockerVersions = ["19.03.5"] wrappedNode(label: "ubuntu && !zfs && amd64") { def result = sh(script: """docker run --rm \\ --entrypoint=python \\ @@ -46,8 +46,6 @@ def getDockerVersions = { -> def getAPIVersion = { engineVersion -> def versionMap = [ - '17.06': '1.30', - '18.03': '1.37', '18.09': '1.39', '19.03': '1.40' ] From 913d129dc9e5cb84bfe385a1b58badfae48e1344 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 14 Feb 2020 23:54:20 +0100 Subject: [PATCH 063/211] Update test engine version to 19.03.5 Signed-off-by: Sebastiaan van Stijn --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f456283f..551868ec 100644 --- a/Makefile +++ b/Makefile @@ -42,7 +42,7 @@ integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} TEST_API_VERSION ?= 1.35 
-TEST_ENGINE_VERSION ?= 18.09.5 +TEST_ENGINE_VERSION ?= 19.03.5 .PHONY: setup-network setup-network: From 9b59e4911309dc1e9ff9017f2adad8bad8060e80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wilson=20J=C3=BAnior?= Date: Fri, 17 Apr 2020 09:37:34 -0300 Subject: [PATCH 064/211] Fix tests to support both log plugin feedbacks MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Wilson Júnior Docker-DCO-1.1-Signed-off-by: Wilson Júnior (github: wpjunior) --- tests/integration/api_container_test.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index c503a367..411d4c2e 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -273,11 +273,14 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_invalid_log_driver_raises_exception(self): log_config = docker.types.LogConfig( - type='asdf-nope', + type='asdf', config={} ) - expected_msg = "logger: no log driver named 'asdf-nope' is registered" + expected_msgs = [ + "logger: no log driver named 'asdf' is registered", + "looking up logging plugin asdf: plugin \"asdf\" not found", + ] with pytest.raises(docker.errors.APIError) as excinfo: # raises an internal server error 500 container = self.client.create_container( @@ -287,7 +290,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): ) self.client.start(container) - assert excinfo.value.explanation == expected_msg + assert excinfo.value.explanation in expected_msgs def test_valid_no_log_driver_specified(self): log_config = docker.types.LogConfig( From 105efa02a9016646998400efe3cb4f0c7dcce16b Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 28 May 2020 20:53:45 +0200 Subject: [PATCH 065/211] Specify when to use `tls` on Context constructor Signed-off-by: Ulysses Souza --- docker/context/config.py | 4 ++-- docker/context/context.py | 9 +++++---- 
tests/unit/context_test.py | 4 ++++ 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/docker/context/config.py b/docker/context/config.py index ac9a342e..baf54f79 100644 --- a/docker/context/config.py +++ b/docker/context/config.py @@ -73,8 +73,8 @@ def get_tls_dir(name=None, endpoint=""): return os.path.join(context_dir, "tls") -def get_context_host(path=None): - host = utils.parse_host(path, IS_WINDOWS_PLATFORM) +def get_context_host(path=None, tls=False): + host = utils.parse_host(path, IS_WINDOWS_PLATFORM, tls) if host == DEFAULT_UNIX_SOCKET: # remove http+ from default docker socket url return host.strip("http+") diff --git a/docker/context/context.py b/docker/context/context.py index 4a0549ca..fdc290a0 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -11,7 +11,8 @@ from docker.context.config import get_context_host class Context: """A context.""" - def __init__(self, name, orchestrator="swarm", host=None, endpoints=None): + def __init__(self, name, orchestrator="swarm", host=None, endpoints=None, + tls=False): if not name: raise Exception("Name not provided") self.name = name @@ -22,8 +23,8 @@ class Context: ) else orchestrator self.endpoints = { default_endpoint: { - "Host": get_context_host(host), - "SkipTLSVerify": False + "Host": get_context_host(host, tls), + "SkipTLSVerify": not tls } } else: @@ -44,7 +45,7 @@ class Context: self, name="docker", host=None, tls_cfg=None, skip_tls_verify=False, def_namespace=None): self.endpoints[name] = { - "Host": get_context_host(host), + "Host": get_context_host(host, not skip_tls_verify), "SkipTLSVerify": skip_tls_verify } if def_namespace: diff --git a/tests/unit/context_test.py b/tests/unit/context_test.py index 5e88c691..6d6d6726 100644 --- a/tests/unit/context_test.py +++ b/tests/unit/context_test.py @@ -37,6 +37,10 @@ class BaseContextTest(unittest.TestCase): def test_get_current_context(self): assert ContextAPI.get_current_context().Name == "default" + def 
test_https_host(self): + c = Context("test", host="tcp://testdomain:8080", tls=True) + assert c.Host == "https://testdomain:8080" + def test_context_inspect_without_params(self): ctx = ContextAPI.inspect_context() assert ctx["Name"] == "default" From 31276df6a31511f5d1654b98112f2ea02dea4a91 Mon Sep 17 00:00:00 2001 From: aiordache Date: Sat, 30 May 2020 11:01:22 +0200 Subject: [PATCH 066/211] Make orchestrator field optional Signed-off-by: aiordache --- docker/context/api.py | 16 +++++++--------- docker/context/context.py | 18 ++++++++++-------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/docker/context/api.py b/docker/context/api.py index fc7e8940..c45115bc 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -14,11 +14,11 @@ class ContextAPI(object): Contains methods for context management: create, list, remove, get, inspect. """ - DEFAULT_CONTEXT = Context("default") + DEFAULT_CONTEXT = Context("default", "swarm") @classmethod def create_context( - cls, name, orchestrator="swarm", host=None, tls_cfg=None, + cls, name, orchestrator=None, host=None, tls_cfg=None, default_namespace=None, skip_tls_verify=False): """Creates a new context. 
Returns: @@ -38,9 +38,7 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", - "Metadata": { - "StackOrchestrator": "swarm" - }, + "Metadata": {}, "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", @@ -57,7 +55,9 @@ class ContextAPI(object): ctx = Context.load_context(name) if ctx: raise errors.ContextAlreadyExists(name) - endpoint = "docker" if orchestrator == "swarm" else orchestrator + endpoint = "docker" + if orchestrator and orchestrator != "swarm": + endpoint = orchestrator ctx = Context(name, orchestrator) ctx.set_endpoint( endpoint, host, tls_cfg, @@ -79,9 +79,7 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", - "Metadata": { - "StackOrchestrator": "swarm" - }, + "Metadata": {}, "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", diff --git a/docker/context/context.py b/docker/context/context.py index fdc290a0..b2af20c6 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -11,7 +11,7 @@ from docker.context.config import get_context_host class Context: """A context.""" - def __init__(self, name, orchestrator="swarm", host=None, endpoints=None, + def __init__(self, name, orchestrator=None, host=None, endpoints=None, tls=False): if not name: raise Exception("Name not provided") @@ -19,7 +19,7 @@ class Context: self.orchestrator = orchestrator if not endpoints: default_endpoint = "docker" if ( - orchestrator == "swarm" + not orchestrator or orchestrator == "swarm" ) else orchestrator self.endpoints = { default_endpoint: { @@ -85,7 +85,8 @@ class Context: context {} : {}""".format(name, e)) return ( - metadata["Name"], metadata["Metadata"]["StackOrchestrator"], + metadata["Name"], + metadata["Metadata"].get("StackOrchestrator", None), metadata["Endpoints"]) return None, None, None @@ -162,7 +163,7 @@ class Context: @property def Host(self): - if self.orchestrator == "swarm": + if not self.orchestrator or self.orchestrator == "swarm": return self.endpoints["docker"]["Host"] 
return self.endpoints[self.orchestrator]["Host"] @@ -172,18 +173,19 @@ class Context: @property def Metadata(self): + meta = {} + if self.orchestrator: + meta = {"StackOrchestrator": self.orchestrator} return { "Name": self.name, - "Metadata": { - "StackOrchestrator": self.orchestrator - }, + "Metadata": meta, "Endpoints": self.endpoints } @property def TLSConfig(self): key = self.orchestrator - if key == "swarm": + if not key or key == "swarm": key = "docker" if key in self.tls_cfg.keys(): return self.tls_cfg[key] From 67cad6842ceb9a49fbff70faa8dbff8b7ef20134 Mon Sep 17 00:00:00 2001 From: aiordache Date: Tue, 2 Jun 2020 10:45:52 +0200 Subject: [PATCH 067/211] add test for context load without orchestrator Signed-off-by: aiordache --- tests/integration/context_api_test.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/integration/context_api_test.py b/tests/integration/context_api_test.py index 60235ee7..a2a12a5c 100644 --- a/tests/integration/context_api_test.py +++ b/tests/integration/context_api_test.py @@ -50,3 +50,10 @@ class ContextLifecycleTest(BaseAPIIntegrationTest): ContextAPI.remove_context("test") with pytest.raises(errors.ContextNotFound): ContextAPI.inspect_context("test") + + def test_load_context_without_orchestrator(self): + ContextAPI.create_context("test") + ctx = ContextAPI.get_context("test") + assert ctx + assert ctx.Name == "test" + assert ctx.Orchestrator is None From 9923746095d9fd9a8fabf4a8ce5e895ad5a3e48c Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 2 Jun 2020 15:47:10 +0200 Subject: [PATCH 068/211] Bump 4.2.1 Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- docs/change-log.md | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index f0a31709..d69fbd0d 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.2.0" +version = "4.2.1" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff 
--git a/docs/change-log.md b/docs/change-log.md index 2f0a9ed6..4a37b594 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,16 @@ Change log ========== +4.2.1 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/65?closed=1) + +### Features + +- Add option on when to use `tls` on Context constructor +- Make context orchestrator field optional + 4.2.0 ----- From 309ce44052223d374d4e0174e163d28fe195fc5b Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 25 Jun 2020 16:27:07 +0200 Subject: [PATCH 069/211] Skip parsing non-docker endpoints Signed-off-by: aiordache --- docker/context/context.py | 97 +++++++++++++++++++++++++-------------- 1 file changed, 62 insertions(+), 35 deletions(-) diff --git a/docker/context/context.py b/docker/context/context.py index b2af20c6..2413b2ec 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -16,30 +16,42 @@ class Context: if not name: raise Exception("Name not provided") self.name = name + self.context_type = None self.orchestrator = orchestrator + self.endpoints = {} + self.tls_cfg = {} + self.meta_path = "IN MEMORY" + self.tls_path = "IN MEMORY" + if not endpoints: + # set default docker endpoint if no endpoint is set default_endpoint = "docker" if ( not orchestrator or orchestrator == "swarm" ) else orchestrator + self.endpoints = { default_endpoint: { "Host": get_context_host(host, tls), "SkipTLSVerify": not tls } } - else: - for k, v in endpoints.items(): - ekeys = v.keys() - for param in ["Host", "SkipTLSVerify"]: - if param not in ekeys: - raise ContextException( - "Missing parameter {} from endpoint {}".format( - param, k)) - self.endpoints = endpoints + return - self.tls_cfg = {} - self.meta_path = "IN MEMORY" - self.tls_path = "IN MEMORY" + # check docker endpoints + for k, v in endpoints.items(): + if not isinstance(v, dict): + # unknown format + raise ContextException("""Unknown endpoint format for + context {}: {}""".format(name, v)) + + 
self.endpoints[k] = v + if k != "docker": + continue + + self.endpoints[k]["Host"] = v.get("Host", get_context_host( + host, tls)) + self.endpoints[k]["SkipTLSVerify"] = bool(v.get( + "SkipTLSVerify", not tls)) def set_endpoint( self, name="docker", host=None, tls_cfg=None, @@ -59,9 +71,13 @@ class Context: @classmethod def load_context(cls, name): - name, orchestrator, endpoints = Context._load_meta(name) - if name: - instance = cls(name, orchestrator, endpoints=endpoints) + meta = Context._load_meta(name) + if meta: + instance = cls( + meta["Name"], + orchestrator=meta["Metadata"].get("StackOrchestrator", None), + endpoints=meta.get("Endpoints", None)) + instance.context_type = meta["Metadata"].get("Type", None) instance._load_certs() instance.meta_path = get_meta_dir(name) return instance @@ -69,26 +85,30 @@ class Context: @classmethod def _load_meta(cls, name): - metadata = {} meta_file = get_meta_file(name) - if os.path.isfile(meta_file): - with open(meta_file) as f: - try: - with open(meta_file) as f: - metadata = json.load(f) - for k, v in metadata["Endpoints"].items(): - metadata["Endpoints"][k]["SkipTLSVerify"] = bool( - v["SkipTLSVerify"]) - except (IOError, KeyError, ValueError) as e: - # unknown format - raise Exception("""Detected corrupted meta file for - context {} : {}""".format(name, e)) + if not os.path.isfile(meta_file): + return None - return ( - metadata["Name"], - metadata["Metadata"].get("StackOrchestrator", None), - metadata["Endpoints"]) - return None, None, None + metadata = {} + try: + with open(meta_file) as f: + metadata = json.load(f) + except (IOError, KeyError, ValueError) as e: + # unknown format + raise Exception("""Detected corrupted meta file for + context {} : {}""".format(name, e)) + + # for docker endpoints, set defaults for + # Host and SkipTLSVerify fields + for k, v in metadata["Endpoints"].items(): + if k != "docker": + continue + metadata["Endpoints"][k]["Host"] = v.get( + "Host", get_context_host(None, False)) + 
metadata["Endpoints"][k]["SkipTLSVerify"] = bool( + v.get("SkipTLSVerify", True)) + + return metadata def _load_certs(self): certs = {} @@ -157,6 +177,9 @@ class Context: result.update(self.Storage) return result + def is_docker_host(self): + return self.context_type is None + @property def Name(self): return self.name @@ -164,8 +187,12 @@ class Context: @property def Host(self): if not self.orchestrator or self.orchestrator == "swarm": - return self.endpoints["docker"]["Host"] - return self.endpoints[self.orchestrator]["Host"] + endpoint = self.endpoints.get("docker", None) + if endpoint: + return endpoint.get("Host", None) + return None + + return self.endpoints[self.orchestrator].get("Host", None) @property def Orchestrator(self): From 2c68b382a8847118fb11f40675823602d653357d Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Wed, 3 Jun 2020 10:26:41 +0200 Subject: [PATCH 070/211] Update test engine version to 19.03.12 Signed-off-by: Sebastiaan van Stijn --- Jenkinsfile | 4 ++-- Makefile | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 8777214c..88c21592 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -31,7 +31,7 @@ def buildImages = { -> } def getDockerVersions = { -> - def dockerVersions = ["19.03.5"] + def dockerVersions = ["19.03.12"] wrappedNode(label: "amd64 && ubuntu-1804 && overlay2") { def result = sh(script: """docker run --rm \\ --entrypoint=python \\ @@ -66,7 +66,7 @@ def runTests = { Map settings -> throw new Exception("Need test image object, e.g.: `runTests(testImage: img)`") } if (!dockerVersion) { - throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '1.12.3')`") + throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '19.03.12')`") } if (!pythonVersion) { throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py2.7')`") diff --git a/Makefile b/Makefile index 551868ec..4795c63c 100644 --- a/Makefile +++ 
b/Makefile @@ -42,7 +42,7 @@ integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} TEST_API_VERSION ?= 1.35 -TEST_ENGINE_VERSION ?= 19.03.5 +TEST_ENGINE_VERSION ?= 19.03.12 .PHONY: setup-network setup-network: From e18a64b6302a24d18f291d14af102917304f330f Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 30 Jun 2020 17:22:00 +0200 Subject: [PATCH 071/211] Bump 4.2.2 Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- docs/change-log.md | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index a7546092..06d6cc73 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.3.0-dev" +version = "4.2.2" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index ab7065a9..84ed013f 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,15 @@ Change log ========== +4.2.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/66?closed=1) + +### Bugfixes + +- Fix context load for non-docker endpoints + 4.2.1 ----- From 6d9847838aec3895a00caf0963c5b5b33b303aab Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 30 Jun 2020 18:30:49 +0200 Subject: [PATCH 072/211] Update version to 4.3.0-dev Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 06d6cc73..a7546092 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.2.2" +version = "4.3.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From c65d437843310b46ccb93b1c418ff7da547a5fec Mon Sep 17 00:00:00 2001 From: Ofek Lev Date: Wed, 10 Jun 2020 15:31:19 -0400 Subject: [PATCH 073/211] Upgrade Windows dependency Signed-off-by: Ofek Lev --- 
requirements.txt | 3 +-- setup.py | 5 +---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index 804a78a0..340e4312 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,8 +11,7 @@ paramiko==2.4.2 pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 -pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' -pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' +pywin32==227; sys_platform == 'win32' requests==2.20.0 six==1.10.0 urllib3==1.24.3 diff --git a/setup.py b/setup.py index c29787b6..c7022950 100644 --- a/setup.py +++ b/setup.py @@ -24,10 +24,7 @@ extras_require = { ':python_version < "3.3"': 'ipaddress >= 1.0.16', # win32 APIs if on Windows (required for npipe support) - # Python 3.6 is only compatible with v220 ; Python < 3.5 is not supported - # on v220 ; ALL versions are broken for v222 (as of 2018-01-26) - ':sys_platform == "win32" and python_version < "3.6"': 'pypiwin32==219', - ':sys_platform == "win32" and python_version >= "3.6"': 'pypiwin32==223', + ':sys_platform == "win32"': 'pywin32==227', # If using docker-py over TLS, highly recommend this option is # pip-installed or pinned. 
From 26d8045ffa99ec402e451cde67415b14b13cc95a Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 17 Jul 2020 14:25:27 +0200 Subject: [PATCH 074/211] Fix CreateContainerTest.test_invalid_log_driver_raises_exception This test was updated in 7d92fbdee1b8621f54faa595ba53d7ef78ef1acc, but omitted the "error" prefix in the message, causing the test to fail; _________ CreateContainerTest.test_invalid_log_driver_raises_exception _________ tests/integration/api_container_test.py:293: in test_invalid_log_driver_raises_exception assert excinfo.value.explanation in expected_msgs E AssertionError: assert 'error looking up logging plugin asdf: plugin "asdf" not found' in ["logger: no log driver named 'asdf' is registered", 'looking up logging plugin asdf: plugin "asdf" not found'] E + where 'error looking up logging plugin asdf: plugin "asdf" not found' = APIError(HTTPError('400 Client Error: Bad Request for url: http+docker://localhost/v1.39/containers/create')).explanation E + where APIError(HTTPError('400 Client Error: Bad Request for url: http+docker://localhost/v1.39/containers/create')) = .value Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_container_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 411d4c2e..65e611b2 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -279,7 +279,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): expected_msgs = [ "logger: no log driver named 'asdf' is registered", - "looking up logging plugin asdf: plugin \"asdf\" not found", + "error looking up logging plugin asdf: plugin \"asdf\" not found", ] with pytest.raises(docker.errors.APIError) as excinfo: # raises an internal server error 500 From dd0450a14c407050db141af486cc2ed9639ffd8d Mon Sep 17 00:00:00 2001 From: Lucidiot Date: Fri, 7 Aug 2020 13:58:35 +0200 Subject: [PATCH 075/211] Add device 
requests (#2471) * Add DeviceRequest type Signed-off-by: Erwan Rouchet * Add device_requests kwarg in host config Signed-off-by: Erwan Rouchet * Add unit test for device requests Signed-off-by: Erwan Rouchet * Fix unit test Signed-off-by: Erwan Rouchet * Use parentheses for multiline import Signed-off-by: Erwan Rouchet * Create 1.40 client for device-requests test Signed-off-by: Laurie O Co-authored-by: Laurie O Co-authored-by: Bastien Abadie --- docker/api/container.py | 3 + docker/models/containers.py | 4 ++ docker/types/__init__.py | 4 +- docker/types/containers.py | 113 ++++++++++++++++++++++++++++++- tests/unit/api_container_test.py | 64 ++++++++++++++++- 5 files changed, 185 insertions(+), 3 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 9df22a52..2ba08e53 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -480,6 +480,9 @@ class ContainerApiMixin(object): For example, ``/dev/sda:/dev/xvda:rwm`` allows the container to have read-write access to the host's ``/dev/sda`` via a node named ``/dev/xvda`` inside the container. + device_requests (:py:class:`list`): Expose host resources such as + GPUs to the container, as a list of + :py:class:`docker.types.DeviceRequest` instances. dns (:py:class:`list`): Set custom DNS servers. dns_opt (:py:class:`list`): Additional options to be added to the container's ``resolv.conf`` file diff --git a/docker/models/containers.py b/docker/models/containers.py index d1f275f7..e8082ba4 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -579,6 +579,9 @@ class ContainerCollection(Collection): For example, ``/dev/sda:/dev/xvda:rwm`` allows the container to have read-write access to the host's ``/dev/sda`` via a node named ``/dev/xvda`` inside the container. + device_requests (:py:class:`list`): Expose host resources such as + GPUs to the container, as a list of + :py:class:`docker.types.DeviceRequest` instances. 
dns (:py:class:`list`): Set custom DNS servers. dns_opt (:py:class:`list`): Additional options to be added to the container's ``resolv.conf`` file. @@ -998,6 +1001,7 @@ RUN_HOST_CONFIG_KWARGS = [ 'device_write_bps', 'device_write_iops', 'devices', + 'device_requests', 'dns_opt', 'dns_search', 'dns', diff --git a/docker/types/__init__.py b/docker/types/__init__.py index 5db330e2..b425746e 100644 --- a/docker/types/__init__.py +++ b/docker/types/__init__.py @@ -1,5 +1,7 @@ # flake8: noqa -from .containers import ContainerConfig, HostConfig, LogConfig, Ulimit +from .containers import ( + ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest +) from .daemon import CancellableStream from .healthcheck import Healthcheck from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig diff --git a/docker/types/containers.py b/docker/types/containers.py index fd8cab49..149b85df 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -154,6 +154,104 @@ class Ulimit(DictType): self['Hard'] = value +class DeviceRequest(DictType): + """ + Create a device request to be used with + :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`. + + Args: + + driver (str): Which driver to use for this device. Optional. + count (int): Number or devices to request. Optional. + Set to -1 to request all available devices. + device_ids (list): List of strings for device IDs. Optional. + Set either ``count`` or ``device_ids``. + capabilities (list): List of lists of strings to request + capabilities. Optional. The global list acts like an OR, + and the sub-lists are AND. The driver will try to satisfy + one of the sub-lists. + Available capabilities for the ``nvidia`` driver can be found + `here `_. + options (dict): Driver-specific options. Optional. 
+ """ + + def __init__(self, **kwargs): + driver = kwargs.get('driver', kwargs.get('Driver')) + count = kwargs.get('count', kwargs.get('Count')) + device_ids = kwargs.get('device_ids', kwargs.get('DeviceIDs')) + capabilities = kwargs.get('capabilities', kwargs.get('Capabilities')) + options = kwargs.get('options', kwargs.get('Options')) + + if driver is None: + driver = '' + elif not isinstance(driver, six.string_types): + raise ValueError('DeviceRequest.driver must be a string') + if count is None: + count = 0 + elif not isinstance(count, int): + raise ValueError('DeviceRequest.count must be an integer') + if device_ids is None: + device_ids = [] + elif not isinstance(device_ids, list): + raise ValueError('DeviceRequest.device_ids must be a list') + if capabilities is None: + capabilities = [] + elif not isinstance(capabilities, list): + raise ValueError('DeviceRequest.capabilities must be a list') + if options is None: + options = {} + elif not isinstance(options, dict): + raise ValueError('DeviceRequest.options must be a dict') + + super(DeviceRequest, self).__init__({ + 'Driver': driver, + 'Count': count, + 'DeviceIDs': device_ids, + 'Capabilities': capabilities, + 'Options': options + }) + + @property + def driver(self): + return self['Driver'] + + @driver.setter + def driver(self, value): + self['Driver'] = value + + @property + def count(self): + return self['Count'] + + @count.setter + def count(self, value): + self['Count'] = value + + @property + def device_ids(self): + return self['DeviceIDs'] + + @device_ids.setter + def device_ids(self, value): + self['DeviceIDs'] = value + + @property + def capabilities(self): + return self['Capabilities'] + + @capabilities.setter + def capabilities(self, value): + self['Capabilities'] = value + + @property + def options(self): + return self['Options'] + + @options.setter + def options(self, value): + self['Options'] = value + + class HostConfig(dict): def __init__(self, version, binds=None, port_bindings=None, 
lxc_conf=None, publish_all_ports=False, links=None, @@ -176,7 +274,7 @@ class HostConfig(dict): volume_driver=None, cpu_count=None, cpu_percent=None, nano_cpus=None, cpuset_mems=None, runtime=None, mounts=None, cpu_rt_period=None, cpu_rt_runtime=None, - device_cgroup_rules=None): + device_cgroup_rules=None, device_requests=None): if mem_limit is not None: self['Memory'] = parse_bytes(mem_limit) @@ -536,6 +634,19 @@ class HostConfig(dict): ) self['DeviceCgroupRules'] = device_cgroup_rules + if device_requests is not None: + if version_lt(version, '1.40'): + raise host_config_version_error('device_requests', '1.40') + if not isinstance(device_requests, list): + raise host_config_type_error( + 'device_requests', device_requests, 'list' + ) + self['DeviceRequests'] = [] + for req in device_requests: + if not isinstance(req, DeviceRequest): + req = DeviceRequest(**req) + self['DeviceRequests'].append(req) + def host_config_type_error(param, param_value, expected): error_msg = 'Invalid type for {0} param: expected {1} but found {2}' diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index a7e183c8..8a0577e7 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -5,6 +5,7 @@ import json import signal import docker +from docker.api import APIClient import pytest import six @@ -12,7 +13,7 @@ from . 
import fake_api from ..helpers import requires_api_version from .api_test import ( BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS, - fake_inspect_container + fake_inspect_container, url_base ) try: @@ -767,6 +768,67 @@ class CreateContainerTest(BaseAPIClientTest): assert args[1]['headers'] == {'Content-Type': 'application/json'} assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS + def test_create_container_with_device_requests(self): + client = APIClient(version='1.40') + fake_api.fake_responses.setdefault( + '{0}/v1.40/containers/create'.format(fake_api.prefix), + fake_api.post_fake_create_container, + ) + client.create_container( + 'busybox', 'true', host_config=client.create_host_config( + device_requests=[ + { + 'device_ids': [ + '0', + 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a' + ] + }, + { + 'driver': 'nvidia', + 'Count': -1, + 'capabilities': [ + ['gpu', 'utility'] + ], + 'options': { + 'key': 'value' + } + } + ] + ) + ) + + args = fake_request.call_args + assert args[0][1] == url_base + 'v1.40/' + 'containers/create' + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = client.create_host_config() + expected_payload['HostConfig']['DeviceRequests'] = [ + { + 'Driver': '', + 'Count': 0, + 'DeviceIDs': [ + '0', + 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a' + ], + 'Capabilities': [], + 'Options': {} + }, + { + 'Driver': 'nvidia', + 'Count': -1, + 'DeviceIDs': [], + 'Capabilities': [ + ['gpu', 'utility'] + ], + 'Options': { + 'key': 'value' + } + } + ] + assert json.loads(args[1]['data']) == expected_payload + assert args[1]['headers']['Content-Type'] == 'application/json' + assert set(args[1]['headers']) <= {'Content-Type', 'User-Agent'} + assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS + def test_create_container_with_labels_dict(self): labels_dict = { six.text_type('foo'): six.text_type('1'), From 631abd156ad11433c9c09d957ebcb1868f738a58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= 
Date: Fri, 7 Aug 2020 15:33:19 +0300 Subject: [PATCH 076/211] Spelling fixes (#2571) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Ville Skyttä --- docker/api/container.py | 2 +- docs/change-log.md | 4 ++-- tests/unit/utils_build_test.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 2ba08e53..ee3b4c3f 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1125,7 +1125,7 @@ class ContainerApiMixin(object): else: if decode: raise errors.InvalidArgument( - "decode is only available in conjuction with stream=True" + "decode is only available in conjunction with stream=True" ) return self._result(self._get(url, params={'stream': False}), json=True) diff --git a/docs/change-log.md b/docs/change-log.md index 84ed013f..f0be8ac1 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -129,7 +129,7 @@ Change log ### Bugfixes -* Fix base_url to keep TCP protocol on utils.py by letting the responsability of changing the +* Fix base_url to keep TCP protocol on utils.py by letting the responsibility of changing the protocol to `parse_host` afterwards, letting `base_url` with the original value. * XFAIL test_attach_stream_and_cancel on TLS @@ -1233,7 +1233,7 @@ like the others (`Client.volumes`, `Client.create_volume`, `Client.inspect_volume`, `Client.remove_volume`). * Added support for the `group_add` parameter in `create_host_config`. -* Added support for the CPU CFS (`cpu_quota` and `cpu_period`) parameteres +* Added support for the CPU CFS (`cpu_quota` and `cpu_period`) parameters in `create_host_config`. * Added support for the archive API endpoint (`Client.get_archive`, `Client.put_archive`). 
diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py index 012f15b4..bc6fb5f4 100644 --- a/tests/unit/utils_build_test.py +++ b/tests/unit/utils_build_test.py @@ -335,7 +335,7 @@ class ExcludePathsTest(unittest.TestCase): # Dockerignore reference stipulates that absolute paths are # equivalent to relative paths, hence /../foo should be # equivalent to ../foo. It also stipulates that paths are run - # through Go's filepath.Clean, which explicitely "replace + # through Go's filepath.Clean, which explicitly "replace # "/.." by "/" at the beginning of a path". assert exclude_paths( base, From b4beaaac8cafcec9fe9eb3d6903addd5d9bac4f2 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 7 Aug 2020 16:45:20 +0200 Subject: [PATCH 077/211] Update default API version to v1.39 (#2512) * Update default API version to v1.39 When running the docker-py integration tests in the Moby repository, some tests were skipped because the API version used was too low: SKIPPED [1] tests/integration/api_service_test.py:882: API version is too low (< 1.38) SKIPPED [1] tests/integration/api_swarm_test.py:59: API version is too low (< 1.39) SKIPPED [1] tests/integration/api_swarm_test.py:38: API version is too low (< 1.39) SKIPPED [1] tests/integration/api_swarm_test.py:45: API version is too low (< 1.39) SKIPPED [1] tests/integration/api_swarm_test.py:52: API version is too low (< 1.39) While it's possible to override the API version to use for testing using the `DOCKER_TEST_API_VERSION` environment variable, we may want to set the default to a version that supports all features that were added. This patch updates the default API version to v1.39, which is the minimum version required for those features, and corresponds with Docker 18.09. 
Note that the API version of the current (19.03) Docker release is v1.40, but using that version as default would exclude users that did not update their Docker version yet (and would not be needed yet for the features provided). Signed-off-by: Sebastiaan van Stijn * Makefile: set DOCKER_TEST_API_VERSION to v1.39 Signed-off-by: Sebastiaan van Stijn --- Makefile | 2 +- docker/constants.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 4795c63c..6765d4d7 100644 --- a/Makefile +++ b/Makefile @@ -41,7 +41,7 @@ integration-test: build integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} -TEST_API_VERSION ?= 1.35 +TEST_API_VERSION ?= 1.39 TEST_ENGINE_VERSION ?= 19.03.12 .PHONY: setup-network diff --git a/docker/constants.py b/docker/constants.py index e4daed5d..c09eedab 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -1,7 +1,7 @@ import sys from .version import version -DEFAULT_DOCKER_API_VERSION = '1.35' +DEFAULT_DOCKER_API_VERSION = '1.39' MINIMUM_DOCKER_API_VERSION = '1.21' DEFAULT_TIMEOUT_SECONDS = 60 STREAM_HEADER_SIZE_BYTES = 8 From 0be75d54cada06d1c9bd0ac66d118e9007defe09 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 12 Jul 2019 01:28:41 +0200 Subject: [PATCH 078/211] Update credentials-helpers to v0.6.2 Signed-off-by: Sebastiaan van Stijn --- tests/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/Dockerfile b/tests/Dockerfile index df8468ab..27a12673 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -4,7 +4,7 @@ FROM python:${PYTHON_VERSION} ARG APT_MIRROR RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \ - && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list + && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list RUN 
apt-get update && apt-get -y install \ gnupg2 \ From 70cdb08f9ad2e462423b0c4b42af3e6307cd392a Mon Sep 17 00:00:00 2001 From: Till Riedel Date: Sun, 14 Apr 2019 10:38:07 +0200 Subject: [PATCH 079/211] set logging level of paramiko to warn Signed-off-by: Till Riedel --- docker/transport/sshconn.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 7de0e590..57b55c9e 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -2,7 +2,10 @@ import paramiko import requests.adapters import six import logging +<<<<<<< HEAD import os +======= +>>>>>>> 2dc569a... set logging level of paramiko to warn from docker.transport.basehttpadapter import BaseHTTPAdapter from .. import constants From fcd0093050714de06bcaf05781a995f044f20ce2 Mon Sep 17 00:00:00 2001 From: Till Riedel Date: Sun, 14 Apr 2019 13:52:12 +0200 Subject: [PATCH 080/211] obey Hostname Username Port and ProxyCommand settings from .ssh/config Signed-off-by: Till Riedel --- docker/transport/sshconn.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 57b55c9e..7de0e590 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -2,10 +2,7 @@ import paramiko import requests.adapters import six import logging -<<<<<<< HEAD import os -======= ->>>>>>> 2dc569a... set logging level of paramiko to warn from docker.transport.basehttpadapter import BaseHTTPAdapter from .. import constants From 087b3f0a4956f059ea5998c6dfb34ec581c25c6b Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Fri, 17 Jan 2020 19:25:55 +0100 Subject: [PATCH 081/211] Implement context management, lifecycle and unittests. 
Signed-off-by: Anca Iordache --- docker/context/api.py | 16 ++++++++++++ docker/context/context.py | 53 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+) diff --git a/docker/context/api.py b/docker/context/api.py index c45115bc..d903d9c6 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -38,7 +38,13 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", +<<<<<<< HEAD "Metadata": {}, +======= + "Metadata": { + "StackOrchestrator": "swarm" + }, +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", @@ -55,9 +61,13 @@ class ContextAPI(object): ctx = Context.load_context(name) if ctx: raise errors.ContextAlreadyExists(name) +<<<<<<< HEAD endpoint = "docker" if orchestrator and orchestrator != "swarm": endpoint = orchestrator +======= + endpoint = "docker" if orchestrator == "swarm" else orchestrator +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. ctx = Context(name, orchestrator) ctx.set_endpoint( endpoint, host, tls_cfg, @@ -79,7 +89,13 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", +<<<<<<< HEAD "Metadata": {}, +======= + "Metadata": { + "StackOrchestrator": "swarm" + }, +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", diff --git a/docker/context/context.py b/docker/context/context.py index 2413b2ec..3859db2b 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -57,7 +57,11 @@ class Context: self, name="docker", host=None, tls_cfg=None, skip_tls_verify=False, def_namespace=None): self.endpoints[name] = { +<<<<<<< HEAD "Host": get_context_host(host, not skip_tls_verify), +======= + "Host": get_context_host(host), +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
"SkipTLSVerify": skip_tls_verify } if def_namespace: @@ -71,6 +75,7 @@ class Context: @classmethod def load_context(cls, name): +<<<<<<< HEAD meta = Context._load_meta(name) if meta: instance = cls( @@ -78,6 +83,11 @@ class Context: orchestrator=meta["Metadata"].get("StackOrchestrator", None), endpoints=meta.get("Endpoints", None)) instance.context_type = meta["Metadata"].get("Type", None) +======= + name, orchestrator, endpoints = Context._load_meta(name) + if name: + instance = cls(name, orchestrator, endpoints=endpoints) +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. instance._load_certs() instance.meta_path = get_meta_dir(name) return instance @@ -85,6 +95,7 @@ class Context: @classmethod def _load_meta(cls, name): +<<<<<<< HEAD meta_file = get_meta_file(name) if not os.path.isfile(meta_file): return None @@ -109,6 +120,27 @@ class Context: v.get("SkipTLSVerify", True)) return metadata +======= + metadata = {} + meta_file = get_meta_file(name) + if os.path.isfile(meta_file): + with open(meta_file) as f: + try: + with open(meta_file) as f: + metadata = json.load(f) + for k, v in metadata["Endpoints"].items(): + metadata["Endpoints"][k]["SkipTLSVerify"] = bool( + v["SkipTLSVerify"]) + except (IOError, KeyError, ValueError) as e: + # unknown format + raise Exception("""Detected corrupted meta file for + context {} : {}""".format(name, e)) + + return ( + metadata["Name"], metadata["Metadata"]["StackOrchestrator"], + metadata["Endpoints"]) + return None, None, None +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. def _load_certs(self): certs = {} @@ -177,15 +209,19 @@ class Context: result.update(self.Storage) return result +<<<<<<< HEAD def is_docker_host(self): return self.context_type is None +======= +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
@property def Name(self): return self.name @property def Host(self): +<<<<<<< HEAD if not self.orchestrator or self.orchestrator == "swarm": endpoint = self.endpoints.get("docker", None) if endpoint: @@ -193,6 +229,11 @@ class Context: return None return self.endpoints[self.orchestrator].get("Host", None) +======= + if self.orchestrator == "swarm": + return self.endpoints["docker"]["Host"] + return self.endpoints[self.orchestrator]["Host"] +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. @property def Orchestrator(self): @@ -200,19 +241,31 @@ class Context: @property def Metadata(self): +<<<<<<< HEAD meta = {} if self.orchestrator: meta = {"StackOrchestrator": self.orchestrator} return { "Name": self.name, "Metadata": meta, +======= + return { + "Name": self.name, + "Metadata": { + "StackOrchestrator": self.orchestrator + }, +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": self.endpoints } @property def TLSConfig(self): key = self.orchestrator +<<<<<<< HEAD if not key or key == "swarm": +======= + if key == "swarm": +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
key = "docker" if key in self.tls_cfg.keys(): return self.tls_cfg[key] From 67b77f2fa30ad97e8699c744ac43fafdbad6127c Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Fri, 7 Feb 2020 01:00:18 +0100 Subject: [PATCH 082/211] Post release 4.2.0 update: - Changelog - Next Version Signed-off-by: Ulysses Souza --- docs/change-log.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/change-log.md b/docs/change-log.md index f0be8ac1..829333b7 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -36,6 +36,7 @@ Change log - Add support for docker contexts through `docker.ContextAPI` + 4.1.0 ----- From 2e274d00b3009e800e4bfa135b0875eb8a7127a3 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 28 May 2020 20:53:45 +0200 Subject: [PATCH 083/211] Specify when to use `tls` on Context constructor Signed-off-by: Ulysses Souza --- docker/context/context.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker/context/context.py b/docker/context/context.py index 3859db2b..026a6941 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -57,11 +57,15 @@ class Context: self, name="docker", host=None, tls_cfg=None, skip_tls_verify=False, def_namespace=None): self.endpoints[name] = { +<<<<<<< HEAD <<<<<<< HEAD "Host": get_context_host(host, not skip_tls_verify), ======= "Host": get_context_host(host), >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. +======= + "Host": get_context_host(host, not skip_tls_verify), +>>>>>>> 3ce2d89... 
Specify when to use `tls` on Context constructor "SkipTLSVerify": skip_tls_verify } if def_namespace: From 3999707fb3fb5f7cea7d3d92eeda5cf160abb496 Mon Sep 17 00:00:00 2001 From: aiordache Date: Sat, 30 May 2020 11:01:22 +0200 Subject: [PATCH 084/211] Make orchestrator field optional Signed-off-by: aiordache --- docker/context/api.py | 16 ----------- docker/context/context.py | 56 ++++++++++++--------------------------- 2 files changed, 17 insertions(+), 55 deletions(-) diff --git a/docker/context/api.py b/docker/context/api.py index d903d9c6..c45115bc 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -38,13 +38,7 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", -<<<<<<< HEAD "Metadata": {}, -======= - "Metadata": { - "StackOrchestrator": "swarm" - }, ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", @@ -61,13 +55,9 @@ class ContextAPI(object): ctx = Context.load_context(name) if ctx: raise errors.ContextAlreadyExists(name) -<<<<<<< HEAD endpoint = "docker" if orchestrator and orchestrator != "swarm": endpoint = orchestrator -======= - endpoint = "docker" if orchestrator == "swarm" else orchestrator ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. ctx = Context(name, orchestrator) ctx.set_endpoint( endpoint, host, tls_cfg, @@ -89,13 +79,7 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", -<<<<<<< HEAD "Metadata": {}, -======= - "Metadata": { - "StackOrchestrator": "swarm" - }, ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
"Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", diff --git a/docker/context/context.py b/docker/context/context.py index 026a6941..8158803d 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -57,15 +57,7 @@ class Context: self, name="docker", host=None, tls_cfg=None, skip_tls_verify=False, def_namespace=None): self.endpoints[name] = { -<<<<<<< HEAD -<<<<<<< HEAD "Host": get_context_host(host, not skip_tls_verify), -======= - "Host": get_context_host(host), ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. -======= - "Host": get_context_host(host, not skip_tls_verify), ->>>>>>> 3ce2d89... Specify when to use `tls` on Context constructor "SkipTLSVerify": skip_tls_verify } if def_namespace: @@ -79,7 +71,6 @@ class Context: @classmethod def load_context(cls, name): -<<<<<<< HEAD meta = Context._load_meta(name) if meta: instance = cls( @@ -87,11 +78,6 @@ class Context: orchestrator=meta["Metadata"].get("StackOrchestrator", None), endpoints=meta.get("Endpoints", None)) instance.context_type = meta["Metadata"].get("Type", None) -======= - name, orchestrator, endpoints = Context._load_meta(name) - if name: - instance = cls(name, orchestrator, endpoints=endpoints) ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
instance._load_certs() instance.meta_path = get_meta_dir(name) return instance @@ -99,7 +85,6 @@ class Context: @classmethod def _load_meta(cls, name): -<<<<<<< HEAD meta_file = get_meta_file(name) if not os.path.isfile(meta_file): return None @@ -124,27 +109,6 @@ class Context: v.get("SkipTLSVerify", True)) return metadata -======= - metadata = {} - meta_file = get_meta_file(name) - if os.path.isfile(meta_file): - with open(meta_file) as f: - try: - with open(meta_file) as f: - metadata = json.load(f) - for k, v in metadata["Endpoints"].items(): - metadata["Endpoints"][k]["SkipTLSVerify"] = bool( - v["SkipTLSVerify"]) - except (IOError, KeyError, ValueError) as e: - # unknown format - raise Exception("""Detected corrupted meta file for - context {} : {}""".format(name, e)) - - return ( - metadata["Name"], metadata["Metadata"]["StackOrchestrator"], - metadata["Endpoints"]) - return None, None, None ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. def _load_certs(self): certs = {} @@ -213,18 +177,16 @@ class Context: result.update(self.Storage) return result -<<<<<<< HEAD def is_docker_host(self): return self.context_type is None -======= ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. @property def Name(self): return self.name @property def Host(self): +<<<<<<< HEAD <<<<<<< HEAD if not self.orchestrator or self.orchestrator == "swarm": endpoint = self.endpoints.get("docker", None) @@ -235,6 +197,9 @@ class Context: return self.endpoints[self.orchestrator].get("Host", None) ======= if self.orchestrator == "swarm": +======= + if not self.orchestrator or self.orchestrator == "swarm": +>>>>>>> 1e11ece... Make orchestrator field optional return self.endpoints["docker"]["Host"] return self.endpoints[self.orchestrator]["Host"] >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
@@ -245,6 +210,7 @@ class Context: @property def Metadata(self): +<<<<<<< HEAD <<<<<<< HEAD meta = {} if self.orchestrator: @@ -259,17 +225,29 @@ class Context: "StackOrchestrator": self.orchestrator }, >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. +======= + meta = {} + if self.orchestrator: + meta = {"StackOrchestrator": self.orchestrator} + return { + "Name": self.name, + "Metadata": meta, +>>>>>>> 1e11ece... Make orchestrator field optional "Endpoints": self.endpoints } @property def TLSConfig(self): key = self.orchestrator +<<<<<<< HEAD <<<<<<< HEAD if not key or key == "swarm": ======= if key == "swarm": >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. +======= + if not key or key == "swarm": +>>>>>>> 1e11ece... Make orchestrator field optional key = "docker" if key in self.tls_cfg.keys(): return self.tls_cfg[key] From 746a2509ab7cb0656f62de0440be9ce442775e8c Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 10 Aug 2020 15:29:34 +0200 Subject: [PATCH 085/211] Prepare release 4.3.0 Signed-off-by: aiordache --- docker/version.py | 2 +- docs/change-log.md | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index a7546092..29c6b00e 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.3.0-dev" +version = "4.3.0" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index 829333b7..c753ffd3 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,21 @@ Change log ========== +4.3.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/66?closed=1) + +### Features +- Add `DeviceRequest` type to expose host resources such as GPUs +- Add support for `DriverOpts` in EndpointConfig +- Disable compression by default when using container.get_archive method + +### Miscellaneous +- Update default 
API version to v1.39 +- Update test engine version to 19.03.12 + + 4.2.2 ----- From 8080fbb4ed7a5c25fa0bf1deb39bcdf4cdfb7ddd Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 10 Aug 2020 18:15:18 +0200 Subject: [PATCH 086/211] Fix merge Signed-off-by: aiordache --- docker/context/context.py | 35 ----------------------------------- 1 file changed, 35 deletions(-) diff --git a/docker/context/context.py b/docker/context/context.py index 8158803d..2413b2ec 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -186,8 +186,6 @@ class Context: @property def Host(self): -<<<<<<< HEAD -<<<<<<< HEAD if not self.orchestrator or self.orchestrator == "swarm": endpoint = self.endpoints.get("docker", None) if endpoint: @@ -195,14 +193,6 @@ class Context: return None return self.endpoints[self.orchestrator].get("Host", None) -======= - if self.orchestrator == "swarm": -======= - if not self.orchestrator or self.orchestrator == "swarm": ->>>>>>> 1e11ece... Make orchestrator field optional - return self.endpoints["docker"]["Host"] - return self.endpoints[self.orchestrator]["Host"] ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. @property def Orchestrator(self): @@ -210,44 +200,19 @@ class Context: @property def Metadata(self): -<<<<<<< HEAD -<<<<<<< HEAD meta = {} if self.orchestrator: meta = {"StackOrchestrator": self.orchestrator} return { "Name": self.name, "Metadata": meta, -======= - return { - "Name": self.name, - "Metadata": { - "StackOrchestrator": self.orchestrator - }, ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. -======= - meta = {} - if self.orchestrator: - meta = {"StackOrchestrator": self.orchestrator} - return { - "Name": self.name, - "Metadata": meta, ->>>>>>> 1e11ece... 
Make orchestrator field optional "Endpoints": self.endpoints } @property def TLSConfig(self): key = self.orchestrator -<<<<<<< HEAD -<<<<<<< HEAD if not key or key == "swarm": -======= - if key == "swarm": ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. -======= - if not key or key == "swarm": ->>>>>>> 1e11ece... Make orchestrator field optional key = "docker" if key in self.tls_cfg.keys(): return self.tls_cfg[key] From 9579b7ac0e43eadf6e3a62d61ed67aff25839143 Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 10 Aug 2020 18:21:57 +0200 Subject: [PATCH 087/211] Fix changelog merge Signed-off-by: aiordache --- docs/change-log.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/change-log.md b/docs/change-log.md index c753ffd3..cecce9d8 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -4,7 +4,7 @@ Change log 4.3.0 ----- -[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/66?closed=1) +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/64?closed=1) ### Features - Add `DeviceRequest` type to expose host resources such as GPUs @@ -51,7 +51,6 @@ Change log - Add support for docker contexts through `docker.ContextAPI` - 4.1.0 ----- From 5cdbbab3eedeea5b73151c604209d576722fb24d Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 10 Aug 2020 19:21:44 +0200 Subject: [PATCH 088/211] Update version to the next dev version Signed-off-by: aiordache --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 29c6b00e..f40347aa 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.3.0" +version = "4.4.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 2c522fb362247a692c0493f0b47a33988eb2f3e3 Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Mon, 17 Aug 2020 18:32:48 +0200 Subject: [PATCH 089/211] Fix memory 
conversion to bytes (#2645) * Fix memory conversion to bytes Co-authored-by: Ulysses Souza Signed-off-by: aiordache --- docker/api/container.py | 6 +++--- docker/utils/utils.py | 2 +- tests/unit/utils_test.py | 24 +++++++----------------- 3 files changed, 11 insertions(+), 21 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index cf5caebb..24eb9c1c 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -506,7 +506,7 @@ class ContainerApiMixin(object): bytes) or a string with a units identification char (``100000b``, ``1000k``, ``128m``, ``1g``). If a string is specified without a units character, bytes are assumed as an - mem_reservation (int or str): Memory soft limit. + mem_reservation (float or str): Memory soft limit. mem_swappiness (int): Tune a container's memory swappiness behavior. Accepts number between 0 and 100. memswap_limit (str or int): Maximum amount of memory + swap a @@ -1219,8 +1219,8 @@ class ContainerApiMixin(object): cpu_shares (int): CPU shares (relative weight) cpuset_cpus (str): CPUs in which to allow execution cpuset_mems (str): MEMs in which to allow execution - mem_limit (int or str): Memory limit - mem_reservation (int or str): Memory soft limit + mem_limit (float or str): Memory limit + mem_reservation (float or str): Memory soft limit memswap_limit (int or str): Total memory (memory + swap), -1 to disable swap kernel_memory (int or str): Kernel memory limit diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 447760b4..1b195e27 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -412,7 +412,7 @@ def parse_bytes(s): if suffix in units.keys() or suffix.isdigit(): try: - digits = int(digits_part) + digits = float(digits_part) except ValueError: raise errors.DockerException( 'Failed converting the string value for memory ({0}) to' diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index d9cb0028..07209a1b 100644 --- a/tests/unit/utils_test.py +++ 
b/tests/unit/utils_test.py @@ -5,27 +5,21 @@ import json import os import os.path import shutil -import sys import tempfile import unittest - +import pytest +import six from docker.api.client import APIClient from docker.constants import IS_WINDOWS_PLATFORM from docker.errors import DockerException -from docker.utils import ( - convert_filters, convert_volume_binds, decode_json_header, kwargs_from_env, - parse_bytes, parse_devices, parse_env_file, parse_host, - parse_repository_tag, split_command, update_headers, -) - +from docker.utils import (convert_filters, convert_volume_binds, + decode_json_header, kwargs_from_env, parse_bytes, + parse_devices, parse_env_file, parse_host, + parse_repository_tag, split_command, update_headers) from docker.utils.ports import build_port_bindings, split_port from docker.utils.utils import format_environment -import pytest - -import six - TEST_CERT_DIR = os.path.join( os.path.dirname(__file__), 'testdata/certs', @@ -447,11 +441,7 @@ class ParseBytesTest(unittest.TestCase): parse_bytes("127.0.0.1K") def test_parse_bytes_float(self): - with pytest.raises(DockerException): - parse_bytes("1.5k") - - def test_parse_bytes_maxint(self): - assert parse_bytes("{0}k".format(sys.maxsize)) == sys.maxsize * 1024 + assert parse_bytes("1.5k") == 1536 class UtilsTest(unittest.TestCase): From 0dfae33ce800d17d23a43ebde064c146d2c99781 Mon Sep 17 00:00:00 2001 From: Yuval Goldberg Date: Sun, 16 Aug 2020 18:51:11 +0300 Subject: [PATCH 090/211] Add file environment variable to integration-dind Signed-off-by: Yuval Goldberg --- Makefile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 6765d4d7..9f30166d 100644 --- a/Makefile +++ b/Makefile @@ -57,7 +57,7 @@ integration-dind-py2: build setup-network docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\ docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental docker run -t --rm 
--env="DOCKER_HOST=tcp://dpy-dind-py2:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python py.test tests/integration + --network dpy-tests docker-sdk-python py.test tests/integration/${file} docker rm -vf dpy-dind-py2 .PHONY: integration-dind-py3 @@ -66,7 +66,7 @@ integration-dind-py3: build-py3 setup-network docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\ docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python3 py.test tests/integration + --network dpy-tests docker-sdk-python3 py.test tests/integration/${file} docker rm -vf dpy-dind-py3 .PHONY: integration-dind-ssl @@ -81,10 +81,10 @@ integration-dind-ssl: build-dind-certs build build-py3 --tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python py.test tests/integration + --network dpy-tests docker-sdk-python py.test tests/integration/${file} docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python3 py.test tests/integration + --network dpy-tests docker-sdk-python3 py.test tests/integration/${file} docker rm -vf dpy-dind-ssl dpy-dind-certs .PHONY: flake8 From 727080b3cca846a28d5436bed861359c9742c7e1 Mon Sep 17 00:00:00 2001 From: aiordache Date: Wed, 19 Aug 2020 14:19:29 +0200 Subject: [PATCH 091/211] set version to 'auto' to avoid breaking on old engine versions Signed-off-by: aiordache --- docker/api/client.py | 44 
+++++++++++++++++++++----------------------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/docker/api/client.py b/docker/api/client.py index 35dc84e7..51abaedf 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -7,6 +7,19 @@ import requests.exceptions import six import websocket +from .. import auth +from ..constants import (DEFAULT_DOCKER_API_VERSION, DEFAULT_NUM_POOLS, + DEFAULT_NUM_POOLS_SSH, DEFAULT_TIMEOUT_SECONDS, + DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM, + MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES) +from ..errors import (DockerException, InvalidVersion, TLSParameterError, + create_api_error_from_http_exception) +from ..tls import TLSConfig +from ..transport import SSLHTTPAdapter, UnixHTTPAdapter +from ..utils import check_resource, config, update_headers, utils +from ..utils.json_stream import json_stream +from ..utils.proxy import ProxyConfig +from ..utils.socket import consume_socket_output, demux_adaptor, frames_iter from .build import BuildApiMixin from .config import ConfigApiMixin from .container import ContainerApiMixin @@ -19,22 +32,7 @@ from .secret import SecretApiMixin from .service import ServiceApiMixin from .swarm import SwarmApiMixin from .volume import VolumeApiMixin -from .. 
import auth -from ..constants import ( - DEFAULT_TIMEOUT_SECONDS, DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM, - DEFAULT_DOCKER_API_VERSION, MINIMUM_DOCKER_API_VERSION, - STREAM_HEADER_SIZE_BYTES, DEFAULT_NUM_POOLS_SSH, DEFAULT_NUM_POOLS -) -from ..errors import ( - DockerException, InvalidVersion, TLSParameterError, - create_api_error_from_http_exception -) -from ..tls import TLSConfig -from ..transport import SSLHTTPAdapter, UnixHTTPAdapter -from ..utils import utils, check_resource, update_headers, config -from ..utils.socket import frames_iter, consume_socket_output, demux_adaptor -from ..utils.json_stream import json_stream -from ..utils.proxy import ProxyConfig + try: from ..transport import NpipeHTTPAdapter except ImportError: @@ -183,14 +181,14 @@ class APIClient( self.base_url = base_url # version detection needs to be after unix adapter mounting - if version is None: - self._version = DEFAULT_DOCKER_API_VERSION - elif isinstance(version, six.string_types): - if version.lower() == 'auto': - self._version = self._retrieve_server_version() - else: - self._version = version + if version is None or ( + isinstance(version, six.string_types) and + version.lower()) == 'auto': + self._version = self._retrieve_server_version() else: + self._version = version + + if not isinstance(self._version, six.string_types): raise DockerException( 'Version parameter must be a string or None. 
Found {0}'.format( type(version).__name__ From c7c5b551fcbbdcbf33eabf51007d0f9494637edb Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 20 Aug 2020 14:59:36 +0200 Subject: [PATCH 092/211] set engine version for unit tests to avoid querying the engine Signed-off-by: aiordache --- docker/api/client.py | 16 +++++------ docker/client.py | 2 +- tests/unit/api_test.py | 51 +++++++++++++++++++++++------------ tests/unit/client_test.py | 32 +++++++++++----------- tests/unit/fake_api.py | 3 ++- tests/unit/fake_api_client.py | 8 +++--- tests/unit/utils_test.py | 6 +++-- 7 files changed, 71 insertions(+), 47 deletions(-) diff --git a/docker/api/client.py b/docker/api/client.py index 51abaedf..43e309b5 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -8,10 +8,10 @@ import six import websocket from .. import auth -from ..constants import (DEFAULT_DOCKER_API_VERSION, DEFAULT_NUM_POOLS, - DEFAULT_NUM_POOLS_SSH, DEFAULT_TIMEOUT_SECONDS, - DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM, - MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES) +from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH, + DEFAULT_TIMEOUT_SECONDS, DEFAULT_USER_AGENT, + IS_WINDOWS_PLATFORM, MINIMUM_DOCKER_API_VERSION, + STREAM_HEADER_SIZE_BYTES) from ..errors import (DockerException, InvalidVersion, TLSParameterError, create_api_error_from_http_exception) from ..tls import TLSConfig @@ -181,13 +181,13 @@ class APIClient( self.base_url = base_url # version detection needs to be after unix adapter mounting - if version is None or ( - isinstance(version, six.string_types) and - version.lower()) == 'auto': + if version is None or (isinstance( + version, + six.string_types + ) and version.lower() == 'auto'): self._version = self._retrieve_server_version() else: self._version = version - if not isinstance(self._version, six.string_types): raise DockerException( 'Version parameter must be a string or None. 
Found {0}'.format( diff --git a/docker/client.py b/docker/client.py index 99ae1962..6c397da0 100644 --- a/docker/client.py +++ b/docker/client.py @@ -62,7 +62,7 @@ class DockerClient(object): Args: version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.35`` + automatically detect the server's version. Default: ``auto`` timeout (int): Default timeout for API calls, in seconds. ssl_version (int): A valid `SSL version`_. assert_hostname (bool): Verify the hostname of the server. diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index f4d220a2..cb14b74e 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -1,26 +1,26 @@ import datetime -import json import io +import json import os import re import shutil import socket +import struct import tempfile import threading import time import unittest import docker -from docker.api import APIClient +import pytest import requests -from requests.packages import urllib3 import six -import struct +from docker.api import APIClient +from docker.constants import DEFAULT_DOCKER_API_VERSION +from requests.packages import urllib3 from . 
import fake_api -import pytest - try: from unittest import mock except ImportError: @@ -105,7 +105,7 @@ class BaseAPIClientTest(unittest.TestCase): _read_from_socket=fake_read_from_socket ) self.patcher.start() - self.client = APIClient() + self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION) def tearDown(self): self.client.close() @@ -282,27 +282,37 @@ class DockerApiTest(BaseAPIClientTest): return socket_adapter.socket_path def test_url_compatibility_unix(self): - c = APIClient(base_url="unix://socket") + c = APIClient( + base_url="unix://socket", + version=DEFAULT_DOCKER_API_VERSION) assert self._socket_path_for_client_session(c) == '/socket' def test_url_compatibility_unix_triple_slash(self): - c = APIClient(base_url="unix:///socket") + c = APIClient( + base_url="unix:///socket", + version=DEFAULT_DOCKER_API_VERSION) assert self._socket_path_for_client_session(c) == '/socket' def test_url_compatibility_http_unix_triple_slash(self): - c = APIClient(base_url="http+unix:///socket") + c = APIClient( + base_url="http+unix:///socket", + version=DEFAULT_DOCKER_API_VERSION) assert self._socket_path_for_client_session(c) == '/socket' def test_url_compatibility_http(self): - c = APIClient(base_url="http://hostname:1234") + c = APIClient( + base_url="http://hostname:1234", + version=DEFAULT_DOCKER_API_VERSION) assert c.base_url == "http://hostname:1234" def test_url_compatibility_tcp(self): - c = APIClient(base_url="tcp://hostname:1234") + c = APIClient( + base_url="tcp://hostname:1234", + version=DEFAULT_DOCKER_API_VERSION) assert c.base_url == "http://hostname:1234" @@ -447,7 +457,9 @@ class UnixSocketStreamTest(unittest.TestCase): b'\r\n' ) + b'\r\n'.join(lines) - with APIClient(base_url="http+unix://" + self.socket_file) as client: + with APIClient( + base_url="http+unix://" + self.socket_file, + version=DEFAULT_DOCKER_API_VERSION) as client: for i in range(5): try: stream = client.build( @@ -532,7 +544,10 @@ class TCPSocketStreamTest(unittest.TestCase): def 
request(self, stream=None, tty=None, demux=None): assert stream is not None and tty is not None and demux is not None - with APIClient(base_url=self.address) as client: + with APIClient( + base_url=self.address, + version=DEFAULT_DOCKER_API_VERSION + ) as client: if tty: url = client._url('/tty') else: @@ -597,7 +612,7 @@ class UserAgentTest(unittest.TestCase): self.patcher.stop() def test_default_user_agent(self): - client = APIClient() + client = APIClient(version=DEFAULT_DOCKER_API_VERSION) client.version() assert self.mock_send.call_count == 1 @@ -606,7 +621,9 @@ class UserAgentTest(unittest.TestCase): assert headers['User-Agent'] == expected def test_custom_user_agent(self): - client = APIClient(user_agent='foo/bar') + client = APIClient( + user_agent='foo/bar', + version=DEFAULT_DOCKER_API_VERSION) client.version() assert self.mock_send.call_count == 1 @@ -626,7 +643,7 @@ class DisableSocketTest(unittest.TestCase): return self.timeout def setUp(self): - self.client = APIClient() + self.client = APIClient(version=DEFAULT_DOCKER_API_VERSION) def test_disable_socket_timeout(self): """Test that the timeout is disabled on a generic socket object.""" diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index cce99c53..cc9ff8f2 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -1,14 +1,14 @@ import datetime -import docker -from docker.utils import kwargs_from_env -from docker.constants import ( - DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS -) import os import unittest -from . import fake_api +import docker import pytest +from docker.constants import ( + DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS) +from docker.utils import kwargs_from_env + +from . 
import fake_api try: from unittest import mock @@ -25,33 +25,33 @@ class ClientTest(unittest.TestCase): def test_events(self, mock_func): since = datetime.datetime(2016, 1, 1, 0, 0) mock_func.return_value = fake_api.get_fake_events()[1] - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.events(since=since) == mock_func.return_value mock_func.assert_called_with(since=since) @mock.patch('docker.api.APIClient.info') def test_info(self, mock_func): mock_func.return_value = fake_api.get_fake_info()[1] - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.info() == mock_func.return_value mock_func.assert_called_with() @mock.patch('docker.api.APIClient.ping') def test_ping(self, mock_func): mock_func.return_value = True - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.ping() is True mock_func.assert_called_with() @mock.patch('docker.api.APIClient.version') def test_version(self, mock_func): mock_func.return_value = fake_api.get_fake_version()[1] - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.version() == mock_func.return_value mock_func.assert_called_with() def test_call_api_client_method(self): - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) with pytest.raises(AttributeError) as cm: client.create_container() s = cm.exconly() @@ -65,7 +65,9 @@ class ClientTest(unittest.TestCase): assert "this method is now on the object APIClient" not in s def test_call_containers(self): - client = docker.DockerClient(**kwargs_from_env()) + client = docker.DockerClient( + version=DEFAULT_DOCKER_API_VERSION, + **kwargs_from_env()) with pytest.raises(TypeError) as cm: client.containers() @@ -90,7 +92,7 @@ class FromEnvTest(unittest.TestCase): os.environ.update(DOCKER_HOST='tcp://192.168.59.103:2376', 
DOCKER_CERT_PATH=TEST_CERT_DIR, DOCKER_TLS_VERIFY='1') - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.api.base_url == "https://192.168.59.103:2376" def test_from_env_with_version(self): @@ -102,11 +104,11 @@ class FromEnvTest(unittest.TestCase): assert client.api._version == '2.32' def test_from_env_without_version_uses_default(self): - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.api._version == DEFAULT_DOCKER_API_VERSION def test_from_env_without_timeout_uses_default(self): - client = docker.from_env() + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.api.timeout == DEFAULT_TIMEOUT_SECONDS diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py index e609b64e..27e463d2 100644 --- a/tests/unit/fake_api.py +++ b/tests/unit/fake_api.py @@ -1,6 +1,7 @@ -from . import fake_stat from docker import constants +from . import fake_stat + CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION) FAKE_CONTAINER_ID = '3cc2351ab11b' diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py index 2147bfdf..e85001db 100644 --- a/tests/unit/fake_api_client.py +++ b/tests/unit/fake_api_client.py @@ -1,6 +1,7 @@ import copy -import docker +import docker +from docker.constants import DEFAULT_DOCKER_API_VERSION from . 
import fake_api try: @@ -30,7 +31,7 @@ def make_fake_api_client(overrides=None): if overrides is None: overrides = {} - api_client = docker.APIClient() + api_client = docker.APIClient(version=DEFAULT_DOCKER_API_VERSION) mock_attrs = { 'build.return_value': fake_api.FAKE_IMAGE_ID, 'commit.return_value': fake_api.post_fake_commit()[1], @@ -50,6 +51,7 @@ def make_fake_api_client(overrides=None): 'networks.return_value': fake_api.get_fake_network_list()[1], 'start.return_value': None, 'wait.return_value': {'StatusCode': 0}, + 'version.return_value': fake_api.get_fake_version() } mock_attrs.update(overrides) mock_client = CopyReturnMagicMock(**mock_attrs) @@ -62,6 +64,6 @@ def make_fake_client(overrides=None): """ Returns a Client with a fake APIClient. """ - client = docker.DockerClient() + client = docker.DockerClient(version=DEFAULT_DOCKER_API_VERSION) client.api = make_fake_api_client(overrides) return client diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 07209a1b..a53151cb 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -11,7 +11,7 @@ import unittest import pytest import six from docker.api.client import APIClient -from docker.constants import IS_WINDOWS_PLATFORM +from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION from docker.errors import DockerException from docker.utils import (convert_filters, convert_volume_binds, decode_json_header, kwargs_from_env, parse_bytes, @@ -35,7 +35,7 @@ class DecoratorsTest(unittest.TestCase): def f(self, headers=None): return headers - client = APIClient() + client = APIClient(version=DEFAULT_DOCKER_API_VERSION) client._general_configs = {} g = update_headers(f) @@ -86,6 +86,7 @@ class KwargsFromEnvTest(unittest.TestCase): assert kwargs['tls'].verify parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True) + kwargs['version'] = DEFAULT_DOCKER_API_VERSION try: client = APIClient(**kwargs) assert parsed_host == client.base_url @@ -106,6 +107,7 @@ 
class KwargsFromEnvTest(unittest.TestCase): assert kwargs['tls'].assert_hostname is True assert kwargs['tls'].verify is False parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True) + kwargs['version'] = DEFAULT_DOCKER_API_VERSION try: client = APIClient(**kwargs) assert parsed_host == client.base_url From ed46fb0143020621c68bd2e62bf5a0780552c1fb Mon Sep 17 00:00:00 2001 From: aiordache Date: Fri, 21 Aug 2020 10:43:12 +0200 Subject: [PATCH 093/211] Add release 4.3.1 information to changelog Signed-off-by: aiordache --- docs/change-log.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index cecce9d8..11c055fd 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,16 @@ Change log ========== +4.3.1 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/68?closed=1) + +### Miscellaneous +- Set default API version to `auto` +- Fix conversion to bytes for `float` +- Support OpenSSH `identityfile` option + 4.3.0 ----- From 84857a896cf9ed7f67973db260ae5450ff79e394 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 15 Sep 2020 15:33:04 +0200 Subject: [PATCH 094/211] Add github supported CODEOWNERS file Signed-off-by: Ulysses Souza --- .github/CODEOWNERS | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..5df30149 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,6 @@ +# GitHub code owners +# See https://help.github.com/articles/about-codeowners/ +# +# KEEP THIS FILE SORTED. Order is important. Last match takes precedence. 
+ +* @aiordache @ulyssessouza From cec152db5f679bc61c2093959bd9109cb9abb169 Mon Sep 17 00:00:00 2001 From: aiordache Date: Tue, 15 Sep 2020 18:42:19 +0200 Subject: [PATCH 095/211] Set image default tag on pull Signed-off-by: aiordache --- docker/api/image.py | 11 ++++++++--- docker/models/images.py | 21 +++++++++++---------- tests/integration/api_image_test.py | 4 ++-- tests/integration/models_images_test.py | 2 +- tests/unit/api_image_test.py | 4 ++-- tests/unit/models_containers_test.py | 2 +- tests/unit/models_images_test.py | 24 ++++++++++++++++++++---- 7 files changed, 45 insertions(+), 23 deletions(-) diff --git a/docker/api/image.py b/docker/api/image.py index 11c8cf75..dcce0acb 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -343,7 +343,7 @@ class ImageApiMixin(object): return self._result(self._post(url, params=params), True) def pull(self, repository, tag=None, stream=False, auth_config=None, - decode=False, platform=None): + decode=False, platform=None, all_tags=False): """ Pulls an image. Similar to the ``docker pull`` command. @@ -358,6 +358,7 @@ class ImageApiMixin(object): decode (bool): Decode the JSON data from the server into dicts. Only applies with ``stream=True`` platform (str): Platform in the format ``os[/arch[/variant]]`` + all_tags (bool): Pull all image tags. 
Returns: (generator or str): The output @@ -382,8 +383,12 @@ class ImageApiMixin(object): } """ - if not tag: - repository, tag = utils.parse_repository_tag(repository) + repository, image_tag = utils.parse_repository_tag(repository) + tag = tag or image_tag or 'latest' + + if all_tags: + tag = None + registry, repo_name = auth.resolve_repository_name(repository) params = { diff --git a/docker/models/images.py b/docker/models/images.py index 757a5a47..d2c5835a 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -395,12 +395,12 @@ class ImageCollection(Collection): return [self.get(i) for i in images] - def pull(self, repository, tag=None, **kwargs): + def pull(self, repository, tag=None, all_tags=False, **kwargs): """ Pull an image of the given name and return it. Similar to the ``docker pull`` command. - If no tag is specified, all tags from that repository will be - pulled. + If ``all_tags`` is set, the ``tag`` parameter is ignored and all image + tags will be pulled. If you want to get the raw pull output, use the :py:meth:`~docker.api.image.ImageApiMixin.pull` method in the @@ -413,10 +413,11 @@ class ImageCollection(Collection): config for this request. ``auth_config`` should contain the ``username`` and ``password`` keys to be valid. platform (str): Platform in the format ``os[/arch[/variant]]`` + all_tags (bool): Pull all image tags Returns: (:py:class:`Image` or list): The image that has been pulled. - If no ``tag`` was specified, the method will return a list + If ``tag`` is None, the method will return a list of :py:class:`Image` objects belonging to this repository. 
Raises: @@ -426,13 +427,13 @@ class ImageCollection(Collection): Example: >>> # Pull the image tagged `latest` in the busybox repo - >>> image = client.images.pull('busybox:latest') + >>> image = client.images.pull('busybox') >>> # Pull all tags in the busybox repo - >>> images = client.images.pull('busybox') + >>> images = client.images.pull('busybox', all_tags=True) """ - if not tag: - repository, tag = parse_repository_tag(repository) + repository, image_tag = parse_repository_tag(repository) + tag = tag or image_tag or 'latest' if 'stream' in kwargs: warnings.warn( @@ -442,14 +443,14 @@ class ImageCollection(Collection): del kwargs['stream'] pull_log = self.client.api.pull( - repository, tag=tag, stream=True, **kwargs + repository, tag=tag, stream=True, all_tags=all_tags, **kwargs ) for _ in pull_log: # We don't do anything with the logs, but we need # to keep the connection alive and wait for the image # to be pulled. pass - if tag: + if not all_tags: return self.get('{0}{2}{1}'.format( repository, tag, '@' if tag.startswith('sha256:') else ':' )) diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index 2bc96abf..37e26a3f 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -42,7 +42,7 @@ class PullImageTest(BaseAPIIntegrationTest): self.client.remove_image('hello-world') except docker.errors.APIError: pass - res = self.client.pull('hello-world', tag='latest') + res = self.client.pull('hello-world') self.tmp_imgs.append('hello-world') assert type(res) == six.text_type assert len(self.client.images('hello-world')) >= 1 @@ -55,7 +55,7 @@ class PullImageTest(BaseAPIIntegrationTest): except docker.errors.APIError: pass stream = self.client.pull( - 'hello-world', tag='latest', stream=True, decode=True) + 'hello-world', stream=True, decode=True) self.tmp_imgs.append('hello-world') for chunk in stream: assert isinstance(chunk, dict) diff --git a/tests/integration/models_images_test.py 
b/tests/integration/models_images_test.py index 223d102f..0d60f37b 100644 --- a/tests/integration/models_images_test.py +++ b/tests/integration/models_images_test.py @@ -86,7 +86,7 @@ class ImageCollectionTest(BaseIntegrationTest): def test_pull_multiple(self): client = docker.from_env(version=TEST_API_VERSION) - images = client.images.pull('hello-world') + images = client.images.pull('hello-world', all_tags=True) assert len(images) >= 1 assert any([ 'hello-world:latest' in img.attrs['RepoTags'] for img in images diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py index 1e2315db..4b4fb977 100644 --- a/tests/unit/api_image_test.py +++ b/tests/unit/api_image_test.py @@ -67,7 +67,7 @@ class ImageTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][1] == url_prefix + 'images/create' assert args[1]['params'] == { - 'tag': None, 'fromImage': 'joffrey/test001' + 'tag': 'latest', 'fromImage': 'joffrey/test001' } assert not args[1]['stream'] @@ -77,7 +77,7 @@ class ImageTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][1] == url_prefix + 'images/create' assert args[1]['params'] == { - 'tag': None, 'fromImage': 'joffrey/test001' + 'tag': 'latest', 'fromImage': 'joffrey/test001' } assert args[1]['stream'] diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index c9f73f37..c7aa46b2 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -233,7 +233,7 @@ class ContainerCollectionTest(unittest.TestCase): assert container.id == FAKE_CONTAINER_ID client.api.pull.assert_called_with( - 'alpine', platform=None, tag=None, stream=True + 'alpine', platform=None, tag='latest', all_tags=False, stream=True ) def test_run_with_error(self): diff --git a/tests/unit/models_images_test.py b/tests/unit/models_images_test.py index fd894ab7..e3d070c0 100644 --- a/tests/unit/models_images_test.py +++ b/tests/unit/models_images_test.py @@ -44,9 +44,25 @@ class 
ImageCollectionTest(unittest.TestCase): def test_pull(self): client = make_fake_client() - image = client.images.pull('test_image:latest') + image = client.images.pull('test_image:test') client.api.pull.assert_called_with( - 'test_image', tag='latest', stream=True + 'test_image', tag='test', all_tags=False, stream=True + ) + client.api.inspect_image.assert_called_with('test_image:test') + assert isinstance(image, Image) + assert image.id == FAKE_IMAGE_ID + + def test_pull_tag_precedence(self): + client = make_fake_client() + image = client.images.pull('test_image:latest', tag='test') + client.api.pull.assert_called_with( + 'test_image', tag='test', all_tags=False, stream=True + ) + client.api.inspect_image.assert_called_with('test_image:test') + + image = client.images.pull('test_image') + client.api.pull.assert_called_with( + 'test_image', tag='latest', all_tags=False, stream=True ) client.api.inspect_image.assert_called_with('test_image:latest') assert isinstance(image, Image) @@ -54,9 +70,9 @@ class ImageCollectionTest(unittest.TestCase): def test_pull_multiple(self): client = make_fake_client() - images = client.images.pull('test_image') + images = client.images.pull('test_image', all_tags=True) client.api.pull.assert_called_with( - 'test_image', tag=None, stream=True + 'test_image', tag='latest', all_tags=True, stream=True ) client.api.images.assert_called_with( all=False, name='test_image', filters=None From ea093a75dd5c8330d2267a7be9d513dad19f66b7 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 17 Sep 2020 16:54:53 +0200 Subject: [PATCH 096/211] Fix url of examples in ulimits Signed-off-by: Ulysses Souza --- docker/types/containers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/types/containers.py b/docker/types/containers.py index 149b85df..44bfcfd8 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -97,8 +97,8 @@ class Ulimit(DictType): Args: - name (str): Which ulimit will this apply to. 
A list of valid names can - be found `here `_. + name (str): Which ulimit will this apply to. The valid names can be + found in '/etc/security/limits.conf' on a gnu/linux system. soft (int): The soft limit for this ulimit. Optional. hard (int): The hard limit for this ulimit. Optional. From aed570098518cbc73dc1450b001520a0c5bf5046 Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 21 Sep 2020 10:23:29 +0200 Subject: [PATCH 097/211] update `pull` method docs Signed-off-by: aiordache --- docker/api/image.py | 6 ++++-- docker/models/images.py | 3 ++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docker/api/image.py b/docker/api/image.py index dcce0acb..4082bfb3 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -349,7 +349,8 @@ class ImageApiMixin(object): Args: repository (str): The repository to pull - tag (str): The tag to pull + tag (str): The tag to pull. If ``tag`` is ``None`` or empty, it + is set to ``latest``. stream (bool): Stream the output as a generator. Make sure to consume the generator, otherwise pull might get cancelled. auth_config (dict): Override the credentials that are found in the @@ -358,7 +359,8 @@ class ImageApiMixin(object): decode (bool): Decode the JSON data from the server into dicts. Only applies with ``stream=True`` platform (str): Platform in the format ``os[/arch[/variant]]`` - all_tags (bool): Pull all image tags. + all_tags (bool): Pull all image tags, the ``tag`` parameter is + ignored. Returns: (generator or str): The output diff --git a/docker/models/images.py b/docker/models/images.py index d2c5835a..e6355885 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -399,6 +399,7 @@ class ImageCollection(Collection): """ Pull an image of the given name and return it. Similar to the ``docker pull`` command. + If ``tag`` is ``None`` or empty, it is set to ``latest``. If ``all_tags`` is set, the ``tag`` parameter is ignored and all image tags will be pulled. 
@@ -417,7 +418,7 @@ class ImageCollection(Collection): Returns: (:py:class:`Image` or list): The image that has been pulled. - If ``tag`` is None, the method will return a list + If ``all_tags`` is True, the method will return a list of :py:class:`Image` objects belonging to this repository. Raises: From 910cc124238aa4c5b56bfafd7c9b72492b2df7d4 Mon Sep 17 00:00:00 2001 From: Ian Fijolek Date: Fri, 9 Oct 2020 18:12:00 -0700 Subject: [PATCH 098/211] Fix plugin model upgrade When upgrading a plugin via the model interface, it would yield the following error: AttributeError: 'Plugin' object has no attribute '_reload' It appears that the proper method is `self.reload()`. This is what is used by all other methods in the class and base. I'm not finding any references to `_reload` apart from this instance in the project either. I've verified that this patch fixes the issue on my machine and all tests pass. Signed-off-by: Ian Fijolek --- docker/models/plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/models/plugins.py b/docker/models/plugins.py index 06880181..ae5851c9 100644 --- a/docker/models/plugins.py +++ b/docker/models/plugins.py @@ -119,7 +119,7 @@ class Plugin(Model): privileges = self.client.api.plugin_privileges(remote) for d in self.client.api.upgrade_plugin(self.name, remote, privileges): yield d - self._reload() + self.reload() class PluginCollection(Collection): From 180414dcbbde807f85695a03a5c12d5ffc3aa6f3 Mon Sep 17 00:00:00 2001 From: aiordache Date: Tue, 22 Sep 2020 10:20:18 +0200 Subject: [PATCH 099/211] Shell out to SSH client for an ssh connection Signed-off-by: aiordache --- Dockerfile | 4 + Jenkinsfile | 32 +- Makefile | 33 +- docker/api/client.py | 8 +- docker/client.py | 12 +- docker/transport/sshconn.py | 177 +++++++-- tests/Dockerfile | 9 +- tests/Dockerfile-ssh-dind | 23 ++ tests/integration/api_build_test.py | 1 - tests/ssh-keys/authorized_keys | 1 + tests/ssh-keys/config | 3 + tests/ssh-keys/id_rsa | 38 ++ 
tests/ssh-keys/id_rsa.pub | 1 + tests/ssh/__init__.py | 0 tests/ssh/api_build_test.py | 595 ++++++++++++++++++++++++++++ tests/ssh/base.py | 130 ++++++ 16 files changed, 1006 insertions(+), 61 deletions(-) create mode 100644 tests/Dockerfile-ssh-dind create mode 100755 tests/ssh-keys/authorized_keys create mode 100644 tests/ssh-keys/config create mode 100644 tests/ssh-keys/id_rsa create mode 100644 tests/ssh-keys/id_rsa.pub create mode 100644 tests/ssh/__init__.py create mode 100644 tests/ssh/api_build_test.py create mode 100644 tests/ssh/base.py diff --git a/Dockerfile b/Dockerfile index 124f68cd..7309a83e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,6 +2,10 @@ ARG PYTHON_VERSION=2.7 FROM python:${PYTHON_VERSION} +# Add SSH keys and set permissions +COPY tests/ssh-keys /root/.ssh +RUN chmod -R 600 /root/.ssh + RUN mkdir /src WORKDIR /src diff --git a/Jenkinsfile b/Jenkinsfile index 88c21592..fc716d80 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -3,6 +3,7 @@ def imageNameBase = "dockerbuildbot/docker-py" def imageNamePy2 def imageNamePy3 +def imageDindSSH def images = [:] def buildImage = { name, buildargs, pyTag -> @@ -13,7 +14,7 @@ def buildImage = { name, buildargs, pyTag -> img = docker.build(name, buildargs) img.push() } - images[pyTag] = img.id + if (pyTag?.trim()) images[pyTag] = img.id } def buildImages = { -> @@ -23,7 +24,9 @@ def buildImages = { -> imageNamePy2 = "${imageNameBase}:py2-${gitCommit()}" imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}" + imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}" + buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "") buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7") buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7") } @@ -81,22 +84,37 @@ def runTests = { Map settings -> def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" try { sh """docker network create ${testNetwork}""" 
- sh """docker run -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ - docker:${dockerVersion}-dind dockerd -H tcp://0.0.0.0:2375 + sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ + ${imageDindSSH} dockerd -H tcp://0.0.0.0:2375 """ - sh """docker run \\ + sh """docker run --rm \\ --name ${testContainerName} \\ -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\ -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ --network ${testNetwork} \\ --volumes-from ${dindContainerName} \\ ${testImage} \\ - py.test -v -rxs --cov=docker tests/ + py.test -v -rxs --cov=docker --ignore=tests/ssh tests/ + """ + sh """docker stop ${dindContainerName}""" + + // start DIND container with SSH + sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ + ${imageDindSSH} dockerd --experimental""" + sh """docker exec ${dindContainerName} sh -c /usr/sbin/sshd """ + // run SSH tests only + sh """docker run --rm \\ + --name ${testContainerName} \\ + -e "DOCKER_HOST=ssh://${dindContainerName}:22" \\ + -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ + --network ${testNetwork} \\ + --volumes-from ${dindContainerName} \\ + ${testImage} \\ + py.test -v -rxs --cov=docker tests/ssh """ } finally { sh """ - docker stop ${dindContainerName} ${testContainerName} - docker rm -vf ${dindContainerName} ${testContainerName} + docker stop ${dindContainerName} docker network rm ${testNetwork} """ } diff --git a/Makefile b/Makefile index 9f30166d..70d7083e 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,6 @@ +TEST_API_VERSION ?= 1.39 +TEST_ENGINE_VERSION ?= 19.03.13 + .PHONY: all all: test @@ -10,6 +13,10 @@ clean: build: docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 --build-arg APT_MIRROR . 
+.PHONY: build-dind-ssh +build-dind-ssh: + docker build -t docker-dind-ssh -f tests/Dockerfile-ssh-dind --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} --build-arg API_VERSION=${TEST_API_VERSION} --build-arg APT_MIRROR . + .PHONY: build-py3 build-py3: docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR . @@ -41,9 +48,6 @@ integration-test: build integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} -TEST_API_VERSION ?= 1.39 -TEST_ENGINE_VERSION ?= 19.03.12 - .PHONY: setup-network setup-network: docker network inspect dpy-tests || docker network create dpy-tests @@ -69,6 +73,29 @@ integration-dind-py3: build-py3 setup-network --network dpy-tests docker-sdk-python3 py.test tests/integration/${file} docker rm -vf dpy-dind-py3 +.PHONY: integration-ssh-py2 +integration-ssh-py2: build-dind-ssh build setup-network + docker rm -vf dpy-dind-py2 || : + docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\ + docker-dind-ssh dockerd --experimental + # start SSH daemon + docker exec dpy-dind-py2 sh -c "/usr/sbin/sshd" + docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py2" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ + --network dpy-tests docker-sdk-python py.test tests/ssh/${file} + docker rm -vf dpy-dind-py2 + +.PHONY: integration-ssh-py3 +integration-ssh-py3: build-dind-ssh build-py3 setup-network + docker rm -vf dpy-dind-py3 || : + docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\ + docker-dind-ssh dockerd --experimental + # start SSH daemon + docker exec dpy-dind-py3 sh -c "/usr/sbin/sshd" + docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py3" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ + --network dpy-tests docker-sdk-python3 py.test tests/ssh/${file} + docker rm -vf dpy-dind-py3 + + .PHONY: integration-dind-ssl integration-dind-ssl: build-dind-certs build build-py3 docker rm -vf 
dpy-dind-certs dpy-dind-ssl || : diff --git a/docker/api/client.py b/docker/api/client.py index 43e309b5..1edd4345 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -89,6 +89,9 @@ class APIClient( user_agent (str): Set a custom user agent for requests to the server. credstore_env (dict): Override environment variables when calling the credential store process. + use_ssh_client (bool): If set to `True`, an ssh connection is made + via shelling out to the ssh client. Ensure the ssh client is + installed and configured on the host. """ __attrs__ = requests.Session.__attrs__ + ['_auth_configs', @@ -100,7 +103,7 @@ class APIClient( def __init__(self, base_url=None, version=None, timeout=DEFAULT_TIMEOUT_SECONDS, tls=False, user_agent=DEFAULT_USER_AGENT, num_pools=None, - credstore_env=None): + credstore_env=None, use_ssh_client=False): super(APIClient, self).__init__() if tls and not base_url: @@ -161,7 +164,8 @@ class APIClient( elif base_url.startswith('ssh://'): try: self._custom_adapter = SSHHTTPAdapter( - base_url, timeout, pool_connections=num_pools + base_url, timeout, pool_connections=num_pools, + shell_out=use_ssh_client ) except NameError: raise DockerException( diff --git a/docker/client.py b/docker/client.py index 6c397da0..1fea69e6 100644 --- a/docker/client.py +++ b/docker/client.py @@ -35,6 +35,9 @@ class DockerClient(object): user_agent (str): Set a custom user agent for requests to the server. credstore_env (dict): Override environment variables when calling the credential store process. + use_ssh_client (bool): If set to `True`, an ssh connection is made + via shelling out to the ssh client. Ensure the ssh client is + installed and configured on the host. """ def __init__(self, *args, **kwargs): self.api = APIClient(*args, **kwargs) @@ -70,6 +73,9 @@ class DockerClient(object): from. Default: the value of ``os.environ`` credstore_env (dict): Override environment variables when calling the credential store process. 
+ use_ssh_client (bool): If set to `True`, an ssh connection is + made via shelling out to the ssh client. Ensure the ssh + client is installed and configured on the host. Example: @@ -81,8 +87,12 @@ class DockerClient(object): """ timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS) version = kwargs.pop('version', None) + use_ssh_client = kwargs.pop('use_ssh_client', False) return cls( - timeout=timeout, version=version, **kwargs_from_env(**kwargs) + timeout=timeout, + version=version, + use_ssh_client=use_ssh_client, + **kwargs_from_env(**kwargs) ) # Resources diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 9cfd9980..42d1ef96 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -1,8 +1,11 @@ +import io import paramiko import requests.adapters import six import logging import os +import socket +import subprocess from docker.transport.basehttpadapter import BaseHTTPAdapter from .. import constants @@ -20,33 +23,140 @@ except ImportError: RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer +def create_paramiko_client(base_url): + logging.getLogger("paramiko").setLevel(logging.WARNING) + ssh_client = paramiko.SSHClient() + base_url = six.moves.urllib_parse.urlparse(base_url) + ssh_params = { + "hostname": base_url.hostname, + "port": base_url.port, + "username": base_url.username + } + ssh_config_file = os.path.expanduser("~/.ssh/config") + if os.path.exists(ssh_config_file): + conf = paramiko.SSHConfig() + with open(ssh_config_file) as f: + conf.parse(f) + host_config = conf.lookup(base_url.hostname) + ssh_conf = host_config + if 'proxycommand' in host_config: + ssh_params["sock"] = paramiko.ProxyCommand( + ssh_conf['proxycommand'] + ) + if 'hostname' in host_config: + ssh_params['hostname'] = host_config['hostname'] + if 'identityfile' in host_config: + ssh_params['key_filename'] = host_config['identityfile'] + if base_url.port is None and 'port' in host_config: + ssh_params['port'] = 
ssh_conf['port'] + if base_url.username is None and 'user' in host_config: + ssh_params['username'] = ssh_conf['user'] + + ssh_client.load_system_host_keys() + ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) + return ssh_client, ssh_params + + +class SSHSocket(socket.socket): + def __init__(self, host): + super(SSHSocket, self).__init__( + socket.AF_INET, socket.SOCK_STREAM) + self.host = host + self.port = None + if ':' in host: + self.host, self.port = host.split(':') + self.proc = None + + def connect(self, **kwargs): + port = '' if not self.port else '-p {}'.format(self.port) + args = [ + 'ssh', + '-q', + self.host, + port, + 'docker system dial-stdio' + ] + self.proc = subprocess.Popen( + ' '.join(args), + shell=True, + stdout=subprocess.PIPE, + stdin=subprocess.PIPE) + + def _write(self, data): + if not self.proc or self.proc.stdin.closed: + raise Exception('SSH subprocess not initiated.' + 'connect() must be called first.') + written = self.proc.stdin.write(data) + self.proc.stdin.flush() + return written + + def sendall(self, data): + self._write(data) + + def send(self, data): + return self._write(data) + + def recv(self): + if not self.proc: + raise Exception('SSH subprocess not initiated.' 
+ 'connect() must be called first.') + return self.proc.stdout.read() + + def makefile(self, mode): + if not self.proc or self.proc.stdout.closed: + buf = io.BytesIO() + buf.write(b'\n\n') + return buf + return self.proc.stdout + + def close(self): + if not self.proc or self.proc.stdin.closed: + return + self.proc.stdin.write(b'\n\n') + self.proc.stdin.flush() + self.proc.terminate() + + class SSHConnection(httplib.HTTPConnection, object): - def __init__(self, ssh_transport, timeout=60): + def __init__(self, ssh_transport=None, timeout=60, host=None): super(SSHConnection, self).__init__( 'localhost', timeout=timeout ) self.ssh_transport = ssh_transport self.timeout = timeout + self.host = host def connect(self): - sock = self.ssh_transport.open_session() - sock.settimeout(self.timeout) - sock.exec_command('docker system dial-stdio') + if self.ssh_transport: + sock = self.ssh_transport.open_session() + sock.settimeout(self.timeout) + sock.exec_command('docker system dial-stdio') + else: + sock = SSHSocket(self.host) + sock.settimeout(self.timeout) + sock.connect() + self.sock = sock class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): scheme = 'ssh' - def __init__(self, ssh_client, timeout=60, maxsize=10): + def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None): super(SSHConnectionPool, self).__init__( 'localhost', timeout=timeout, maxsize=maxsize ) - self.ssh_transport = ssh_client.get_transport() + self.ssh_transport = None + if ssh_client: + self.ssh_transport = ssh_client.get_transport() self.timeout = timeout + self.host = host + self.port = None + if ':' in host: + self.host, self.port = host.split(':') def _new_conn(self): - return SSHConnection(self.ssh_transport, self.timeout) + return SSHConnection(self.ssh_transport, self.timeout, self.host) # When re-using connections, urllib3 calls fileno() on our # SSH channel instance, quickly overloading our fd limit. 
To avoid this, @@ -78,39 +188,14 @@ class SSHHTTPAdapter(BaseHTTPAdapter): ] def __init__(self, base_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS): - logging.getLogger("paramiko").setLevel(logging.WARNING) - self.ssh_client = paramiko.SSHClient() - base_url = six.moves.urllib_parse.urlparse(base_url) - self.ssh_params = { - "hostname": base_url.hostname, - "port": base_url.port, - "username": base_url.username - } - ssh_config_file = os.path.expanduser("~/.ssh/config") - if os.path.exists(ssh_config_file): - conf = paramiko.SSHConfig() - with open(ssh_config_file) as f: - conf.parse(f) - host_config = conf.lookup(base_url.hostname) - self.ssh_conf = host_config - if 'proxycommand' in host_config: - self.ssh_params["sock"] = paramiko.ProxyCommand( - self.ssh_conf['proxycommand'] - ) - if 'hostname' in host_config: - self.ssh_params['hostname'] = host_config['hostname'] - if 'identityfile' in host_config: - self.ssh_params['key_filename'] = host_config['identityfile'] - if base_url.port is None and 'port' in host_config: - self.ssh_params['port'] = self.ssh_conf['port'] - if base_url.username is None and 'user' in host_config: - self.ssh_params['username'] = self.ssh_conf['user'] - - self.ssh_client.load_system_host_keys() - self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) - - self._connect() + pool_connections=constants.DEFAULT_NUM_POOLS, + shell_out=True): + self.ssh_client = None + if not shell_out: + self.ssh_client, self.ssh_params = create_paramiko_client(base_url) + self._connect() + base_url = base_url.lstrip('ssh://') + self.host = base_url self.timeout = timeout self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() @@ -118,7 +203,8 @@ class SSHHTTPAdapter(BaseHTTPAdapter): super(SSHHTTPAdapter, self).__init__() def _connect(self): - self.ssh_client.connect(**self.ssh_params) + if self.ssh_client: + self.ssh_client.connect(**self.ssh_params) def get_connection(self, url, proxies=None): 
with self.pools.lock: @@ -127,11 +213,13 @@ class SSHHTTPAdapter(BaseHTTPAdapter): return pool # Connection is closed try a reconnect - if not self.ssh_client.get_transport(): + if self.ssh_client and not self.ssh_client.get_transport(): self._connect() pool = SSHConnectionPool( - self.ssh_client, self.timeout + ssh_client=self.ssh_client, + timeout=self.timeout, + host=self.host ) self.pools[url] = pool @@ -139,4 +227,5 @@ class SSHHTTPAdapter(BaseHTTPAdapter): def close(self): super(SSHHTTPAdapter, self).close() - self.ssh_client.close() + if self.ssh_client: + self.ssh_client.close() diff --git a/tests/Dockerfile b/tests/Dockerfile index 27a12673..3236f387 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -6,10 +6,13 @@ ARG APT_MIRROR RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \ && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list -RUN apt-get update && apt-get -y install \ +RUN apt-get update && apt-get -y install --no-install-recommends \ gnupg2 \ - pass \ - curl + pass + +# Add SSH keys and set permissions +COPY tests/ssh-keys /root/.ssh +RUN chmod -R 600 /root/.ssh COPY ./tests/gpg-keys /gpg-keys RUN gpg2 --import gpg-keys/secret diff --git a/tests/Dockerfile-ssh-dind b/tests/Dockerfile-ssh-dind new file mode 100644 index 00000000..9d8f0eab --- /dev/null +++ b/tests/Dockerfile-ssh-dind @@ -0,0 +1,23 @@ +ARG API_VERSION=1.39 +ARG ENGINE_VERSION=19.03.12 + +FROM docker:${ENGINE_VERSION}-dind + +RUN apk add --no-cache \ + openssh + +# Add the keys and set permissions +RUN ssh-keygen -A + +# copy the test SSH config +RUN echo "IgnoreUserKnownHosts yes" >> /etc/ssh/sshd_config && \ + echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config && \ + echo "PermitRootLogin yes" >> /etc/ssh/sshd_config + +# set authorized keys for client paswordless connection +COPY tests/ssh-keys/authorized_keys /root/.ssh/authorized_keys +RUN chmod 600 /root/.ssh/authorized_keys + 
+RUN echo "root:root" | chpasswd +RUN ln -s /usr/local/bin/docker /usr/bin/docker +EXPOSE 22 diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index 57128124..b830a106 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -339,7 +339,6 @@ class BuildTest(BaseAPIIntegrationTest): assert self.client.inspect_image(img_name) ctnr = self.run_container(img_name, 'cat /hosts-file') - self.tmp_containers.append(ctnr) logs = self.client.logs(ctnr) if six.PY3: logs = logs.decode('utf-8') diff --git a/tests/ssh-keys/authorized_keys b/tests/ssh-keys/authorized_keys new file mode 100755 index 00000000..33252fe5 --- /dev/null +++ b/tests/ssh-keys/authorized_keys @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0= diff --git a/tests/ssh-keys/config b/tests/ssh-keys/config new file mode 100644 index 00000000..8dd13540 --- /dev/null +++ b/tests/ssh-keys/config @@ -0,0 +1,3 @@ +Host * + StrictHostKeyChecking no + UserKnownHostsFile=/dev/null diff --git a/tests/ssh-keys/id_rsa b/tests/ssh-keys/id_rsa new file mode 100644 index 00000000..0ec063e2 --- /dev/null +++ b/tests/ssh-keys/id_rsa @@ -0,0 +1,38 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn +NhAAAAAwEAAQAAAYEAvwYl5Gy/aBGxNzyb9UtqddlyuQR1t6kE+UX/gmBtAE2MjDyFTOvi +F1cn90DcaZ7z172zwUCQrNKh3rj8GcthrG7d+UJ5pYK3MxT4l16LAg9jfsK3DkD2Rri40M 
+lFD9siUVUky6afM5NhfMN5WhiAdyZNYVHDFBMXpisUGJPy+NG+a1ypGqy5OWsAbonI0UrT +K3IT0R2dp+9eUxvs0r3/LQf1B0VymD6movyXuXoh98hlMwmOM5/rhKKgBW+FfJaSI/EcNx +F5gmFcBtL4PuOECENoCZyIU5XJscJMp72Z/e57oODS5RiUPrAwpyLzGqcnB3xpDZQc93xb +bvzkbMT6WW0zYP/Z6Gt2X/DqSMLxPxRzT6g3LRpbcMRIEMY+XxN+MdH2JxdPLXowFCSQmR +N2LBoDWm7EuKQ/pEYSPN3hWb4I90NQHkytFfW0TO47o3HPUc/lfRm+c2BBzf5fD8RFZY9D +pVEX/WZZJzUCvMUYefe4w1031UCgjDv50Wlh9m6tAAAFeM2kMyHNpDMhAAAAB3NzaC1yc2 +EAAAGBAL8GJeRsv2gRsTc8m/VLanXZcrkEdbepBPlF/4JgbQBNjIw8hUzr4hdXJ/dA3Gme +89e9s8FAkKzSod64/BnLYaxu3flCeaWCtzMU+JdeiwIPY37Ctw5A9ka4uNDJRQ/bIlFVJM +umnzOTYXzDeVoYgHcmTWFRwxQTF6YrFBiT8vjRvmtcqRqsuTlrAG6JyNFK0ytyE9Ednafv +XlMb7NK9/y0H9QdFcpg+pqL8l7l6IffIZTMJjjOf64SioAVvhXyWkiPxHDcReYJhXAbS+D +7jhAhDaAmciFOVybHCTKe9mf3ue6Dg0uUYlD6wMKci8xqnJwd8aQ2UHPd8W2785GzE+llt +M2D/2ehrdl/w6kjC8T8Uc0+oNy0aW3DESBDGPl8TfjHR9icXTy16MBQkkJkTdiwaA1puxL +ikP6RGEjzd4Vm+CPdDUB5MrRX1tEzuO6Nxz1HP5X0ZvnNgQc3+Xw/ERWWPQ6VRF/1mWSc1 +ArzFGHn3uMNdN9VAoIw7+dFpYfZurQAAAAMBAAEAAAGBAKtnotyiz+Vb6r57vh2OvEpfAd +gOrmpMWVArhSfBykz5SOIU9C+fgVIcPJpaMuz7WiX97Ku9eZP5tJGbP2sN2ejV2ovtICZp +cmV9rcp1ZRpGIKr/oS5DEDlJS1zdHQErSlHcqpWqPzQSTOmcpOk5Dxza25g1u2vp7dCG2x +NqvhySZ+ECViK/Vby1zL9jFzTlhTJ4vFtpzauA2AyPBCPdpHkNqMoLgNYncXLSYHpnos8p +m9T+AAFGwBhVrGz0Mr0mhRDnV/PgbKplKT7l+CGceb8LuWmj/vzuP5Wv6dglw3hJnT2V5p +nTBp3dJ6R006+yvr5T/Xb+ObGqFfgfenjLfHjqbJ/gZdGWt4Le84g8tmSkjJBJ2Yj3kynQ +sdfv9k7JJ4t5euoje0XW0YVN1ih5DdyO4hHDRD1lSTFYT5Gl2sCTt28qsMC12rWzFkezJo +Fhewq2Ddtg4AK6SxqH4rFQCmgOR/ci7jv9TXS9xEQxYliyN5aNymRTyXmwqBIzjNKR6QAA +AMEAxpme2upng9LS6Epa83d1gnWUilYPbpb1C8+1FgpnBv9zkjFE1vY0Vu4i9LcLGlCQ0x +PB1Z16TQlEluqiSuSA0eyaWSQBF9NyGsOCOZ63lpJs/2FRBfcbUvHhv8/g1fv/xvI+FnE+ +DoAhz8V3byU8HUZer7pQY3hSxisdYdsaromxC8DSSPFQoxpxwh7WuP4c3veWkdL13h4fSN +khGr3G1XGfsZOu6V6F1i7yMU6OcwBAxzPsHqZv66sT8lE6n4xjAAAAwQDzAaVaJqZ2ROoF +loltJZUtE7o+zpoDzjOJyGYaCYTU4dHPN1aeYBjw8QfmJhdmZfJp9AeJDB/W0wzoHi2ONI +chnQ1EdbCLk9pvA7rhfVdZaxPeHwniDp2iA/wZKTRG3hav9nEzS72uXuZprCsbBvGXeR0z 
+iuIx5odVXG8qyuI9lDY6B/IoLg7zd+V6iw9mqWYlLLsgHiAvg32LAT4j0KoTufOqpnxqTQ +P2EguTmxDWkfQmbEHdJvbD2tLQ90zMlwMAAADBAMk88wOA1i/TibH5gm/lAtKPcNKbrHfk +7O9gdSZd2HL0fLjptpOplS89Y7muTElsRDRGiKq+7KV/sxQRNcITkxdTKu8CKnftFWHrLk +9WHWVHXbu9h8ttsKeUr9i27ojxpe5I82of8k7fJTg1LxMnGzuDZfq1BGsQnOWrY7r1Yjcd +8EtSrwOB+J/S4U+rR6kwUEFYeBkhE599P1EtHTCm8kWh368di9Q+Y/VIOa3qRx4hxuiCLI +qj4ZpdVMk2cCNcjwAAAAAB +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/ssh-keys/id_rsa.pub b/tests/ssh-keys/id_rsa.pub new file mode 100644 index 00000000..33252fe5 --- /dev/null +++ b/tests/ssh-keys/id_rsa.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0= diff --git a/tests/ssh/__init__.py b/tests/ssh/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/ssh/api_build_test.py b/tests/ssh/api_build_test.py new file mode 100644 index 00000000..b830a106 --- /dev/null +++ b/tests/ssh/api_build_test.py @@ -0,0 +1,595 @@ +import io +import os +import shutil +import tempfile + +from docker import errors +from docker.utils.proxy import ProxyConfig + +import pytest +import six + +from .base import BaseAPIIntegrationTest, TEST_IMG +from ..helpers import random_name, requires_api_version, requires_experimental + + +class BuildTest(BaseAPIIntegrationTest): + def test_build_with_proxy(self): + self.client._proxy_configs = ProxyConfig( + ftp='a', http='b', https='c', no_proxy='d' + ) + + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'RUN env | grep "FTP_PROXY=a"', + 'RUN env | 
grep "ftp_proxy=a"', + 'RUN env | grep "HTTP_PROXY=b"', + 'RUN env | grep "http_proxy=b"', + 'RUN env | grep "HTTPS_PROXY=c"', + 'RUN env | grep "https_proxy=c"', + 'RUN env | grep "NO_PROXY=d"', + 'RUN env | grep "no_proxy=d"', + ]).encode('ascii')) + + self.client.build(fileobj=script, decode=True) + + def test_build_with_proxy_and_buildargs(self): + self.client._proxy_configs = ProxyConfig( + ftp='a', http='b', https='c', no_proxy='d' + ) + + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'RUN env | grep "FTP_PROXY=XXX"', + 'RUN env | grep "ftp_proxy=xxx"', + 'RUN env | grep "HTTP_PROXY=b"', + 'RUN env | grep "http_proxy=b"', + 'RUN env | grep "HTTPS_PROXY=c"', + 'RUN env | grep "https_proxy=c"', + 'RUN env | grep "NO_PROXY=d"', + 'RUN env | grep "no_proxy=d"', + ]).encode('ascii')) + + self.client.build( + fileobj=script, + decode=True, + buildargs={'FTP_PROXY': 'XXX', 'ftp_proxy': 'xxx'} + ) + + def test_build_streaming(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ]).encode('ascii')) + stream = self.client.build(fileobj=script, decode=True) + logs = [] + for chunk in stream: + logs.append(chunk) + assert len(logs) > 0 + + def test_build_from_stringio(self): + if six.PY3: + return + script = io.StringIO(six.text_type('\n').join([ + 'FROM busybox', + 'RUN mkdir -p /tmp/test', + 'EXPOSE 8080', + 'ADD https://dl.dropboxusercontent.com/u/20637798/silence.tar.gz' + ' /tmp/silence.tar.gz' + ])) + stream = self.client.build(fileobj=script) + logs = '' + for chunk in stream: + if six.PY3: + chunk = chunk.decode('utf-8') + logs += chunk + assert logs != '' + + def test_build_with_dockerignore(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + + with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: + f.write("\n".join([ + 'FROM busybox', + 'ADD . 
/test', + ])) + + with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: + f.write("\n".join([ + 'ignored', + 'Dockerfile', + '.dockerignore', + '!ignored/subdir/excepted-file', + '', # empty line, + '#*', # comment line + ])) + + with open(os.path.join(base_dir, 'not-ignored'), 'w') as f: + f.write("this file should not be ignored") + + with open(os.path.join(base_dir, '#file.txt'), 'w') as f: + f.write('this file should not be ignored') + + subdir = os.path.join(base_dir, 'ignored', 'subdir') + os.makedirs(subdir) + with open(os.path.join(subdir, 'file'), 'w') as f: + f.write("this file should be ignored") + + with open(os.path.join(subdir, 'excepted-file'), 'w') as f: + f.write("this file should not be ignored") + + tag = 'docker-py-test-build-with-dockerignore' + stream = self.client.build( + path=base_dir, + tag=tag, + ) + for chunk in stream: + pass + + c = self.client.create_container(tag, ['find', '/test', '-type', 'f']) + self.client.start(c) + self.client.wait(c) + logs = self.client.logs(c) + + if six.PY3: + logs = logs.decode('utf-8') + + assert sorted(list(filter(None, logs.split('\n')))) == sorted([ + '/test/#file.txt', + '/test/ignored/subdir/excepted-file', + '/test/not-ignored' + ]) + + def test_build_with_buildargs(self): + script = io.BytesIO('\n'.join([ + 'FROM scratch', + 'ARG test', + 'USER $test' + ]).encode('ascii')) + + stream = self.client.build( + fileobj=script, tag='buildargs', buildargs={'test': 'OK'} + ) + self.tmp_imgs.append('buildargs') + for chunk in stream: + pass + + info = self.client.inspect_image('buildargs') + assert info['Config']['User'] == 'OK' + + @requires_api_version('1.22') + def test_build_shmsize(self): + script = io.BytesIO('\n'.join([ + 'FROM scratch', + 'CMD sh -c "echo \'Hello, World!\'"', + ]).encode('ascii')) + + tag = 'shmsize' + shmsize = 134217728 + + stream = self.client.build( + fileobj=script, tag=tag, shmsize=shmsize + ) + self.tmp_imgs.append(tag) + for chunk in stream: + pass + + # There is 
currently no way to get the shmsize + # that was used to build the image + + @requires_api_version('1.24') + def test_build_isolation(self): + script = io.BytesIO('\n'.join([ + 'FROM scratch', + 'CMD sh -c "echo \'Deaf To All But The Song\'' + ]).encode('ascii')) + + stream = self.client.build( + fileobj=script, tag='isolation', + isolation='default' + ) + + for chunk in stream: + pass + + @requires_api_version('1.23') + def test_build_labels(self): + script = io.BytesIO('\n'.join([ + 'FROM scratch', + ]).encode('ascii')) + + labels = {'test': 'OK'} + + stream = self.client.build( + fileobj=script, tag='labels', labels=labels + ) + self.tmp_imgs.append('labels') + for chunk in stream: + pass + + info = self.client.inspect_image('labels') + assert info['Config']['Labels'] == labels + + @requires_api_version('1.25') + def test_build_with_cache_from(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'ENV FOO=bar', + 'RUN touch baz', + 'RUN touch bax', + ]).encode('ascii')) + + stream = self.client.build(fileobj=script, tag='build1') + self.tmp_imgs.append('build1') + for chunk in stream: + pass + + stream = self.client.build( + fileobj=script, tag='build2', cache_from=['build1'], + decode=True + ) + self.tmp_imgs.append('build2') + counter = 0 + for chunk in stream: + if 'Using cache' in chunk.get('stream', ''): + counter += 1 + assert counter == 3 + self.client.remove_image('build2') + + counter = 0 + stream = self.client.build( + fileobj=script, tag='build2', cache_from=['nosuchtag'], + decode=True + ) + for chunk in stream: + if 'Using cache' in chunk.get('stream', ''): + counter += 1 + assert counter == 0 + + @requires_api_version('1.29') + def test_build_container_with_target(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox as first', + 'RUN mkdir -p /tmp/test', + 'RUN touch /tmp/silence.tar.gz', + 'FROM alpine:latest', + 'WORKDIR /root/' + 'COPY --from=first /tmp/silence.tar.gz .', + 'ONBUILD RUN echo "This should not be in the final image"' + 
]).encode('ascii')) + + stream = self.client.build( + fileobj=script, target='first', tag='build1' + ) + self.tmp_imgs.append('build1') + for chunk in stream: + pass + + info = self.client.inspect_image('build1') + assert not info['Config']['OnBuild'] + + @requires_api_version('1.25') + def test_build_with_network_mode(self): + # Set up pingable endpoint on custom network + network = self.client.create_network(random_name())['Id'] + self.tmp_networks.append(network) + container = self.client.create_container(TEST_IMG, 'top') + self.tmp_containers.append(container) + self.client.start(container) + self.client.connect_container_to_network( + container, network, aliases=['pingtarget.docker'] + ) + + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'RUN ping -c1 pingtarget.docker' + ]).encode('ascii')) + + stream = self.client.build( + fileobj=script, network_mode=network, + tag='dockerpytest_customnetbuild' + ) + + self.tmp_imgs.append('dockerpytest_customnetbuild') + for chunk in stream: + pass + + assert self.client.inspect_image('dockerpytest_customnetbuild') + + script.seek(0) + stream = self.client.build( + fileobj=script, network_mode='none', + tag='dockerpytest_nonebuild', nocache=True, decode=True + ) + + self.tmp_imgs.append('dockerpytest_nonebuild') + logs = [chunk for chunk in stream] + assert 'errorDetail' in logs[-1] + assert logs[-1]['errorDetail']['code'] == 1 + + with pytest.raises(errors.NotFound): + self.client.inspect_image('dockerpytest_nonebuild') + + @requires_api_version('1.27') + def test_build_with_extra_hosts(self): + img_name = 'dockerpytest_extrahost_build' + self.tmp_imgs.append(img_name) + + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'RUN ping -c1 hello.world.test', + 'RUN ping -c1 extrahost.local.test', + 'RUN cp /etc/hosts /hosts-file' + ]).encode('ascii')) + + stream = self.client.build( + fileobj=script, tag=img_name, + extra_hosts={ + 'extrahost.local.test': '127.0.0.1', + 'hello.world.test': '127.0.0.1', + }, decode=True 
+ ) + for chunk in stream: + if 'errorDetail' in chunk: + pytest.fail(chunk) + + assert self.client.inspect_image(img_name) + ctnr = self.run_container(img_name, 'cat /hosts-file') + logs = self.client.logs(ctnr) + if six.PY3: + logs = logs.decode('utf-8') + assert '127.0.0.1\textrahost.local.test' in logs + assert '127.0.0.1\thello.world.test' in logs + + @requires_experimental(until=None) + @requires_api_version('1.25') + def test_build_squash(self): + script = io.BytesIO('\n'.join([ + 'FROM busybox', + 'RUN echo blah > /file_1', + 'RUN echo blahblah > /file_2', + 'RUN echo blahblahblah > /file_3' + ]).encode('ascii')) + + def build_squashed(squash): + tag = 'squash' if squash else 'nosquash' + stream = self.client.build( + fileobj=script, tag=tag, squash=squash + ) + self.tmp_imgs.append(tag) + for chunk in stream: + pass + + return self.client.inspect_image(tag) + + non_squashed = build_squashed(False) + squashed = build_squashed(True) + assert len(non_squashed['RootFS']['Layers']) == 4 + assert len(squashed['RootFS']['Layers']) == 2 + + def test_build_stderr_data(self): + control_chars = ['\x1b[91m', '\x1b[0m'] + snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' + script = io.BytesIO(b'\n'.join([ + b'FROM busybox', + 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') + ])) + + stream = self.client.build( + fileobj=script, decode=True, nocache=True + ) + lines = [] + for chunk in stream: + lines.append(chunk.get('stream')) + expected = '{0}{2}\n{1}'.format( + control_chars[0], control_chars[1], snippet + ) + assert any([line == expected for line in lines]) + + def test_build_gzip_encoding(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + + with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: + f.write("\n".join([ + 'FROM busybox', + 'ADD . 
/test', + ])) + + stream = self.client.build( + path=base_dir, decode=True, nocache=True, + gzip=True + ) + + lines = [] + for chunk in stream: + lines.append(chunk) + + assert 'Successfully built' in lines[-1]['stream'] + + def test_build_with_dockerfile_empty_lines(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + with open(os.path.join(base_dir, 'Dockerfile'), 'w') as f: + f.write('FROM busybox\n') + with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: + f.write('\n'.join([ + ' ', + '', + '\t\t', + '\t ', + ])) + + stream = self.client.build( + path=base_dir, decode=True, nocache=True + ) + + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully built' in lines[-1]['stream'] + + def test_build_gzip_custom_encoding(self): + with pytest.raises(errors.DockerException): + self.client.build(path='.', gzip=True, encoding='text/html') + + @requires_api_version('1.32') + @requires_experimental(until=None) + def test_build_invalid_platform(self): + script = io.BytesIO('FROM busybox\n'.encode('ascii')) + + with pytest.raises(errors.APIError) as excinfo: + stream = self.client.build(fileobj=script, platform='foobar') + for _ in stream: + pass + + # Some API versions incorrectly returns 500 status; assert 4xx or 5xx + assert excinfo.value.is_error() + assert 'unknown operating system' in excinfo.exconly() \ + or 'invalid platform' in excinfo.exconly() + + def test_build_out_of_context_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + with open(os.path.join(base_dir, '.dockerignore'), 'w') as f: + f.write('.dockerignore\n') + df_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, df_dir) + df_name = os.path.join(df_dir, 'Dockerfile') + with open(df_name, 'wb') as df: + df.write(('\n'.join([ + 'FROM busybox', + 'COPY . 
/src', + 'WORKDIR /src', + ])).encode('utf-8')) + df.flush() + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile=df_name, tag=img_name, + decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 3 + assert sorted([b'.', b'..', b'file.txt']) == sorted(lsdata) + + def test_build_in_context_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + with open(os.path.join(base_dir, 'custom.dockerfile'), 'w') as df: + df.write('\n'.join([ + 'FROM busybox', + 'COPY . /src', + 'WORKDIR /src', + ])) + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile='custom.dockerfile', tag=img_name, + decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 4 + assert sorted( + [b'.', b'..', b'file.txt', b'custom.dockerfile'] + ) == sorted(lsdata) + + def test_build_in_context_nested_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + subdir = os.path.join(base_dir, 'hello', 'world') + os.makedirs(subdir) + with open(os.path.join(subdir, 'custom.dockerfile'), 'w') as df: + df.write('\n'.join([ + 'FROM busybox', + 'COPY . 
/src', + 'WORKDIR /src', + ])) + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile='hello/world/custom.dockerfile', + tag=img_name, decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 4 + assert sorted( + [b'.', b'..', b'file.txt', b'hello'] + ) == sorted(lsdata) + + def test_build_in_context_abs_dockerfile(self): + base_dir = tempfile.mkdtemp() + self.addCleanup(shutil.rmtree, base_dir) + abs_dockerfile_path = os.path.join(base_dir, 'custom.dockerfile') + with open(os.path.join(base_dir, 'file.txt'), 'w') as f: + f.write('hello world') + with open(abs_dockerfile_path, 'w') as df: + df.write('\n'.join([ + 'FROM busybox', + 'COPY . /src', + 'WORKDIR /src', + ])) + img_name = random_name() + self.tmp_imgs.append(img_name) + stream = self.client.build( + path=base_dir, dockerfile=abs_dockerfile_path, tag=img_name, + decode=True + ) + lines = [] + for chunk in stream: + lines.append(chunk) + assert 'Successfully tagged' in lines[-1]['stream'] + + ctnr = self.client.create_container(img_name, 'ls -a') + self.tmp_containers.append(ctnr) + self.client.start(ctnr) + lsdata = self.client.logs(ctnr).strip().split(b'\n') + assert len(lsdata) == 4 + assert sorted( + [b'.', b'..', b'file.txt', b'custom.dockerfile'] + ) == sorted(lsdata) + + @requires_api_version('1.31') + @pytest.mark.xfail( + True, + reason='Currently fails on 18.09: ' + 'https://github.com/moby/moby/issues/37920' + ) + def test_prune_builds(self): + prune_result = self.client.prune_builds() + assert 'SpaceReclaimed' in prune_result + assert isinstance(prune_result['SpaceReclaimed'], int) diff --git a/tests/ssh/base.py b/tests/ssh/base.py new file mode 100644 index 
00000000..c723d823 --- /dev/null +++ b/tests/ssh/base.py @@ -0,0 +1,130 @@ +import os +import shutil +import unittest + +import docker +from .. import helpers +from docker.utils import kwargs_from_env + +TEST_IMG = 'alpine:3.10' +TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION') + + +class BaseIntegrationTest(unittest.TestCase): + """ + A base class for integration test cases. It cleans up the Docker server + after itself. + """ + + def setUp(self): + self.tmp_imgs = [] + self.tmp_containers = [] + self.tmp_folders = [] + self.tmp_volumes = [] + self.tmp_networks = [] + self.tmp_plugins = [] + self.tmp_secrets = [] + self.tmp_configs = [] + + def tearDown(self): + client = docker.from_env(version=TEST_API_VERSION, use_ssh_client=True) + try: + for img in self.tmp_imgs: + try: + client.api.remove_image(img) + except docker.errors.APIError: + pass + for container in self.tmp_containers: + try: + client.api.remove_container(container, force=True, v=True) + except docker.errors.APIError: + pass + for network in self.tmp_networks: + try: + client.api.remove_network(network) + except docker.errors.APIError: + pass + for volume in self.tmp_volumes: + try: + client.api.remove_volume(volume) + except docker.errors.APIError: + pass + + for secret in self.tmp_secrets: + try: + client.api.remove_secret(secret) + except docker.errors.APIError: + pass + + for config in self.tmp_configs: + try: + client.api.remove_config(config) + except docker.errors.APIError: + pass + + for folder in self.tmp_folders: + shutil.rmtree(folder) + finally: + client.close() + + +class BaseAPIIntegrationTest(BaseIntegrationTest): + """ + A test case for `APIClient` integration tests. It sets up an `APIClient` + as `self.client`. 
+ """ + @classmethod + def setUpClass(cls): + cls.client = cls.get_client_instance() + cls.client.pull(TEST_IMG) + + def tearDown(self): + super(BaseAPIIntegrationTest, self).tearDown() + self.client.close() + + @staticmethod + def get_client_instance(): + return docker.APIClient( + version=TEST_API_VERSION, + timeout=60, + use_ssh_client=True, + **kwargs_from_env() + ) + + @staticmethod + def _init_swarm(client, **kwargs): + return client.init_swarm( + '127.0.0.1', listen_addr=helpers.swarm_listen_addr(), **kwargs + ) + + def run_container(self, *args, **kwargs): + container = self.client.create_container(*args, **kwargs) + self.tmp_containers.append(container) + self.client.start(container) + exitcode = self.client.wait(container)['StatusCode'] + + if exitcode != 0: + output = self.client.logs(container) + raise Exception( + "Container exited with code {}:\n{}" + .format(exitcode, output)) + + return container + + def create_and_start(self, image=TEST_IMG, command='top', **kwargs): + container = self.client.create_container( + image=image, command=command, **kwargs) + self.tmp_containers.append(container) + self.client.start(container) + return container + + def execute(self, container, cmd, exit_code=0, **kwargs): + exc = self.client.exec_create(container, cmd, **kwargs) + output = self.client.exec_start(exc) + actual_exit_code = self.client.exec_inspect(exc)['ExitCode'] + msg = "Expected `{}` to exit with code {} but returned {}:\n{}".format( + " ".join(cmd), exit_code, actual_exit_code, output) + assert actual_exit_code == exit_code, msg + + def init_swarm(self, **kwargs): + return self._init_swarm(self.client, **kwargs) From 3766f77c20e1e14d5ad49bdcf7314f3f8459927d Mon Sep 17 00:00:00 2001 From: Yuval Goldberg Date: Sun, 16 Aug 2020 18:54:14 +0300 Subject: [PATCH 100/211] Add response url to Server Error and Client Error messages Signed-off-by: Yuval Goldberg --- docker/errors.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git 
a/docker/errors.py b/docker/errors.py index e5d07a5b..ab30a290 100644 --- a/docker/errors.py +++ b/docker/errors.py @@ -46,12 +46,14 @@ class APIError(requests.exceptions.HTTPError, DockerException): message = super(APIError, self).__str__() if self.is_client_error(): - message = '{0} Client Error: {1}'.format( - self.response.status_code, self.response.reason) + message = '{0} Client Error for {1}: {2}'.format( + self.response.status_code, self.response.url, + self.response.reason) elif self.is_server_error(): - message = '{0} Server Error: {1}'.format( - self.response.status_code, self.response.reason) + message = '{0} Server Error for {1}: {2}'.format( + self.response.status_code, self.response.url, + self.response.reason) if self.explanation: message = '{0} ("{1}")'.format(message, self.explanation) From 93bcc0497d8302aa2d78bd7ef756fc2ff3fd0912 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Oct 2020 21:18:15 +0000 Subject: [PATCH 101/211] Bump cryptography from 2.3 to 3.2 Bumps [cryptography](https://github.com/pyca/cryptography) from 2.3 to 3.2. 
- [Release notes](https://github.com/pyca/cryptography/releases) - [Changelog](https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/2.3...3.2) Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 340e4312..0edd4e1e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ appdirs==1.4.3 asn1crypto==0.22.0 backports.ssl-match-hostname==3.5.0.1 cffi==1.10.0 -cryptography==2.3 +cryptography==3.2 enum34==1.1.6 idna==2.5 ipaddress==1.0.18 From 9c53024eade661d98d5a344bc08b2dc4ed386903 Mon Sep 17 00:00:00 2001 From: Daeseok Youn Date: Fri, 7 Feb 2020 18:11:00 +0900 Subject: [PATCH 102/211] raise an error for binding specific ports in 'host' mode of network The binding ports are ignored where the network mode is 'host'. It could be a problem in case of using these options together on Mac or Windows OS. Because the limitation that could not use the 'host' in network_mode on Mac and Windows. When 'host' mode is set on network_mode, the specific ports in 'ports' are ignored so the network is not able to be accessed through defined ports by developer. Signed-off-by: Daeseok Youn --- docker/api/container.py | 7 ++++++- docker/models/containers.py | 5 +++++ docker/types/containers.py | 12 ++++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/docker/api/container.py b/docker/api/container.py index 24eb9c1c..cfd51470 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -523,6 +523,10 @@ class ContainerApiMixin(object): - ``container:`` Reuse another container's network stack. - ``host`` Use the host network stack. + This mode is incompatible with ``port_bindings``. + If ``host`` is used as network_mode, all of listed up to + ``port_bindings``` are ignored in running container. + oom_kill_disable (bool): Whether to disable OOM killer. 
oom_score_adj (int): An integer value containing the score given to the container in order to tune OOM killer preferences. @@ -531,7 +535,8 @@ class ContainerApiMixin(object): pids_limit (int): Tune a container's pids limit. Set ``-1`` for unlimited. port_bindings (dict): See :py:meth:`create_container` - for more information. + for more information. The binding ports are ignored in + ``host`` as network mode. privileged (bool): Give extended privileges to this container. publish_all_ports (bool): Publish all ports to the host. read_only (bool): Mount the container's root filesystem as read diff --git a/docker/models/containers.py b/docker/models/containers.py index 0c2b855a..bcd78017 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -649,6 +649,9 @@ class ContainerCollection(Collection): - ``container:`` Reuse another container's network stack. - ``host`` Use the host network stack. + This mode is incompatible with ``ports``. If ``host`` is + used as network_mode, all of listed up to ``ports``` are + ignored in running container. Incompatible with ``network``. oom_kill_disable (bool): Whether to disable OOM killer. @@ -667,6 +670,8 @@ class ContainerCollection(Collection): ``port/protocol``, where the protocol is either ``tcp``, ``udp``, or ``sctp``. + Ports are ignored to bind with ``host`` as network mode. 
+ The values of the dictionary are the corresponding ports to open on the host, which can be either: diff --git a/docker/types/containers.py b/docker/types/containers.py index 44bfcfd8..1df3fff1 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -334,6 +334,11 @@ class HostConfig(dict): if dns_search: self['DnsSearch'] = dns_search + if network_mode is 'host' and port_bindings is not None: + raise host_config_incompatible_error( + 'network_mode', 'host', 'port_bindings' + ) + if network_mode: self['NetworkMode'] = network_mode elif network_mode is None: @@ -664,6 +669,13 @@ def host_config_value_error(param, param_value): return ValueError(error_msg.format(param, param_value)) +def host_config_incompatible_error(param, param_value, incompatible_param): + error_msg = 'Incompatible {1} in {0} is not compatible with {2}' + return errors.InvalidArgument( + error_msg.format(param, param_value, incompatible_param) + ) + + class ContainerConfig(dict): def __init__( self, version, image, command, hostname=None, user=None, detach=False, From 433264d04b56ef234998be9ec33745d087d3caba Mon Sep 17 00:00:00 2001 From: Daeseok Youn Date: Tue, 17 Nov 2020 21:25:00 +0900 Subject: [PATCH 103/211] Correct comments on ports_binding and host mode as network_mode Signed-off-by: Daeseok Youn --- docker/api/container.py | 6 ++---- docker/models/containers.py | 7 ++----- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index cfd51470..754b5dc6 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -524,8 +524,6 @@ class ContainerApiMixin(object): stack. - ``host`` Use the host network stack. This mode is incompatible with ``port_bindings``. - If ``host`` is used as network_mode, all of listed up to - ``port_bindings``` are ignored in running container. oom_kill_disable (bool): Whether to disable OOM killer. 
oom_score_adj (int): An integer value containing the score given @@ -535,8 +533,8 @@ class ContainerApiMixin(object): pids_limit (int): Tune a container's pids limit. Set ``-1`` for unlimited. port_bindings (dict): See :py:meth:`create_container` - for more information. The binding ports are ignored in - ``host`` as network mode. + for more information. + Imcompatible with ``host`` in ``network_mode``. privileged (bool): Give extended privileges to this container. publish_all_ports (bool): Publish all ports to the host. read_only (bool): Mount the container's root filesystem as read diff --git a/docker/models/containers.py b/docker/models/containers.py index bcd78017..120386a1 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -649,9 +649,7 @@ class ContainerCollection(Collection): - ``container:`` Reuse another container's network stack. - ``host`` Use the host network stack. - This mode is incompatible with ``ports``. If ``host`` is - used as network_mode, all of listed up to ``ports``` are - ignored in running container. + This mode is incompatible with ``ports``. Incompatible with ``network``. oom_kill_disable (bool): Whether to disable OOM killer. @@ -670,8 +668,6 @@ class ContainerCollection(Collection): ``port/protocol``, where the protocol is either ``tcp``, ``udp``, or ``sctp``. - Ports are ignored to bind with ``host`` as network mode. - The values of the dictionary are the corresponding ports to open on the host, which can be either: @@ -687,6 +683,7 @@ class ContainerCollection(Collection): to a single container port. For example, ``{'1111/tcp': [1234, 4567]}``. + Imcompatible with ``host`` in ``network_mode``. privileged (bool): Give extended privileges to this container. publish_all_ports (bool): Publish all ports to the host. 
read_only (bool): Mount the container's root filesystem as read From bb1c528ab3c67ac6ceb3f8a65a7cc0f919cf83fe Mon Sep 17 00:00:00 2001 From: Mariano Scazzariello Date: Tue, 17 Nov 2020 15:42:36 +0100 Subject: [PATCH 104/211] Add max_pool_size parameter (#2699) * Add max_pool_size parameter Signed-off-by: Mariano Scazzariello * Add client version to tests Signed-off-by: Mariano Scazzariello * Fix parameter position Signed-off-by: Mariano Scazzariello --- docker/api/client.py | 19 ++-- docker/client.py | 8 +- docker/constants.py | 2 + docker/transport/npipeconn.py | 10 ++- docker/transport/sshconn.py | 5 +- docker/transport/unixconn.py | 10 ++- tests/unit/client_test.py | 158 +++++++++++++++++++++++++++++++++- 7 files changed, 195 insertions(+), 17 deletions(-) diff --git a/docker/api/client.py b/docker/api/client.py index 1edd4345..fbf7ad45 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -9,9 +9,9 @@ import websocket from .. import auth from ..constants import (DEFAULT_NUM_POOLS, DEFAULT_NUM_POOLS_SSH, - DEFAULT_TIMEOUT_SECONDS, DEFAULT_USER_AGENT, - IS_WINDOWS_PLATFORM, MINIMUM_DOCKER_API_VERSION, - STREAM_HEADER_SIZE_BYTES) + DEFAULT_MAX_POOL_SIZE, DEFAULT_TIMEOUT_SECONDS, + DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM, + MINIMUM_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES) from ..errors import (DockerException, InvalidVersion, TLSParameterError, create_api_error_from_http_exception) from ..tls import TLSConfig @@ -92,6 +92,8 @@ class APIClient( use_ssh_client (bool): If set to `True`, an ssh connection is made via shelling out to the ssh client. Ensure the ssh client is installed and configured on the host. + max_pool_size (int): The maximum number of connections + to save in the pool. 
""" __attrs__ = requests.Session.__attrs__ + ['_auth_configs', @@ -103,7 +105,8 @@ class APIClient( def __init__(self, base_url=None, version=None, timeout=DEFAULT_TIMEOUT_SECONDS, tls=False, user_agent=DEFAULT_USER_AGENT, num_pools=None, - credstore_env=None, use_ssh_client=False): + credstore_env=None, use_ssh_client=False, + max_pool_size=DEFAULT_MAX_POOL_SIZE): super(APIClient, self).__init__() if tls and not base_url: @@ -139,7 +142,8 @@ class APIClient( if base_url.startswith('http+unix://'): self._custom_adapter = UnixHTTPAdapter( - base_url, timeout, pool_connections=num_pools + base_url, timeout, pool_connections=num_pools, + max_pool_size=max_pool_size ) self.mount('http+docker://', self._custom_adapter) self._unmount('http://', 'https://') @@ -153,7 +157,8 @@ class APIClient( ) try: self._custom_adapter = NpipeHTTPAdapter( - base_url, timeout, pool_connections=num_pools + base_url, timeout, pool_connections=num_pools, + max_pool_size=max_pool_size ) except NameError: raise DockerException( @@ -165,7 +170,7 @@ class APIClient( try: self._custom_adapter = SSHHTTPAdapter( base_url, timeout, pool_connections=num_pools, - shell_out=use_ssh_client + max_pool_size=max_pool_size, shell_out=use_ssh_client ) except NameError: raise DockerException( diff --git a/docker/client.py b/docker/client.py index 1fea69e6..5add5d73 100644 --- a/docker/client.py +++ b/docker/client.py @@ -1,5 +1,5 @@ from .api.client import APIClient -from .constants import DEFAULT_TIMEOUT_SECONDS +from .constants import (DEFAULT_TIMEOUT_SECONDS, DEFAULT_MAX_POOL_SIZE) from .models.configs import ConfigCollection from .models.containers import ContainerCollection from .models.images import ImageCollection @@ -38,6 +38,8 @@ class DockerClient(object): use_ssh_client (bool): If set to `True`, an ssh connection is made via shelling out to the ssh client. Ensure the ssh client is installed and configured on the host. + max_pool_size (int): The maximum number of connections + to save in the pool. 
""" def __init__(self, *args, **kwargs): self.api = APIClient(*args, **kwargs) @@ -67,6 +69,8 @@ class DockerClient(object): version (str): The version of the API to use. Set to ``auto`` to automatically detect the server's version. Default: ``auto`` timeout (int): Default timeout for API calls, in seconds. + max_pool_size (int): The maximum number of connections + to save in the pool. ssl_version (int): A valid `SSL version`_. assert_hostname (bool): Verify the hostname of the server. environment (dict): The environment to read environment variables @@ -86,10 +90,12 @@ class DockerClient(object): https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLSv1 """ timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT_SECONDS) + max_pool_size = kwargs.pop('max_pool_size', DEFAULT_MAX_POOL_SIZE) version = kwargs.pop('version', None) use_ssh_client = kwargs.pop('use_ssh_client', False) return cls( timeout=timeout, + max_pool_size=max_pool_size, version=version, use_ssh_client=use_ssh_client, **kwargs_from_env(**kwargs) diff --git a/docker/constants.py b/docker/constants.py index c09eedab..43fce613 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -36,6 +36,8 @@ DEFAULT_NUM_POOLS = 25 # For more details see: https://github.com/docker/docker-py/issues/2246 DEFAULT_NUM_POOLS_SSH = 9 +DEFAULT_MAX_POOL_SIZE = 10 + DEFAULT_DATA_CHUNK_SIZE = 1024 * 2048 DEFAULT_SWARM_ADDR_POOL = ['10.0.0.0/8'] diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py index aa05538d..70d8519d 100644 --- a/docker/transport/npipeconn.py +++ b/docker/transport/npipeconn.py @@ -73,12 +73,15 @@ class NpipeHTTPAdapter(BaseHTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['npipe_path', 'pools', - 'timeout'] + 'timeout', + 'max_pool_size'] def __init__(self, base_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS): + pool_connections=constants.DEFAULT_NUM_POOLS, + max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): self.npipe_path = 
base_url.replace('npipe://', '') self.timeout = timeout + self.max_pool_size = max_pool_size self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) @@ -91,7 +94,8 @@ class NpipeHTTPAdapter(BaseHTTPAdapter): return pool pool = NpipeHTTPConnectionPool( - self.npipe_path, self.timeout + self.npipe_path, self.timeout, + maxsize=self.max_pool_size ) self.pools[url] = pool diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 42d1ef96..cdeeae4e 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -184,11 +184,12 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): class SSHHTTPAdapter(BaseHTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [ - 'pools', 'timeout', 'ssh_client', 'ssh_params' + 'pools', 'timeout', 'ssh_client', 'ssh_params', 'max_pool_size' ] def __init__(self, base_url, timeout=60, pool_connections=constants.DEFAULT_NUM_POOLS, + max_pool_size=constants.DEFAULT_MAX_POOL_SIZE, shell_out=True): self.ssh_client = None if not shell_out: @@ -197,6 +198,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): base_url = base_url.lstrip('ssh://') self.host = base_url self.timeout = timeout + self.max_pool_size = max_pool_size self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) @@ -219,6 +221,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): pool = SSHConnectionPool( ssh_client=self.ssh_client, timeout=self.timeout, + maxsize=self.max_pool_size, host=self.host ) self.pools[url] = pool diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index b6191032..3e040c5a 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -74,15 +74,18 @@ class UnixHTTPAdapter(BaseHTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['pools', 'socket_path', - 'timeout'] + 'timeout', + 'max_pool_size'] def __init__(self, socket_url, timeout=60, - pool_connections=constants.DEFAULT_NUM_POOLS): + 
pool_connections=constants.DEFAULT_NUM_POOLS, + max_pool_size=constants.DEFAULT_MAX_POOL_SIZE): socket_path = socket_url.replace('http+unix://', '') if not socket_path.startswith('/'): socket_path = '/' + socket_path self.socket_path = socket_path self.timeout = timeout + self.max_pool_size = max_pool_size self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) @@ -95,7 +98,8 @@ class UnixHTTPAdapter(BaseHTTPAdapter): return pool pool = UnixHTTPConnectionPool( - url, self.socket_path, self.timeout + url, self.socket_path, self.timeout, + maxsize=self.max_pool_size ) self.pools[url] = pool diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index cc9ff8f2..ad88e845 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -5,7 +5,9 @@ import unittest import docker import pytest from docker.constants import ( - DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS) + DEFAULT_DOCKER_API_VERSION, DEFAULT_TIMEOUT_SECONDS, + DEFAULT_MAX_POOL_SIZE, IS_WINDOWS_PLATFORM +) from docker.utils import kwargs_from_env from . 
import fake_api @@ -15,8 +17,8 @@ try: except ImportError: import mock - TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs') +POOL_SIZE = 20 class ClientTest(unittest.TestCase): @@ -76,6 +78,84 @@ class ClientTest(unittest.TestCase): assert "'ContainerCollection' object is not callable" in s assert "docker.APIClient" in s + @pytest.mark.skipif( + IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' + ) + @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") + def test_default_pool_size_unix(self, mock_obj): + client = docker.DockerClient( + version=DEFAULT_DOCKER_API_VERSION + ) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + base_url = "{base_url}/v{version}/_ping".format( + base_url=client.api.base_url, + version=client.api._version + ) + + mock_obj.assert_called_once_with(base_url, + "/var/run/docker.sock", + 60, + maxsize=DEFAULT_MAX_POOL_SIZE + ) + + @pytest.mark.skipif( + not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' + ) + @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") + def test_default_pool_size_win(self, mock_obj): + client = docker.DockerClient( + version=DEFAULT_DOCKER_API_VERSION + ) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + mock_obj.assert_called_once_with("//./pipe/docker_engine", + 60, + maxsize=DEFAULT_MAX_POOL_SIZE + ) + + @pytest.mark.skipif( + IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' + ) + @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") + def test_pool_size_unix(self, mock_obj): + client = docker.DockerClient( + version=DEFAULT_DOCKER_API_VERSION, + max_pool_size=POOL_SIZE + ) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + base_url = "{base_url}/v{version}/_ping".format( + base_url=client.api.base_url, + version=client.api._version + ) + + mock_obj.assert_called_once_with(base_url, + "/var/run/docker.sock", + 60, + 
maxsize=POOL_SIZE + ) + + @pytest.mark.skipif( + not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' + ) + @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") + def test_pool_size_win(self, mock_obj): + client = docker.DockerClient( + version=DEFAULT_DOCKER_API_VERSION, + max_pool_size=POOL_SIZE + ) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + mock_obj.assert_called_once_with("//./pipe/docker_engine", + 60, + maxsize=POOL_SIZE + ) + class FromEnvTest(unittest.TestCase): @@ -112,3 +192,77 @@ class FromEnvTest(unittest.TestCase): client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) assert client.api.timeout == DEFAULT_TIMEOUT_SECONDS + + @pytest.mark.skipif( + IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' + ) + @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") + def test_default_pool_size_from_env_unix(self, mock_obj): + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + base_url = "{base_url}/v{version}/_ping".format( + base_url=client.api.base_url, + version=client.api._version + ) + + mock_obj.assert_called_once_with(base_url, + "/var/run/docker.sock", + 60, + maxsize=DEFAULT_MAX_POOL_SIZE + ) + + @pytest.mark.skipif( + not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' + ) + @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") + def test_default_pool_size_from_env_win(self, mock_obj): + client = docker.from_env(version=DEFAULT_DOCKER_API_VERSION) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + mock_obj.assert_called_once_with("//./pipe/docker_engine", + 60, + maxsize=DEFAULT_MAX_POOL_SIZE + ) + + @pytest.mark.skipif( + IS_WINDOWS_PLATFORM, reason='Unix Connection Pool only on Linux' + ) + @mock.patch("docker.transport.unixconn.UnixHTTPConnectionPool") + def test_pool_size_from_env_unix(self, mock_obj): + 
client = docker.from_env( + version=DEFAULT_DOCKER_API_VERSION, + max_pool_size=POOL_SIZE + ) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + base_url = "{base_url}/v{version}/_ping".format( + base_url=client.api.base_url, + version=client.api._version + ) + + mock_obj.assert_called_once_with(base_url, + "/var/run/docker.sock", + 60, + maxsize=POOL_SIZE + ) + + @pytest.mark.skipif( + not IS_WINDOWS_PLATFORM, reason='Npipe Connection Pool only on Windows' + ) + @mock.patch("docker.transport.npipeconn.NpipeHTTPConnectionPool") + def test_pool_size_from_env_win(self, mock_obj): + client = docker.from_env( + version=DEFAULT_DOCKER_API_VERSION, + max_pool_size=POOL_SIZE + ) + mock_obj.return_value.urlopen.return_value.status = 200 + client.ping() + + mock_obj.assert_called_once_with("//./pipe/docker_engine", + 60, + maxsize=POOL_SIZE + ) From f5531a94e1096f4c8456264f6511dfe89e1c825e Mon Sep 17 00:00:00 2001 From: aiordache Date: Tue, 20 Oct 2020 10:05:07 +0200 Subject: [PATCH 105/211] Fix ssh connection - don't override the host and port of the http pool Signed-off-by: aiordache --- docker/transport/sshconn.py | 111 +++++++++++++++++++----------------- tests/Dockerfile-ssh-dind | 2 +- 2 files changed, 59 insertions(+), 54 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index cdeeae4e..5cdaa275 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -1,9 +1,9 @@ -import io import paramiko import requests.adapters import six import logging import os +import signal import socket import subprocess @@ -23,40 +23,6 @@ except ImportError: RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer -def create_paramiko_client(base_url): - logging.getLogger("paramiko").setLevel(logging.WARNING) - ssh_client = paramiko.SSHClient() - base_url = six.moves.urllib_parse.urlparse(base_url) - ssh_params = { - "hostname": base_url.hostname, - "port": base_url.port, - "username": 
base_url.username - } - ssh_config_file = os.path.expanduser("~/.ssh/config") - if os.path.exists(ssh_config_file): - conf = paramiko.SSHConfig() - with open(ssh_config_file) as f: - conf.parse(f) - host_config = conf.lookup(base_url.hostname) - ssh_conf = host_config - if 'proxycommand' in host_config: - ssh_params["sock"] = paramiko.ProxyCommand( - ssh_conf['proxycommand'] - ) - if 'hostname' in host_config: - ssh_params['hostname'] = host_config['hostname'] - if 'identityfile' in host_config: - ssh_params['key_filename'] = host_config['identityfile'] - if base_url.port is None and 'port' in host_config: - ssh_params['port'] = ssh_conf['port'] - if base_url.username is None and 'user' in host_config: - ssh_params['username'] = ssh_conf['user'] - - ssh_client.load_system_host_keys() - ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) - return ssh_client, ssh_params - - class SSHSocket(socket.socket): def __init__(self, host): super(SSHSocket, self).__init__( @@ -80,7 +46,8 @@ class SSHSocket(socket.socket): ' '.join(args), shell=True, stdout=subprocess.PIPE, - stdin=subprocess.PIPE) + stdin=subprocess.PIPE, + preexec_fn=lambda: signal.signal(signal.SIGINT, signal.SIG_IGN)) def _write(self, data): if not self.proc or self.proc.stdin.closed: @@ -96,17 +63,18 @@ class SSHSocket(socket.socket): def send(self, data): return self._write(data) - def recv(self): + def recv(self, n): if not self.proc: raise Exception('SSH subprocess not initiated.' 
'connect() must be called first.') - return self.proc.stdout.read() + return self.proc.stdout.read(n) def makefile(self, mode): - if not self.proc or self.proc.stdout.closed: - buf = io.BytesIO() - buf.write(b'\n\n') - return buf + if not self.proc: + self.connect() + if six.PY3: + self.proc.stdout.channel = self + return self.proc.stdout def close(self): @@ -124,7 +92,7 @@ class SSHConnection(httplib.HTTPConnection, object): ) self.ssh_transport = ssh_transport self.timeout = timeout - self.host = host + self.ssh_host = host def connect(self): if self.ssh_transport: @@ -132,7 +100,7 @@ class SSHConnection(httplib.HTTPConnection, object): sock.settimeout(self.timeout) sock.exec_command('docker system dial-stdio') else: - sock = SSHSocket(self.host) + sock = SSHSocket(self.ssh_host) sock.settimeout(self.timeout) sock.connect() @@ -147,16 +115,16 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): 'localhost', timeout=timeout, maxsize=maxsize ) self.ssh_transport = None + self.timeout = timeout if ssh_client: self.ssh_transport = ssh_client.get_transport() - self.timeout = timeout - self.host = host - self.port = None + self.ssh_host = host + self.ssh_port = None if ':' in host: - self.host, self.port = host.split(':') + self.ssh_host, self.ssh_port = host.split(':') def _new_conn(self): - return SSHConnection(self.ssh_transport, self.timeout, self.host) + return SSHConnection(self.ssh_transport, self.timeout, self.ssh_host) # When re-using connections, urllib3 calls fileno() on our # SSH channel instance, quickly overloading our fd limit. 
To avoid this, @@ -193,10 +161,10 @@ class SSHHTTPAdapter(BaseHTTPAdapter): shell_out=True): self.ssh_client = None if not shell_out: - self.ssh_client, self.ssh_params = create_paramiko_client(base_url) + self._create_paramiko_client(base_url) self._connect() - base_url = base_url.lstrip('ssh://') - self.host = base_url + + self.ssh_host = base_url.lstrip('ssh://') self.timeout = timeout self.max_pool_size = max_pool_size self.pools = RecentlyUsedContainer( @@ -204,11 +172,48 @@ class SSHHTTPAdapter(BaseHTTPAdapter): ) super(SSHHTTPAdapter, self).__init__() + def _create_paramiko_client(self, base_url): + logging.getLogger("paramiko").setLevel(logging.WARNING) + self.ssh_client = paramiko.SSHClient() + base_url = six.moves.urllib_parse.urlparse(base_url) + self.ssh_params = { + "hostname": base_url.hostname, + "port": base_url.port, + "username": base_url.username + } + ssh_config_file = os.path.expanduser("~/.ssh/config") + if os.path.exists(ssh_config_file): + conf = paramiko.SSHConfig() + with open(ssh_config_file) as f: + conf.parse(f) + host_config = conf.lookup(base_url.hostname) + self.ssh_conf = host_config + if 'proxycommand' in host_config: + self.ssh_params["sock"] = paramiko.ProxyCommand( + self.ssh_conf['proxycommand'] + ) + if 'hostname' in host_config: + self.ssh_params['hostname'] = host_config['hostname'] + if base_url.port is None and 'port' in host_config: + self.ssh_params['port'] = self.ssh_conf['port'] + if base_url.username is None and 'user' in host_config: + self.ssh_params['username'] = self.ssh_conf['user'] + + self.ssh_client.load_system_host_keys() + self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) + def _connect(self): if self.ssh_client: self.ssh_client.connect(**self.ssh_params) def get_connection(self, url, proxies=None): + if not self.ssh_client: + return SSHConnectionPool( + ssh_client=self.ssh_client, + timeout=self.timeout, + maxsize=self.max_pool_size, + host=self.ssh_host + ) with self.pools.lock: pool = 
self.pools.get(url) if pool: @@ -222,7 +227,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): ssh_client=self.ssh_client, timeout=self.timeout, maxsize=self.max_pool_size, - host=self.host + host=self.ssh_host ) self.pools[url] = pool diff --git a/tests/Dockerfile-ssh-dind b/tests/Dockerfile-ssh-dind index 9d8f0eab..aba9bb34 100644 --- a/tests/Dockerfile-ssh-dind +++ b/tests/Dockerfile-ssh-dind @@ -10,7 +10,7 @@ RUN apk add --no-cache \ RUN ssh-keygen -A # copy the test SSH config -RUN echo "IgnoreUserKnownHosts yes" >> /etc/ssh/sshd_config && \ +RUN echo "IgnoreUserKnownHosts yes" > /etc/ssh/sshd_config && \ echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config && \ echo "PermitRootLogin yes" >> /etc/ssh/sshd_config From db9af44e5b36b81d2dc6643dda5d674a1ae462c9 Mon Sep 17 00:00:00 2001 From: aiordache Date: Wed, 18 Nov 2020 19:00:27 +0100 Subject: [PATCH 106/211] Fix docs typo Signed-off-by: aiordache --- docker/models/containers.py | 2 +- docker/types/containers.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/models/containers.py b/docker/models/containers.py index 120386a1..36cbbc41 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -683,7 +683,7 @@ class ContainerCollection(Collection): to a single container port. For example, ``{'1111/tcp': [1234, 4567]}``. - Imcompatible with ``host`` in ``network_mode``. + Incompatible with ``host`` network mode. privileged (bool): Give extended privileges to this container. publish_all_ports (bool): Publish all ports to the host. 
read_only (bool): Mount the container's root filesystem as read diff --git a/docker/types/containers.py b/docker/types/containers.py index 1df3fff1..d1938c91 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -334,7 +334,7 @@ class HostConfig(dict): if dns_search: self['DnsSearch'] = dns_search - if network_mode is 'host' and port_bindings is not None: + if network_mode is 'host' and port_bindings: raise host_config_incompatible_error( 'network_mode', 'host', 'port_bindings' ) @@ -670,7 +670,7 @@ def host_config_value_error(param, param_value): def host_config_incompatible_error(param, param_value, incompatible_param): - error_msg = 'Incompatible {1} in {0} is not compatible with {2}' + error_msg = '\"{1}\" {0} is incompatible with {2}' return errors.InvalidArgument( error_msg.format(param, param_value, incompatible_param) ) From a0c51be2289dfa1ff05eb1834452e5808714b64a Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 19 Nov 2020 15:38:26 +0100 Subject: [PATCH 107/211] Syntax warning fix Signed-off-by: aiordache --- docker/types/containers.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/docker/types/containers.py b/docker/types/containers.py index d1938c91..9fa4656a 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -334,15 +334,11 @@ class HostConfig(dict): if dns_search: self['DnsSearch'] = dns_search - if network_mode is 'host' and port_bindings: + if network_mode == 'host' and port_bindings: raise host_config_incompatible_error( 'network_mode', 'host', 'port_bindings' ) - - if network_mode: - self['NetworkMode'] = network_mode - elif network_mode is None: - self['NetworkMode'] = 'default' + self['NetworkMode'] = network_mode or 'default' if restart_policy: if not isinstance(restart_policy, dict): From 260114229a9adf47bf76ed0ed7e9da9364a7d30f Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 19 Nov 2020 18:51:59 +0100 Subject: [PATCH 108/211] Update Jenkinsfile with docker 
registry credentials Signed-off-by: aiordache --- Jenkinsfile | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index fc716d80..b5ea7a4b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -25,10 +25,11 @@ def buildImages = { -> imageNamePy2 = "${imageNameBase}:py2-${gitCommit()}" imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}" imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}" - - buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "") - buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7") - buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7") + withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { + buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "") + buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7") + buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7") + } } } } From c854aba15e14cdb6b84f34770e0ee8398f54b393 Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 19 Nov 2020 19:33:24 +0100 Subject: [PATCH 109/211] Mount docker config to DIND containers for authentication Signed-off-by: aiordache --- Jenkinsfile | 73 ++++++++++++++++++++++++++++------------------------- 1 file changed, 38 insertions(+), 35 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index b5ea7a4b..4f970770 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -83,41 +83,44 @@ def runTests = { Map settings -> def dindContainerName = "dpy-dind-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" def testContainerName = "dpy-tests-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" - try { - sh """docker network create ${testNetwork}""" - sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ 
- ${imageDindSSH} dockerd -H tcp://0.0.0.0:2375 - """ - sh """docker run --rm \\ - --name ${testContainerName} \\ - -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\ - -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ - --network ${testNetwork} \\ - --volumes-from ${dindContainerName} \\ - ${testImage} \\ - py.test -v -rxs --cov=docker --ignore=tests/ssh tests/ - """ - sh """docker stop ${dindContainerName}""" - - // start DIND container with SSH - sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ - ${imageDindSSH} dockerd --experimental""" - sh """docker exec ${dindContainerName} sh -c /usr/sbin/sshd """ - // run SSH tests only - sh """docker run --rm \\ - --name ${testContainerName} \\ - -e "DOCKER_HOST=ssh://${dindContainerName}:22" \\ - -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ - --network ${testNetwork} \\ - --volumes-from ${dindContainerName} \\ - ${testImage} \\ - py.test -v -rxs --cov=docker tests/ssh - """ - } finally { - sh """ - docker stop ${dindContainerName} - docker network rm ${testNetwork} - """ + withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { + try { + sh """docker network create ${testNetwork}""" + sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ + ${imageDindSSH} dockerd -H tcp://0.0.0.0:2375 + """ + sh """docker run --rm \\ + --name ${testContainerName} \\ + -e "DOCKER_HOST=tcp://${dindContainerName}:2375" \\ + -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ + --network ${testNetwork} \\ + --volumes-from ${dindContainerName} \\ + -v ~/.docker/config.json:/root/.docker/config.json \\ + ${testImage} \\ + py.test -v -rxs --cov=docker --ignore=tests/ssh tests/ + """ + sh """docker stop ${dindContainerName}""" + // start DIND container with SSH + sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ + ${imageDindSSH} dockerd --experimental""" + sh """docker exec 
${dindContainerName} sh -c /usr/sbin/sshd """ + // run SSH tests only + sh """docker run --rm \\ + --name ${testContainerName} \\ + -e "DOCKER_HOST=ssh://${dindContainerName}:22" \\ + -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ + --network ${testNetwork} \\ + --volumes-from ${dindContainerName} \\ + -v ~/.docker/config.json:/root/.docker/config.json \\ + ${testImage} \\ + py.test -v -rxs --cov=docker tests/ssh + """ + } finally { + sh """ + docker stop ${dindContainerName} + docker network rm ${testNetwork} + """ + } } } } From 990ef4904c22aac2d35a7ae3a583b57d170cdfd7 Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 23 Nov 2020 13:16:23 +0100 Subject: [PATCH 110/211] Post-release v4.4.0 Signed-off-by: aiordache --- docker/version.py | 2 +- docs/change-log.md | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index f40347aa..bc09e637 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.4.0-dev" +version = "4.5.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index 11c055fd..fe3dc71b 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,24 @@ Change log ========== +4.4.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/67?closed=1) + +### Features +- Add an alternative SSH connection to the paramiko one, based on shelling out to the SSh client. 
Similar to the behaviour of Docker cli +- Default image tag to `latest` on `pull` + +### Bugfixes +- Fix plugin model upgrade +- Fix examples URL in ulimits + +### Miscellaneous +- Improve exception messages for server and client errors +- Bump cryptography from 2.3 to 3.2 + + 4.3.1 ----- From 1757c974fa3a05b0e9b783af85242b18df09d05d Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Tue, 24 Nov 2020 12:04:42 +0100 Subject: [PATCH 111/211] docker/api/image: replace use of deprecated "filter" argument The "filter" argument was deprecated in docker 1.13 (API version 1.25), and removed from API v1.41 and up. See https://github.com/docker/cli/blob/v20.10.0-rc1/docs/deprecated.md#filter-param-for-imagesjson-endpoint This patch applies the name as "reference" filter, instead of "filter" for API 1.25 and up. Signed-off-by: Sebastiaan van Stijn --- docker/api/image.py | 10 +++++++++- tests/unit/api_image_test.py | 19 +++++++++++++++---- 2 files changed, 24 insertions(+), 5 deletions(-) diff --git a/docker/api/image.py b/docker/api/image.py index 4082bfb3..56c5448e 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -81,10 +81,18 @@ class ImageApiMixin(object): If the server returns an error. 
""" params = { - 'filter': name, 'only_ids': 1 if quiet else 0, 'all': 1 if all else 0, } + if name: + if utils.version_lt(self._version, '1.25'): + # only use "filter" on API 1.24 and under, as it is deprecated + params['filter'] = name + else: + if filters: + filters['reference'] = name + else: + filters = {'reference': name} if filters: params['filters'] = utils.convert_filters(filters) res = self._result(self._get(self._url("/images/json"), params=params), diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py index 4b4fb977..0b60df43 100644 --- a/tests/unit/api_image_test.py +++ b/tests/unit/api_image_test.py @@ -26,7 +26,18 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', url_prefix + 'images/json', - params={'filter': None, 'only_ids': 0, 'all': 1}, + params={'only_ids': 0, 'all': 1}, + timeout=DEFAULT_TIMEOUT_SECONDS + ) + + def test_images_name(self): + self.client.images('foo:bar') + + fake_request.assert_called_with( + 'GET', + url_prefix + 'images/json', + params={'only_ids': 0, 'all': 0, + 'filters': '{"reference": ["foo:bar"]}'}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -36,7 +47,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', url_prefix + 'images/json', - params={'filter': None, 'only_ids': 1, 'all': 1}, + params={'only_ids': 1, 'all': 1}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -46,7 +57,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', url_prefix + 'images/json', - params={'filter': None, 'only_ids': 1, 'all': 0}, + params={'only_ids': 1, 'all': 0}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -56,7 +67,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', url_prefix + 'images/json', - params={'filter': None, 'only_ids': 0, 'all': 0, + params={'only_ids': 0, 'all': 0, 'filters': '{"dangling": ["true"]}'}, timeout=DEFAULT_TIMEOUT_SECONDS ) From d8bbbf23517007083a4e9f66e6e3028061c0e5ed Mon Sep 17 00:00:00 2001 From: Ulysses 
Souza Date: Mon, 14 Dec 2020 14:06:23 -0300 Subject: [PATCH 112/211] Add Github Actions Signed-off-by: Ulysses Souza --- .github/workflows/ci.yml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 .github/workflows/ci.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..9fb9a455 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,27 @@ +name: Python package + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + max-parallel: 1 + matrix: + python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9] + + steps: + - uses: actions/checkout@v1 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install tox tox-gh-actions==2.2.0 + - name: Test with tox + run: | + docker logout + rm -rf ~/.docker + tox From ab0d65e2e0f09d47886b79f874e344da9a523286 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Mon, 14 Dec 2020 14:17:52 -0300 Subject: [PATCH 113/211] Remove travis Signed-off-by: Ulysses Souza --- .travis.yml | 20 -------------------- 1 file changed, 20 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 7b3d7248..00000000 --- a/.travis.yml +++ /dev/null @@ -1,20 +0,0 @@ -sudo: false -language: python -matrix: - include: - - python: 2.7 - env: TOXENV=py27 - - python: 3.5 - env: TOXENV=py35 - - python: 3.6 - env: TOXENV=py36 - - python: 3.7 - env: TOXENV=py37 - dist: xenial - sudo: true - - env: TOXENV=flake8 - -install: - - pip install tox==2.9.1 -script: - - tox From 4757eea80c49b7d593537ea9a0b9e5b398570745 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Fri, 18 Dec 2020 11:51:55 -0300 Subject: [PATCH 114/211] Trigger GHA on pull_request Signed-off-by: Ulysses Souza --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9fb9a455..c3c786a4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,6 @@ name: Python package -on: [push] +on: [push, pull_request] jobs: build: From 3ec7fee7362eecfd7bcfd62edcfac3380422fb64 Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 17 Dec 2020 12:09:27 +0100 Subject: [PATCH 115/211] Avoid setting unsuported parameter for subprocess.Popen on Windows Signed-off-by: aiordache --- docker/transport/sshconn.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 5cdaa275..7f4b2a2d 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -42,12 +42,17 @@ class SSHSocket(socket.socket): port, 'docker system dial-stdio' ] + + preexec_func = None + if not constants.IS_WINDOWS_PLATFORM: + preexec_func = lambda: signal.signal(signal.SIGINT, signal.SIG_IGN) + self.proc = subprocess.Popen( ' '.join(args), shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, - preexec_fn=lambda: signal.signal(signal.SIGINT, signal.SIG_IGN)) + preexec_fn=preexec_func) def _write(self, data): if not self.proc or self.proc.stdin.closed: From 2f3e0f9fc441d2a637bfc0816d7eb6d814a7cd72 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 22 Dec 2020 17:19:24 -0300 Subject: [PATCH 116/211] Prepare release 4.4.1 Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- docs/change-log.md | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index bc09e637..600d2454 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.5.0-dev" +version = "4.4.1" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index fe3dc71b..351e2c51 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,16 @@ Change log 
========== +4.4.1 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/69?closed=1) + +### Bugfixes +- Avoid setting unsuported parameter for subprocess.Popen on Windows +- Replace use of deprecated "filter" argument on ""docker/api/image" + + 4.4.0 ----- From b72926b3822e018baa8d0a82a76c32881932d6a2 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 22 Dec 2020 17:50:19 -0300 Subject: [PATCH 117/211] Post 4.4.1 release Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 600d2454..bc09e637 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.4.1" +version = "4.5.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 2426a5ffd57c49c257988df498f402978e9d901a Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Thu, 24 Dec 2020 15:14:05 +0100 Subject: [PATCH 118/211] setup.py: Add support for Python 3.8 and 3.9 Signed-off-by: Christian Clauss --- setup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.py b/setup.py index c7022950..330ab3e3 100644 --- a/setup.py +++ b/setup.py @@ -84,6 +84,8 @@ setup( 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Topic :: Software Development', 'Topic :: Utilities', 'License :: OSI Approved :: Apache Software License', From f0ab0ed25d7b031b13226f1b44ce8d33a56d1ffa Mon Sep 17 00:00:00 2001 From: Piotr Wojciechowski Date: Fri, 25 Dec 2020 16:39:44 +0100 Subject: [PATCH 119/211] Support for docker.types.Placement.MaxReplicas (new in API 1.40) in Docker Swarm Service Signed-off-by: WojciechowskiPiotr --- docker/models/services.py | 3 +++ docker/types/services.py | 6 +++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git 
a/docker/models/services.py b/docker/models/services.py index a35687b3..a29ff132 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -157,6 +157,8 @@ class ServiceCollection(Collection): constraints. preferences (list of tuple): :py:class:`~docker.types.Placement` preferences. + maxreplicas (int): :py:class:`~docker.types.Placement` maxreplicas + or (int) representing maximum number of replicas per node. platforms (list of tuple): A list of platform constraints expressed as ``(arch, os)`` tuples. container_labels (dict): Labels to apply to the container. @@ -319,6 +321,7 @@ PLACEMENT_KWARGS = [ 'constraints', 'preferences', 'platforms', + 'maxreplicas', ] diff --git a/docker/types/services.py b/docker/types/services.py index 05dda15d..3cde8592 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -659,10 +659,12 @@ class Placement(dict): are provided in order from highest to lowest precedence and are expressed as ``(strategy, descriptor)`` tuples. See :py:class:`PlacementPreference` for details. 
+ maxreplicas (int): Maximum number of replicas per node platforms (:py:class:`list` of tuple): A list of platforms expressed as ``(arch, os)`` tuples """ - def __init__(self, constraints=None, preferences=None, platforms=None): + def __init__(self, constraints=None, preferences=None, maxreplicas=None, + platforms=None): if constraints is not None: self['Constraints'] = constraints if preferences is not None: @@ -671,6 +673,8 @@ class Placement(dict): if isinstance(pref, tuple): pref = PlacementPreference(*pref) self['Preferences'].append(pref) + if maxreplicas is not None: + self['MaxReplicas'] = maxreplicas if platforms: self['Platforms'] = [] for plat in platforms: From ce2669e3edfe5d3215ba501cc9771fc0ffad680a Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Thu, 24 Dec 2020 15:29:54 +0100 Subject: [PATCH 120/211] print() is a function in Python 3 Signed-off-by: Christian Clauss --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3ff124d7..8ce684b5 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ You can stream logs: ```python >>> for line in container.logs(stream=True): -... print line.strip() +... print(line.strip()) Reticulating spline 2... Reticulating spline 3... ... From 10ff4030793ac93ef5f4cc079dc8c71c6fa60a74 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 28 Dec 2020 18:51:17 +0100 Subject: [PATCH 121/211] print() is a function in Python 3 Like #2740 but for the docs Signed-off-by: Christian Clauss --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 63e85d36..93b30d4a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -58,7 +58,7 @@ You can stream logs: .. code-block:: python >>> for line in container.logs(stream=True): - ... print line.strip() + ... print(line.strip()) Reticulating spline 2... Reticulating spline 3... ... 
From 0edea80c415652f13a25b20e5937cd9c41e35063 Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 8 Feb 2021 20:04:14 +0100 Subject: [PATCH 122/211] Update base image to `dockerpinata/docker-py` in Jenkinsfile Signed-off-by: aiordache --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 4f970770..d99c6054 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,6 +1,6 @@ #!groovy -def imageNameBase = "dockerbuildbot/docker-py" +def imageNameBase = "dockerpinata/docker-py" def imageNamePy2 def imageNamePy3 def imageDindSSH From caab390696940dce51f93e04e43419a90595216e Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 8 Feb 2021 19:03:58 +0100 Subject: [PATCH 123/211] Fix host trimming and remove quiet flag for the ssh connection Signed-off-by: aiordache --- docker/transport/sshconn.py | 33 ++++++++++++++++++++------------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 7f4b2a2d..7593b5bd 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -29,26 +29,33 @@ class SSHSocket(socket.socket): socket.AF_INET, socket.SOCK_STREAM) self.host = host self.port = None + self.user = None if ':' in host: self.host, self.port = host.split(':') + if '@' in self.host: + self.user, self.host = host.split('@') + self.proc = None def connect(self, **kwargs): - port = '' if not self.port else '-p {}'.format(self.port) - args = [ - 'ssh', - '-q', - self.host, - port, - 'docker system dial-stdio' - ] + args = ['ssh'] + if self.user: + args = args + ['-l', self.user] + + if self.port: + args = args + ['-p', self.port] + + args = args + ['--', self.host, 'docker system dial-stdio'] preexec_func = None if not constants.IS_WINDOWS_PLATFORM: - preexec_func = lambda: signal.signal(signal.SIGINT, signal.SIG_IGN) + def f(): + signal.signal(signal.SIGINT, signal.SIG_IGN) + preexec_func = f self.proc = subprocess.Popen( ' '.join(args), + 
env=os.environ, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, @@ -124,9 +131,6 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): if ssh_client: self.ssh_transport = ssh_client.get_transport() self.ssh_host = host - self.ssh_port = None - if ':' in host: - self.ssh_host, self.ssh_port = host.split(':') def _new_conn(self): return SSHConnection(self.ssh_transport, self.timeout, self.ssh_host) @@ -169,7 +173,10 @@ class SSHHTTPAdapter(BaseHTTPAdapter): self._create_paramiko_client(base_url) self._connect() - self.ssh_host = base_url.lstrip('ssh://') + self.ssh_host = base_url + if base_url.startswith('ssh://'): + self.ssh_host = base_url[len('ssh://'):] + self.timeout = timeout self.max_pool_size = max_pool_size self.pools = RecentlyUsedContainer( From 514f98a0d69c9350d8f19088e571ddbebfe89b5e Mon Sep 17 00:00:00 2001 From: WojciechowskiPiotr Date: Tue, 9 Feb 2021 19:45:52 +0100 Subject: [PATCH 124/211] Support for docker.types.Placement.MaxReplicas (new in API 1.40) in Docker Swarm Service Signed-off-by: WojciechowskiPiotr --- docker/types/services.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/types/services.py b/docker/types/services.py index 3cde8592..29498e97 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -663,8 +663,8 @@ class Placement(dict): platforms (:py:class:`list` of tuple): A list of platforms expressed as ``(arch, os)`` tuples """ - def __init__(self, constraints=None, preferences=None, maxreplicas=None, - platforms=None): + def __init__(self, constraints=None, preferences=None, platforms=None, + maxreplicas=None): if constraints is not None: self['Constraints'] = constraints if preferences is not None: From da32a2f1a2f21573627a5df0ea309048e4058b9f Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Thu, 24 Dec 2020 15:07:45 +0100 Subject: [PATCH 125/211] GitHub Actions: Upgrade actions/checkout https://github.com/actions/checkout/releases Signed-off-by: 
Christian Clauss --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c3c786a4..d8d55ea0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,7 +11,7 @@ jobs: python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9] steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: From 9e007469ef0d206517618bcc66a379d024ed715c Mon Sep 17 00:00:00 2001 From: Stefan Scherer Date: Tue, 9 Feb 2021 20:39:54 +0100 Subject: [PATCH 126/211] Update CI to ubuntu-2004 Signed-off-by: Stefan Scherer --- Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index d99c6054..e7c33215 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -18,7 +18,7 @@ def buildImage = { name, buildargs, pyTag -> } def buildImages = { -> - wrappedNode(label: "amd64 && ubuntu-1804 && overlay2", cleanWorkspace: true) { + wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) { stage("build image") { checkout(scm) @@ -36,7 +36,7 @@ def buildImages = { -> def getDockerVersions = { -> def dockerVersions = ["19.03.12"] - wrappedNode(label: "amd64 && ubuntu-1804 && overlay2") { + wrappedNode(label: "amd64 && ubuntu-2004 && overlay2") { def result = sh(script: """docker run --rm \\ --entrypoint=python \\ ${imageNamePy3} \\ @@ -77,7 +77,7 @@ def runTests = { Map settings -> } { -> - wrappedNode(label: "amd64 && ubuntu-1804 && overlay2", cleanWorkspace: true) { + wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) { stage("test python=${pythonVersion} / docker=${dockerVersion}") { checkout(scm) def dindContainerName = "dpy-dind-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" From 6d1dffe3e5738ebe02294c795a7e5e630f7913c2 Mon Sep 17 00:00:00 2001 From: WojciechowskiPiotr Date: Tue, 9 Feb 2021 21:37:26 
+0100 Subject: [PATCH 127/211] Unit and integration tests added Signed-off-by: WojciechowskiPiotr --- tests/integration/api_service_test.py | 13 +++++++++++++ tests/unit/models_services_test.py | 2 ++ 2 files changed, 15 insertions(+) diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index b6b7ec53..7e5336e2 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -471,6 +471,19 @@ class ServiceTest(BaseAPIIntegrationTest): assert 'Placement' in svc_info['Spec']['TaskTemplate'] assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt + @requires_api_version('1.40') + def test_create_service_with_placement_maxreplicas(self): + container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) + placemt = docker.types.Placement(maxreplicas=1) + task_tmpl = docker.types.TaskTemplate( + container_spec, placement=placemt + ) + name = self.get_service_name() + svc_id = self.client.create_service(task_tmpl, name=name) + svc_info = self.client.inspect_service(svc_id) + assert 'Placement' in svc_info['Spec']['TaskTemplate'] + assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt + def test_create_service_with_endpoint_spec(self): container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) diff --git a/tests/unit/models_services_test.py b/tests/unit/models_services_test.py index a4ac50c3..07bb5897 100644 --- a/tests/unit/models_services_test.py +++ b/tests/unit/models_services_test.py @@ -28,6 +28,7 @@ class CreateServiceKwargsTest(unittest.TestCase): 'constraints': ['foo=bar'], 'preferences': ['bar=baz'], 'platforms': [('x86_64', 'linux')], + 'maxreplicas': 1 }) task_template = kwargs.pop('task_template') @@ -47,6 +48,7 @@ class CreateServiceKwargsTest(unittest.TestCase): 'Constraints': ['foo=bar'], 'Preferences': ['bar=baz'], 'Platforms': [{'Architecture': 'x86_64', 'OS': 'linux'}], + 'MaxReplicas': 1, } assert 
task_template['LogDriver'] == { 'Name': 'logdriver', From f520b4c4ebfe01484563681e7d8411de44e5ee85 Mon Sep 17 00:00:00 2001 From: aiordache Date: Tue, 9 Feb 2021 18:55:25 +0100 Subject: [PATCH 128/211] Update GH action step Signed-off-by: aiordache --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d8d55ea0..1f119c90 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,9 +19,9 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install tox tox-gh-actions==2.2.0 - - name: Test with tox + pip install -r test-requirements.txt -r requirements.txt + - name: Test with pytest run: | docker logout rm -rf ~/.docker - tox + py.test -v --cov=docker tests/unit From ccab78840e78ff768d2559c8539660e83e679fe2 Mon Sep 17 00:00:00 2001 From: aiordache Date: Tue, 9 Feb 2021 19:55:35 +0100 Subject: [PATCH 129/211] Bump cffi to 1.14.4 Signed-off-by: aiordache --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0edd4e1e..43a688fd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ appdirs==1.4.3 asn1crypto==0.22.0 backports.ssl-match-hostname==3.5.0.1 -cffi==1.10.0 +cffi==1.14.4 cryptography==3.2 enum34==1.1.6 idna==2.5 From 9556b890f9a1a7488adac06792aecf767a20f1d3 Mon Sep 17 00:00:00 2001 From: Stefan Scherer Date: Wed, 10 Feb 2021 16:57:30 +0100 Subject: [PATCH 130/211] Remove wrappedNode Signed-off-by: Stefan Scherer --- Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index e7c33215..d333f425 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -18,7 +18,7 @@ def buildImage = { name, buildargs, pyTag -> } def buildImages = { -> - wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) { + node("amd64 && ubuntu-2004 && overlay2") { stage("build image") { 
checkout(scm) @@ -36,7 +36,7 @@ def buildImages = { -> def getDockerVersions = { -> def dockerVersions = ["19.03.12"] - wrappedNode(label: "amd64 && ubuntu-2004 && overlay2") { + node("amd64 && ubuntu-2004 && overlay2") { def result = sh(script: """docker run --rm \\ --entrypoint=python \\ ${imageNamePy3} \\ @@ -77,7 +77,7 @@ def runTests = { Map settings -> } { -> - wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) { + node("amd64 && ubuntu-2004 && overlay2") { stage("test python=${pythonVersion} / docker=${dockerVersion}") { checkout(scm) def dindContainerName = "dpy-dind-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" From 56d4b09700cdd9a7a99b5840601af946a63f3bfa Mon Sep 17 00:00:00 2001 From: Vlad Romanenko Date: Mon, 30 Nov 2020 14:18:54 +0000 Subject: [PATCH 131/211] Fix doc formatting Signed-off-by: Vlad Romanenko --- docker/api/client.py | 2 +- docker/api/daemon.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/api/client.py b/docker/api/client.py index fbf7ad45..2b67291a 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -497,7 +497,7 @@ class APIClient( Args: dockercfg_path (str): Use a custom path for the Docker config file (default ``$HOME/.docker/config.json`` if present, - otherwise``$HOME/.dockercfg``) + otherwise ``$HOME/.dockercfg``) Returns: None diff --git a/docker/api/daemon.py b/docker/api/daemon.py index f715a131..6b719268 100644 --- a/docker/api/daemon.py +++ b/docker/api/daemon.py @@ -109,7 +109,7 @@ class DaemonApiMixin(object): the Docker server. 
dockercfg_path (str): Use a custom path for the Docker config file (default ``$HOME/.docker/config.json`` if present, - otherwise``$HOME/.dockercfg``) + otherwise ``$HOME/.dockercfg``) Returns: (dict): The response from the login request From 94d7983ef0e0a421fd1a84320dea960e78605ab3 Mon Sep 17 00:00:00 2001 From: Stefan Scherer Date: Thu, 11 Feb 2021 10:24:57 +0100 Subject: [PATCH 132/211] Revert back to wrappedNode Signed-off-by: Stefan Scherer --- Jenkinsfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index d333f425..e7c33215 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -18,7 +18,7 @@ def buildImage = { name, buildargs, pyTag -> } def buildImages = { -> - node("amd64 && ubuntu-2004 && overlay2") { + wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) { stage("build image") { checkout(scm) @@ -36,7 +36,7 @@ def buildImages = { -> def getDockerVersions = { -> def dockerVersions = ["19.03.12"] - node("amd64 && ubuntu-2004 && overlay2") { + wrappedNode(label: "amd64 && ubuntu-2004 && overlay2") { def result = sh(script: """docker run --rm \\ --entrypoint=python \\ ${imageNamePy3} \\ @@ -77,7 +77,7 @@ def runTests = { Map settings -> } { -> - node("amd64 && ubuntu-2004 && overlay2") { + wrappedNode(label: "amd64 && ubuntu-2004 && overlay2", cleanWorkspace: true) { stage("test python=${pythonVersion} / docker=${dockerVersion}") { checkout(scm) def dindContainerName = "dpy-dind-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" From 6de6936f5d2d4decb2150f1816d4dd94b73649f1 Mon Sep 17 00:00:00 2001 From: Stefan Scherer Date: Thu, 11 Feb 2021 17:52:56 +0100 Subject: [PATCH 133/211] Use DOCKER_CONFIG to have creds in dind environment Signed-off-by: Stefan Scherer --- Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index e7c33215..b85598f8 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -95,7 +95,7 @@ def runTests = { 
Map settings -> -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ --network ${testNetwork} \\ --volumes-from ${dindContainerName} \\ - -v ~/.docker/config.json:/root/.docker/config.json \\ + -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \\ ${testImage} \\ py.test -v -rxs --cov=docker --ignore=tests/ssh tests/ """ @@ -111,7 +111,7 @@ def runTests = { Map settings -> -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ --network ${testNetwork} \\ --volumes-from ${dindContainerName} \\ - -v ~/.docker/config.json:/root/.docker/config.json \\ + -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \\ ${testImage} \\ py.test -v -rxs --cov=docker tests/ssh """ From 00da4dc0eae7d491c16384077f4d4da9a58836b1 Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 11 Feb 2021 19:58:35 +0100 Subject: [PATCH 134/211] Run unit tests in a container with no .docker/config mount Signed-off-by: aiordache --- Jenkinsfile | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index b85598f8..471072bf 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -85,6 +85,13 @@ def runTests = { Map settings -> def testNetwork = "dpy-testnet-\$BUILD_NUMBER-\$EXECUTOR_NUMBER-${pythonVersion}-${dockerVersion}" withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { try { + // unit tests + sh """docker run --rm \\ + -e 'DOCKER_TEST_API_VERSION=${apiVersion}' \\ + ${testImage} \\ + py.test -v -rxs --cov=docker tests/unit + """ + // integration tests sh """docker network create ${testNetwork}""" sh """docker run --rm -d --name ${dindContainerName} -v /tmp --privileged --network ${testNetwork} \\ ${imageDindSSH} dockerd -H tcp://0.0.0.0:2375 @@ -97,7 +104,7 @@ def runTests = { Map settings -> --volumes-from ${dindContainerName} \\ -v $DOCKER_CONFIG/config.json:/root/.docker/config.json \\ ${testImage} \\ - py.test -v -rxs --cov=docker --ignore=tests/ssh tests/ + py.test -v -rxs --cov=docker tests/integration """ sh """docker stop ${dindContainerName}""" 
// start DIND container with SSH From 2807fde6c991596ac12853995c931c925128ee61 Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 18 Feb 2021 12:56:46 +0100 Subject: [PATCH 135/211] Fix SSH port parsing and add regression tests Signed-off-by: aiordache --- docker/transport/sshconn.py | 8 ++++---- tests/unit/sshadapter_test.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 4 deletions(-) create mode 100644 tests/unit/sshadapter_test.py diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 7593b5bd..fbfdf416 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -30,10 +30,10 @@ class SSHSocket(socket.socket): self.host = host self.port = None self.user = None - if ':' in host: - self.host, self.port = host.split(':') + if ':' in self.host: + self.host, self.port = self.host.split(':') if '@' in self.host: - self.user, self.host = host.split('@') + self.user, self.host = self.host.split('@') self.proc = None @@ -167,7 +167,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): def __init__(self, base_url, timeout=60, pool_connections=constants.DEFAULT_NUM_POOLS, max_pool_size=constants.DEFAULT_MAX_POOL_SIZE, - shell_out=True): + shell_out=False): self.ssh_client = None if not shell_out: self._create_paramiko_client(base_url) diff --git a/tests/unit/sshadapter_test.py b/tests/unit/sshadapter_test.py new file mode 100644 index 00000000..ddee5920 --- /dev/null +++ b/tests/unit/sshadapter_test.py @@ -0,0 +1,32 @@ +import unittest +import docker +from docker.transport.sshconn import SSHSocket + +class SSHAdapterTest(unittest.TestCase): + def test_ssh_hostname_prefix_trim(self): + conn = docker.transport.SSHHTTPAdapter(base_url="ssh://user@hostname:1234", shell_out=True) + assert conn.ssh_host == "user@hostname:1234" + + def test_ssh_parse_url(self): + c = SSHSocket(host="user@hostname:1234") + assert c.host == "hostname" + assert c.port == "1234" + assert c.user == "user" + + def 
test_ssh_parse_hostname_only(self): + c = SSHSocket(host="hostname") + assert c.host == "hostname" + assert c.port == None + assert c.user == None + + def test_ssh_parse_user_and_hostname(self): + c = SSHSocket(host="user@hostname") + assert c.host == "hostname" + assert c.port == None + assert c.user == "user" + + def test_ssh_parse_hostname_and_port(self): + c = SSHSocket(host="hostname:22") + assert c.host == "hostname" + assert c.port == "22" + assert c.user == None \ No newline at end of file From e6689e0bb9af849bd9d1509fd3b2db52e0d6d776 Mon Sep 17 00:00:00 2001 From: aiordache Date: Thu, 18 Feb 2021 10:23:20 +0100 Subject: [PATCH 136/211] Post-release 4.4.2 changelog updates Signed-off-by: aiordache --- docs/change-log.md | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/docs/change-log.md b/docs/change-log.md index 351e2c51..f666697c 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,18 @@ Change log ========== +4.4.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/71?closed=1) + +### Bugfixes +- Fix SSH connection bug where the hostname was incorrectly trimmed and the error was hidden +- Fix docs example + +### Miscellaneous +- Add Python3.8 and 3.9 in setup.py classifier list + 4.4.1 ----- @@ -10,7 +22,6 @@ Change log - Avoid setting unsuported parameter for subprocess.Popen on Windows - Replace use of deprecated "filter" argument on ""docker/api/image" - 4.4.0 ----- @@ -28,7 +39,6 @@ Change log - Improve exception messages for server and client errors - Bump cryptography from 2.3 to 3.2 - 4.3.1 ----- @@ -53,7 +63,6 @@ Change log - Update default API version to v1.39 - Update test engine version to 19.03.12 - 4.2.2 ----- @@ -109,7 +118,6 @@ Change log - Adjust `--platform` tests for changes in docker engine - Update credentials-helpers to v0.6.3 - 4.0.2 ----- @@ -123,7 +131,6 @@ Change log - Bumped version of websocket-client - 4.0.1 ----- From 
fe995ae79f9ae464b39f33e1ce474d33999e867f Mon Sep 17 00:00:00 2001 From: aiordache Date: Fri, 19 Feb 2021 10:07:52 +0100 Subject: [PATCH 137/211] Update changelog post-release 4.4.3 Signed-off-by: aiordache --- docs/change-log.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index f666697c..546e071f 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,17 @@ Change log ========== +4.4.3 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/72?closed=1) + +### Features +- Add support for docker.types.Placement.MaxReplicas + +### Bugfixes +- Fix SSH port parsing when shelling out to the ssh client + 4.4.2 ----- From 43ca2f8ff958cc29d66ef6badae8121b81ee3434 Mon Sep 17 00:00:00 2001 From: aiordache Date: Tue, 23 Feb 2021 19:04:03 +0100 Subject: [PATCH 138/211] Drop LD_LIBRARY_PATH env var for SSH shellout Signed-off-by: aiordache --- docker/transport/sshconn.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index fbfdf416..a761ef51 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -53,9 +53,15 @@ class SSHSocket(socket.socket): signal.signal(signal.SIGINT, signal.SIG_IGN) preexec_func = f + env = dict(os.environ) + + # drop LD_LIBRARY_PATH and SSL_CERT_FILE + env.pop('LD_LIBRARY_PATH', None) + env.pop('SSL_CERT_FILE', None) + self.proc = subprocess.Popen( ' '.join(args), - env=os.environ, + env=env, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, From 148f9161e113a59914df941707c919b7319266dc Mon Sep 17 00:00:00 2001 From: aiordache Date: Wed, 24 Feb 2021 18:20:24 +0100 Subject: [PATCH 139/211] Update changelog for 4.4.4 Signed-off-by: aiordache --- docs/change-log.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index 546e071f..8db3fc58 100644 --- a/docs/change-log.md +++ 
b/docs/change-log.md @@ -1,6 +1,14 @@ Change log ========== +4.4.4 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/73?closed=1) + +### Bugfixes +- Remove `LD_LIBRARY_PATH` and `SSL_CERT_FILE` environment variables when shelling out to the ssh client + 4.4.3 ----- From 7d316641a3da5ece9a29471390ef965d13b160b7 Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Wed, 4 Dec 2019 19:44:27 -0300 Subject: [PATCH 140/211] Add limit parameter to image search endpoint Signed-off-by: Felipe Ruhland --- docker/api/image.py | 9 +++++++-- tests/unit/models_images_test.py | 5 +++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/docker/api/image.py b/docker/api/image.py index 56c5448e..4658cdee 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -509,13 +509,14 @@ class ImageApiMixin(object): res = self._delete(self._url("/images/{0}", image), params=params) return self._result(res, True) - def search(self, term): + def search(self, term, limit=None): """ Search for images on Docker Hub. Similar to the ``docker search`` command. Args: term (str): A term to search for. + limit (int): The maximum number of results to return. Returns: (list of dicts): The response of the search. @@ -524,8 +525,12 @@ class ImageApiMixin(object): :py:class:`docker.errors.APIError` If the server returns an error. 
""" + params = {'term': term} + if limit is not None: + params['limit'] = limit + return self._result( - self._get(self._url("/images/search"), params={'term': term}), + self._get(self._url("/images/search"), params=params), True ) diff --git a/tests/unit/models_images_test.py b/tests/unit/models_images_test.py index e3d070c0..f3ca0be4 100644 --- a/tests/unit/models_images_test.py +++ b/tests/unit/models_images_test.py @@ -112,6 +112,11 @@ class ImageCollectionTest(unittest.TestCase): client.images.search('test') client.api.search.assert_called_with('test') + def test_search_limit(self): + client = make_fake_client() + client.images.search('test', limit=5) + client.api.search.assert_called_with('test', limit=5) + class ImageTest(unittest.TestCase): def test_short_id(self): From d836bb8703959128e5ce274e3b5186a797760303 Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Fri, 26 Feb 2021 21:59:35 +0100 Subject: [PATCH 141/211] Fix continuous integration status badged https://docs.github.com/en/actions/managing-workflow-runs/ adding-a-workflow-status-badge Signed-off-by: Felipe Ruhland --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8ce684b5..4fc31f7d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Docker SDK for Python -[![Build Status](https://travis-ci.org/docker/docker-py.svg?branch=master)](https://travis-ci.org/docker/docker-py) +[![Build Status](https://github.com/docker/docker-py/actions/workflows/ci.yml/badge.svg?branch=master)](https://github.com/docker/docker-py/actions/workflows/ci.yml/) A Python library for the Docker Engine API. It lets you do anything the `docker` command does, but from within Python apps – run containers, manage containers, manage Swarms, etc. 
From c239d66d5d261f0f956925705c679fffb61bdb05 Mon Sep 17 00:00:00 2001 From: Hakan Ardo Date: Wed, 3 Mar 2021 09:27:21 +0100 Subject: [PATCH 142/211] Verify TLS keys loaded from docker contexts This maches the behaviour of the docker cli when using contexts. Signed-off-by: Hakan Ardo --- docker/context/context.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docker/context/context.py b/docker/context/context.py index 2413b2ec..b1cacf92 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -127,8 +127,12 @@ class Context: elif filename.startswith("key"): key = os.path.join(tls_dir, endpoint, filename) if all([ca_cert, cert, key]): + verify = None + if endpoint == "docker": + if not self.endpoints["docker"].get("SkipTLSVerify", False): + verify = True certs[endpoint] = TLSConfig( - client_cert=(cert, key), ca_cert=ca_cert) + client_cert=(cert, key), ca_cert=ca_cert, verify=verify) self.tls_cfg = certs self.tls_path = tls_dir From 563124163a5d092e954846121150d896ddca0836 Mon Sep 17 00:00:00 2001 From: Nicolas De Loof Date: Thu, 4 Mar 2021 11:37:07 +0100 Subject: [PATCH 143/211] relax PORT_SPEC regex so it accept and ignore square brackets Signed-off-by: Nicolas De Loof --- docker/utils/ports.py | 2 +- tests/unit/utils_test.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/docker/utils/ports.py b/docker/utils/ports.py index a50cc029..10b19d74 100644 --- a/docker/utils/ports.py +++ b/docker/utils/ports.py @@ -3,7 +3,7 @@ import re PORT_SPEC = re.compile( "^" # Match full string "(" # External part - r"((?P[a-fA-F\d.:]+):)?" # Address + r"(\[?(?P[a-fA-F\d.:]+)\]?:)?" # Address r"(?P[\d]*)(-(?P[\d]+))?:" # External range ")?" r"(?P[\d]+)(-(?P[\d]+))?" 
# Internal range diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index a53151cb..0d6ff22d 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -541,6 +541,12 @@ class PortsTest(unittest.TestCase): assert internal_port == ["2000"] assert external_port == [("2001:abcd:ef00::2", "1000")] + def test_split_port_with_ipv6_square_brackets_address(self): + internal_port, external_port = split_port( + "[2001:abcd:ef00::2]:1000:2000") + assert internal_port == ["2000"] + assert external_port == [("2001:abcd:ef00::2", "1000")] + def test_split_port_invalid(self): with pytest.raises(ValueError): split_port("0.0.0.0:1000:2000:tcp") From c8fba210a222d4f7fde90da8f48db1e7faa637ec Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Wed, 10 Mar 2021 20:43:37 -0300 Subject: [PATCH 144/211] Remove support to pre python 3.6 Signed-off-by: Ulysses Souza --- .github/workflows/ci.yml | 6 +-- .readthedocs.yml | 2 +- Dockerfile | 6 +-- Dockerfile-py3 | 15 -------- Jenkinsfile | 5 +-- Makefile | 50 ++++--------------------- docker/api/client.py | 18 ++++----- docker/context/context.py | 7 ++-- docker/transport/sshconn.py | 4 +- docker/utils/utils.py | 49 ++++++++++-------------- requirements.txt | 1 - setup.py | 12 +----- test-requirements.txt | 2 +- tests/Dockerfile-dind-certs | 2 +- tests/integration/api_container_test.py | 48 +++++++----------------- tests/integration/api_image_test.py | 11 +++--- tests/integration/api_service_test.py | 3 +- tests/unit/sshadapter_test.py | 27 ++++++++----- tox.ini | 2 +- 19 files changed, 88 insertions(+), 182 deletions(-) delete mode 100644 Dockerfile-py3 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1f119c90..b6925082 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,7 +8,7 @@ jobs: strategy: max-parallel: 1 matrix: - python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9] + python-version: [3.6, 3.7, 3.8, 3.9] steps: - uses: actions/checkout@v2 @@ -18,8 +18,8 @@ jobs: 
python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip - pip install -r test-requirements.txt -r requirements.txt + python3 -m pip install --upgrade pip + pip3 install -r test-requirements.txt -r requirements.txt - name: Test with pytest run: | docker logout diff --git a/.readthedocs.yml b/.readthedocs.yml index 7679f80a..32113fed 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -4,7 +4,7 @@ sphinx: configuration: docs/conf.py python: - version: 3.5 + version: 3.6 install: - requirements: docs-requirements.txt - requirements: requirements.txt diff --git a/Dockerfile b/Dockerfile index 7309a83e..22732dec 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,11 +1,7 @@ -ARG PYTHON_VERSION=2.7 +ARG PYTHON_VERSION=3.7 FROM python:${PYTHON_VERSION} -# Add SSH keys and set permissions -COPY tests/ssh-keys /root/.ssh -RUN chmod -R 600 /root/.ssh - RUN mkdir /src WORKDIR /src diff --git a/Dockerfile-py3 b/Dockerfile-py3 deleted file mode 100644 index 22732dec..00000000 --- a/Dockerfile-py3 +++ /dev/null @@ -1,15 +0,0 @@ -ARG PYTHON_VERSION=3.7 - -FROM python:${PYTHON_VERSION} - -RUN mkdir /src -WORKDIR /src - -COPY requirements.txt /src/requirements.txt -RUN pip install -r requirements.txt - -COPY test-requirements.txt /src/test-requirements.txt -RUN pip install -r test-requirements.txt - -COPY . /src -RUN pip install . 
diff --git a/Jenkinsfile b/Jenkinsfile index 471072bf..f524ae7a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,7 +1,6 @@ #!groovy def imageNameBase = "dockerpinata/docker-py" -def imageNamePy2 def imageNamePy3 def imageDindSSH def images = [:] @@ -22,12 +21,10 @@ def buildImages = { -> stage("build image") { checkout(scm) - imageNamePy2 = "${imageNameBase}:py2-${gitCommit()}" imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}" imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}" withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "") - buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7") buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7") } } @@ -73,7 +70,7 @@ def runTests = { Map settings -> throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '19.03.12')`") } if (!pythonVersion) { - throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py2.7')`") + throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py3.7')`") } { -> diff --git a/Makefile b/Makefile index 70d7083e..60d99842 100644 --- a/Makefile +++ b/Makefile @@ -6,13 +6,9 @@ all: test .PHONY: clean clean: - -docker rm -f dpy-dind-py2 dpy-dind-py3 dpy-dind-certs dpy-dind-ssl + -docker rm -f dpy-dind-py3 dpy-dind-certs dpy-dind-ssl find -name "__pycache__" | xargs rm -rf -.PHONY: build -build: - docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 --build-arg APT_MIRROR . - .PHONY: build-dind-ssh build-dind-ssh: docker build -t docker-dind-ssh -f tests/Dockerfile-ssh-dind --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} --build-arg API_VERSION=${TEST_API_VERSION} --build-arg APT_MIRROR . @@ -30,20 +26,12 @@ build-dind-certs: docker build -t dpy-dind-certs -f tests/Dockerfile-dind-certs . 
.PHONY: test -test: flake8 unit-test unit-test-py3 integration-dind integration-dind-ssl - -.PHONY: unit-test -unit-test: build - docker run -t --rm docker-sdk-python py.test tests/unit +test: flake8 unit-test-py3 integration-dind integration-dind-ssl .PHONY: unit-test-py3 unit-test-py3: build-py3 docker run -t --rm docker-sdk-python3 py.test tests/unit -.PHONY: integration-test -integration-test: build - docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python py.test -v tests/integration/${file} - .PHONY: integration-test-py3 integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} @@ -53,16 +41,7 @@ setup-network: docker network inspect dpy-tests || docker network create dpy-tests .PHONY: integration-dind -integration-dind: integration-dind-py2 integration-dind-py3 - -.PHONY: integration-dind-py2 -integration-dind-py2: build setup-network - docker rm -vf dpy-dind-py2 || : - docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\ - docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental - docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py2:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python py.test tests/integration/${file} - docker rm -vf dpy-dind-py2 +integration-dind: integration-dind-py3 .PHONY: integration-dind-py3 integration-dind-py3: build-py3 setup-network @@ -73,16 +52,6 @@ integration-dind-py3: build-py3 setup-network --network dpy-tests docker-sdk-python3 py.test tests/integration/${file} docker rm -vf dpy-dind-py3 -.PHONY: integration-ssh-py2 -integration-ssh-py2: build-dind-ssh build setup-network - docker rm -vf dpy-dind-py2 || : - docker run -d --network dpy-tests --name dpy-dind-py2 --privileged\ - docker-dind-ssh dockerd --experimental - # start SSH daemon - docker exec dpy-dind-py2 sh -c "/usr/sbin/sshd" - docker run -t --rm 
--env="DOCKER_HOST=ssh://dpy-dind-py2" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python py.test tests/ssh/${file} - docker rm -vf dpy-dind-py2 .PHONY: integration-ssh-py3 integration-ssh-py3: build-dind-ssh build-py3 setup-network @@ -97,7 +66,7 @@ integration-ssh-py3: build-dind-ssh build-py3 setup-network .PHONY: integration-dind-ssl -integration-dind-ssl: build-dind-certs build build-py3 +integration-dind-ssl: build-dind-certs build-py3 docker rm -vf dpy-dind-certs dpy-dind-ssl || : docker run -d --name dpy-dind-certs dpy-dind-certs docker run -d --env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1"\ @@ -106,22 +75,19 @@ integration-dind-ssl: build-dind-certs build build-py3 docker:${TEST_ENGINE_VERSION}-dind\ dockerd --tlsverify --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\ --tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental - docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ - --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python py.test tests/integration/${file} docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ --network dpy-tests docker-sdk-python3 py.test tests/integration/${file} docker rm -vf dpy-dind-ssl dpy-dind-certs .PHONY: flake8 -flake8: build - docker run -t --rm docker-sdk-python flake8 docker tests +flake8: build-py3 + docker run -t --rm docker-sdk-python3 flake8 docker tests .PHONY: docs docs: build-docs docker run --rm -t -v `pwd`:/src docker-sdk-python-docs sphinx-build docs docs/_build .PHONY: shell -shell: build - docker run -it -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python python +shell: build-py3 + docker run -it -v /var/run/docker.sock:/var/run/docker.sock 
docker-sdk-python3 python diff --git a/docker/api/client.py b/docker/api/client.py index 2b67291a..ee9ad9c3 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -1,10 +1,10 @@ import json import struct +import urllib from functools import partial import requests import requests.exceptions -import six import websocket from .. import auth @@ -192,12 +192,12 @@ class APIClient( # version detection needs to be after unix adapter mounting if version is None or (isinstance( version, - six.string_types + str ) and version.lower() == 'auto'): self._version = self._retrieve_server_version() else: self._version = version - if not isinstance(self._version, six.string_types): + if not isinstance(self._version, str): raise DockerException( 'Version parameter must be a string or None. Found {0}'.format( type(version).__name__ @@ -246,13 +246,13 @@ class APIClient( def _url(self, pathfmt, *args, **kwargs): for arg in args: - if not isinstance(arg, six.string_types): + if not isinstance(arg, str): raise ValueError( 'Expected a string but found {0} ({1}) ' 'instead'.format(arg, type(arg)) ) - quote_f = partial(six.moves.urllib.parse.quote, safe="/:") + quote_f = partial(urllib.parse.quote, safe="/:") args = map(quote_f, args) if kwargs.get('versioned_api', True): @@ -284,7 +284,7 @@ class APIClient( # so we do this disgusting thing here. data2 = {} if data is not None and isinstance(data, dict): - for k, v in six.iteritems(data): + for k, v in iter(data.items()): if v is not None: data2[k] = v elif data is not None: @@ -320,12 +320,10 @@ class APIClient( sock = response.raw._fp.fp.raw.sock elif self.base_url.startswith('http+docker://ssh'): sock = response.raw._fp.fp.channel - elif six.PY3: + else: sock = response.raw._fp.fp.raw if self.base_url.startswith("https://"): sock = sock._sock - else: - sock = response.raw._fp.fp._sock try: # Keep a reference to the response to stop it being garbage # collected. 
If the response is garbage collected, it will @@ -465,7 +463,7 @@ class APIClient( self._result(res, binary=True) self._raise_for_status(res) - sep = six.binary_type() + sep = b'' if stream: return self._multiplexed_response_stream_helper(res) else: diff --git a/docker/context/context.py b/docker/context/context.py index b1cacf92..f4aff6b0 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -11,6 +11,7 @@ from docker.context.config import get_context_host class Context: """A context.""" + def __init__(self, name, orchestrator=None, host=None, endpoints=None, tls=False): if not name: @@ -128,9 +129,9 @@ class Context: key = os.path.join(tls_dir, endpoint, filename) if all([ca_cert, cert, key]): verify = None - if endpoint == "docker": - if not self.endpoints["docker"].get("SkipTLSVerify", False): - verify = True + if endpoint == "docker" and not self.endpoints["docker"].get( + "SkipTLSVerify", False): + verify = True certs[endpoint] = TLSConfig( client_cert=(cert, key), ca_cert=ca_cert, verify=verify) self.tls_cfg = certs diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index a761ef51..fb5c6bbe 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -53,7 +53,7 @@ class SSHSocket(socket.socket): signal.signal(signal.SIGINT, signal.SIG_IGN) preexec_func = f - env = dict(os.environ) + env = dict(os.environ) # drop LD_LIBRARY_PATH and SSL_CERT_FILE env.pop('LD_LIBRARY_PATH', None) @@ -65,7 +65,7 @@ class SSHSocket(socket.socket): shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, - preexec_fn=preexec_func) + preexec_fn=None if constants.IS_WINDOWS_PLATFORM else preexec_func) def _write(self, data): if not self.proc or self.proc.stdin.closed: diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 1b195e27..f703cbd3 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -7,8 +7,6 @@ import string from datetime import datetime from distutils.version import StrictVersion -import 
six - from .. import errors from .. import tls from ..constants import DEFAULT_HTTP_HOST @@ -16,11 +14,7 @@ from ..constants import DEFAULT_UNIX_SOCKET from ..constants import DEFAULT_NPIPE from ..constants import BYTE_UNITS -if six.PY2: - from urllib import splitnport - from urlparse import urlparse -else: - from urllib.parse import splitnport, urlparse +from urllib.parse import splitnport, urlparse def create_ipam_pool(*args, **kwargs): @@ -39,8 +33,7 @@ def create_ipam_config(*args, **kwargs): def decode_json_header(header): data = base64.b64decode(header) - if six.PY3: - data = data.decode('utf-8') + data = data.decode('utf-8') return json.loads(data) @@ -80,7 +73,7 @@ def _convert_port_binding(binding): if len(binding) == 2: result['HostPort'] = binding[1] result['HostIp'] = binding[0] - elif isinstance(binding[0], six.string_types): + elif isinstance(binding[0], str): result['HostIp'] = binding[0] else: result['HostPort'] = binding[0] @@ -104,7 +97,7 @@ def _convert_port_binding(binding): def convert_port_bindings(port_bindings): result = {} - for k, v in six.iteritems(port_bindings): + for k, v in iter(port_bindings.items()): key = str(k) if '/' not in key: key += '/tcp' @@ -121,7 +114,7 @@ def convert_volume_binds(binds): result = [] for k, v in binds.items(): - if isinstance(k, six.binary_type): + if isinstance(k, bytes): k = k.decode('utf-8') if isinstance(v, dict): @@ -132,7 +125,7 @@ def convert_volume_binds(binds): ) bind = v['bind'] - if isinstance(bind, six.binary_type): + if isinstance(bind, bytes): bind = bind.decode('utf-8') if 'ro' in v: @@ -143,13 +136,13 @@ def convert_volume_binds(binds): mode = 'rw' result.append( - six.text_type('{0}:{1}:{2}').format(k, bind, mode) + str('{0}:{1}:{2}').format(k, bind, mode) ) else: - if isinstance(v, six.binary_type): + if isinstance(v, bytes): v = v.decode('utf-8') result.append( - six.text_type('{0}:{1}:rw').format(k, v) + str('{0}:{1}:rw').format(k, v) ) return result @@ -166,7 +159,7 @@ def 
convert_tmpfs_mounts(tmpfs): result = {} for mount in tmpfs: - if isinstance(mount, six.string_types): + if isinstance(mount, str): if ":" in mount: name, options = mount.split(":", 1) else: @@ -191,7 +184,7 @@ def convert_service_networks(networks): result = [] for n in networks: - if isinstance(n, six.string_types): + if isinstance(n, str): n = {'Target': n} result.append(n) return result @@ -302,7 +295,7 @@ def parse_devices(devices): if isinstance(device, dict): device_list.append(device) continue - if not isinstance(device, six.string_types): + if not isinstance(device, str): raise errors.DockerException( 'Invalid device type {0}'.format(type(device)) ) @@ -372,13 +365,13 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None): def convert_filters(filters): result = {} - for k, v in six.iteritems(filters): + for k, v in iter(filters.items()): if isinstance(v, bool): v = 'true' if v else 'false' if not isinstance(v, list): v = [v, ] result[k] = [ - str(item) if not isinstance(item, six.string_types) else item + str(item) if not isinstance(item, str) else item for item in v ] return json.dumps(result) @@ -391,7 +384,7 @@ def datetime_to_timestamp(dt): def parse_bytes(s): - if isinstance(s, six.integer_types + (float,)): + if isinstance(s, (int, float,)): return s if len(s) == 0: return 0 @@ -433,7 +426,7 @@ def parse_bytes(s): def normalize_links(links): if isinstance(links, dict): - links = six.iteritems(links) + links = iter(links.items()) return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)] @@ -468,8 +461,6 @@ def parse_env_file(env_file): def split_command(command): - if six.PY2 and not isinstance(command, six.binary_type): - command = command.encode('utf-8') return shlex.split(command) @@ -477,22 +468,22 @@ def format_environment(environment): def format_env(key, value): if value is None: return key - if isinstance(value, six.binary_type): + if isinstance(value, bytes): value = value.decode('utf-8') return 
u'{key}={value}'.format(key=key, value=value) - return [format_env(*var) for var in six.iteritems(environment)] + return [format_env(*var) for var in iter(environment.items())] def format_extra_hosts(extra_hosts, task=False): # Use format dictated by Swarm API if container is part of a task if task: return [ - '{} {}'.format(v, k) for k, v in sorted(six.iteritems(extra_hosts)) + '{} {}'.format(v, k) for k, v in sorted(iter(extra_hosts.items())) ] return [ - '{}:{}'.format(k, v) for k, v in sorted(six.iteritems(extra_hosts)) + '{}:{}'.format(k, v) for k, v in sorted(iter(extra_hosts.items())) ] diff --git a/requirements.txt b/requirements.txt index 43a688fd..f86a7bd7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,6 +13,5 @@ pyOpenSSL==18.0.0 pyparsing==2.2.0 pywin32==227; sys_platform == 'win32' requests==2.20.0 -six==1.10.0 urllib3==1.24.3 websocket-client==0.56.0 diff --git a/setup.py b/setup.py index 330ab3e3..b86016ef 100644 --- a/setup.py +++ b/setup.py @@ -11,18 +11,11 @@ ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) requirements = [ - 'six >= 1.4.0', 'websocket-client >= 0.32.0', 'requests >= 2.14.2, != 2.18.0', ] extras_require = { - ':python_version < "3.5"': 'backports.ssl_match_hostname >= 3.5', - # While not imported explicitly, the ipaddress module is required for - # ssl_match_hostname to verify hosts match with certificates via - # ServerAltname: https://pypi.python.org/pypi/backports.ssl_match_hostname - ':python_version < "3.3"': 'ipaddress >= 1.0.16', - # win32 APIs if on Windows (required for npipe support) ':sys_platform == "win32"': 'pywin32==227', @@ -69,7 +62,7 @@ setup( install_requires=requirements, tests_require=test_requirements, extras_require=extras_require, - python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*', + python_requires='>=3.6', zip_safe=False, test_suite='tests', classifiers=[ @@ -78,10 +71,7 @@ setup( 'Intended Audience :: Developers', 'Operating System :: OS 
Independent', 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', diff --git a/test-requirements.txt b/test-requirements.txt index 24078e27..40161bb8 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,4 +1,4 @@ -setuptools==44.0.0 # last version with python 2.7 support +setuptools==54.1.1 coverage==4.5.2 flake8==3.6.0 mock==1.0.1 diff --git a/tests/Dockerfile-dind-certs b/tests/Dockerfile-dind-certs index 2ab87ef7..8829ff79 100644 --- a/tests/Dockerfile-dind-certs +++ b/tests/Dockerfile-dind-certs @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION=2.7 +ARG PYTHON_VERSION=3.6 FROM python:${PYTHON_VERSION} RUN mkdir /tmp/certs diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 65e611b2..3087045b 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -7,7 +7,6 @@ from datetime import datetime import pytest import requests -import six import docker from .. 
import helpers @@ -35,7 +34,7 @@ class ListContainersTest(BaseAPIIntegrationTest): assert len(retrieved) == 1 retrieved = retrieved[0] assert 'Command' in retrieved - assert retrieved['Command'] == six.text_type('true') + assert retrieved['Command'] == str('true') assert 'Image' in retrieved assert re.search(r'alpine:.*', retrieved['Image']) assert 'Status' in retrieved @@ -104,9 +103,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): self.client.start(container3_id) assert self.client.wait(container3_id)['StatusCode'] == 0 - logs = self.client.logs(container3_id) - if six.PY3: - logs = logs.decode('utf-8') + logs = self.client.logs(container3_id).decode('utf-8') assert '{0}_NAME='.format(link_env_prefix1) in logs assert '{0}_ENV_FOO=1'.format(link_env_prefix1) in logs assert '{0}_NAME='.format(link_env_prefix2) in logs @@ -227,9 +224,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): self.client.start(container) self.client.wait(container) - logs = self.client.logs(container) - if six.PY3: - logs = logs.decode('utf-8') + logs = self.client.logs(container).decode('utf-8') groups = logs.strip().split(' ') assert '1000' in groups assert '1001' in groups @@ -244,9 +239,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): self.client.start(container) self.client.wait(container) - logs = self.client.logs(container) - if six.PY3: - logs = logs.decode('utf-8') + logs = self.client.logs(container).decode('utf-8') groups = logs.strip().split(' ') assert '1000' in groups @@ -515,10 +508,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): TEST_IMG, ['ls', self.mount_dest], ) - logs = self.client.logs(container) - - if six.PY3: - logs = logs.decode('utf-8') + logs = self.client.logs(container).decode('utf-8') assert self.filename in logs inspect_data = self.client.inspect_container(container) self.check_container_data(inspect_data, True) @@ -534,10 +524,8 @@ class VolumeBindTest(BaseAPIIntegrationTest): TEST_IMG, ['ls', self.mount_dest], ) - logs = 
self.client.logs(container) + logs = self.client.logs(container).decode('utf-8') - if six.PY3: - logs = logs.decode('utf-8') assert self.filename in logs inspect_data = self.client.inspect_container(container) @@ -554,9 +542,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): host_config=host_config ) assert container - logs = self.client.logs(container) - if six.PY3: - logs = logs.decode('utf-8') + logs = self.client.logs(container).decode('utf-8') assert self.filename in logs inspect_data = self.client.inspect_container(container) self.check_container_data(inspect_data, True) @@ -573,9 +559,7 @@ class VolumeBindTest(BaseAPIIntegrationTest): host_config=host_config ) assert container - logs = self.client.logs(container) - if six.PY3: - logs = logs.decode('utf-8') + logs = self.client.logs(container).decode('utf-8') assert self.filename in logs inspect_data = self.client.inspect_container(container) self.check_container_data(inspect_data, False) @@ -645,9 +629,8 @@ class ArchiveTest(BaseAPIIntegrationTest): for d in strm: destination.write(d) destination.seek(0) - retrieved_data = helpers.untar_file(destination, 'data.txt') - if six.PY3: - retrieved_data = retrieved_data.decode('utf-8') + retrieved_data = helpers.untar_file(destination, 'data.txt')\ + .decode('utf-8') assert data == retrieved_data.strip() def test_get_file_stat_from_container(self): @@ -683,9 +666,6 @@ class ArchiveTest(BaseAPIIntegrationTest): self.client.start(ctnr) self.client.wait(ctnr) logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') - data = data.decode('utf-8') assert logs.strip() == data def test_copy_directory_to_container(self): @@ -700,9 +680,7 @@ class ArchiveTest(BaseAPIIntegrationTest): self.client.put_archive(ctnr, '/vol1', test_tar) self.client.start(ctnr) self.client.wait(ctnr) - logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') + logs = self.client.logs(ctnr).decode('utf-8') results = logs.strip().split() assert 'a.py' in results 
assert 'b.py' in results @@ -861,7 +839,7 @@ Line2''' id = container['Id'] self.tmp_containers.append(id) self.client.start(id) - logs = six.binary_type() + logs = b'' for chunk in self.client.logs(id, stream=True, follow=True): logs += chunk @@ -881,7 +859,7 @@ Line2''' id = container['Id'] self.tmp_containers.append(id) self.client.start(id) - logs = six.binary_type() + logs = b'' generator = self.client.logs(id, stream=True, follow=True) threading.Timer(1, generator.close).start() diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index 37e26a3f..d5f89893 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -7,9 +7,8 @@ import tempfile import threading import pytest -import six -from six.moves import BaseHTTPServer -from six.moves import socketserver +from http.server import SimpleHTTPRequestHandler +import socketserver import docker @@ -33,7 +32,7 @@ class ListImagesTest(BaseAPIIntegrationTest): def test_images_quiet(self): res1 = self.client.images(quiet=True) - assert type(res1[0]) == six.text_type + assert type(res1[0]) == str class PullImageTest(BaseAPIIntegrationTest): @@ -44,7 +43,7 @@ class PullImageTest(BaseAPIIntegrationTest): pass res = self.client.pull('hello-world') self.tmp_imgs.append('hello-world') - assert type(res) == six.text_type + assert type(res) == str assert len(self.client.images('hello-world')) >= 1 img_info = self.client.inspect_image('hello-world') assert 'Id' in img_info @@ -273,7 +272,7 @@ class ImportImageTest(BaseAPIIntegrationTest): def temporary_http_file_server(self, stream): '''Serve data from an IO stream over HTTP.''' - class Handler(BaseHTTPServer.BaseHTTPRequestHandler): + class Handler(SimpleHTTPRequestHandler): def do_GET(self): self.send_response(200) self.send_header('Content-Type', 'application/x-tar') diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index 7e5336e2..1bee46e5 100644 --- 
a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -5,7 +5,6 @@ import time import docker import pytest -import six from ..helpers import ( force_leave_swarm, requires_api_version, requires_experimental @@ -150,7 +149,7 @@ class ServiceTest(BaseAPIIntegrationTest): else: break - if six.PY3: + if log_line is not None: log_line = log_line.decode('utf-8') assert 'hello\n' in log_line diff --git a/tests/unit/sshadapter_test.py b/tests/unit/sshadapter_test.py index ddee5920..874239ac 100644 --- a/tests/unit/sshadapter_test.py +++ b/tests/unit/sshadapter_test.py @@ -2,31 +2,38 @@ import unittest import docker from docker.transport.sshconn import SSHSocket + class SSHAdapterTest(unittest.TestCase): - def test_ssh_hostname_prefix_trim(self): - conn = docker.transport.SSHHTTPAdapter(base_url="ssh://user@hostname:1234", shell_out=True) + @staticmethod + def test_ssh_hostname_prefix_trim(): + conn = docker.transport.SSHHTTPAdapter( + base_url="ssh://user@hostname:1234", shell_out=True) assert conn.ssh_host == "user@hostname:1234" - def test_ssh_parse_url(self): + @staticmethod + def test_ssh_parse_url(): c = SSHSocket(host="user@hostname:1234") assert c.host == "hostname" assert c.port == "1234" assert c.user == "user" - def test_ssh_parse_hostname_only(self): + @staticmethod + def test_ssh_parse_hostname_only(): c = SSHSocket(host="hostname") assert c.host == "hostname" - assert c.port == None - assert c.user == None + assert c.port is None + assert c.user is None - def test_ssh_parse_user_and_hostname(self): + @staticmethod + def test_ssh_parse_user_and_hostname(): c = SSHSocket(host="user@hostname") assert c.host == "hostname" - assert c.port == None + assert c.port is None assert c.user == "user" - def test_ssh_parse_hostname_and_port(self): + @staticmethod + def test_ssh_parse_hostname_and_port(): c = SSHSocket(host="hostname:22") assert c.host == "hostname" assert c.port == "22" - assert c.user == None \ No newline at end of file + 
assert c.user is None diff --git a/tox.ini b/tox.ini index df797f41..d35d41ae 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27, py35, py36, py37, flake8 +envlist = py36, py37, flake8 skipsdist=True [testenv] From d4310b2db0e5fe5cb987a454f7e97c6e388a470e Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Wed, 24 Mar 2021 17:59:47 +0100 Subject: [PATCH 145/211] Fix `KeyError` when creating a new secret How to reproduce the issue: ```py >>> import docker >>> cli = docker.from_env() >>> cli.secrets.create(name="any_name", data="1") Traceback (most recent call last): File "", line 1, in File "/home/docker-py/docker/models/secrets.py", line 10, in __repr__ return "<%s: '%s'>" % (self.__class__.__name__, self.name) File "/home/docker-py/docker/models/secrets.py", line 14, in name return self.attrs['Spec']['Name'] KeyError: 'Spec' ``` The exception raises because create secrets API `/secrets/create` only return the `id` attribute: https://docs.docker.com/engine/api/v1.41/#operation/SecretCreate The secret model is created using just the `id` attribute and fails when looking for Spec.Name attribute. ```py def __repr__(self): return "<%s: '%s'>" % (self.__class__.__name__, self.name) ``` ```py @property def name(self): return self.attrs['Spec']['Name'] ``` I came up with a ugly solution but will prevent the problem to happen again: ```py def create(self, **kwargs): obj = self.client.api.create_secret(**kwargs) + obj.setdefault("Spec", {})["Name"] = kwargs.get("name") return self.prepare_model(obj) ``` After the API call, I added the name attribute to the right place to be used on the property name. ```py >>> import docker >>> cli = docker.from_env() >>> cli.secrets.create(name="any_name", data="1") ``` It isn't the most elegant solution, but it will do the trick. I had a previous PR #2517 when I propose using the `id` attribute instead of `name` on the `__repr__` method, but I think this one will be better. 
That fixes #2025 Signed-off-by: Felipe Ruhland --- docker/models/secrets.py | 1 + tests/unit/fake_api.py | 10 ++++++++++ tests/unit/fake_api_client.py | 1 + tests/unit/models_secrets_test.py | 11 +++++++++++ 4 files changed, 23 insertions(+) create mode 100644 tests/unit/models_secrets_test.py diff --git a/docker/models/secrets.py b/docker/models/secrets.py index ca11edeb..e2ee88af 100644 --- a/docker/models/secrets.py +++ b/docker/models/secrets.py @@ -30,6 +30,7 @@ class SecretCollection(Collection): def create(self, **kwargs): obj = self.client.api.create_secret(**kwargs) + obj.setdefault("Spec", {})["Name"] = kwargs.get("name") return self.prepare_model(obj) create.__doc__ = APIClient.create_secret.__doc__ diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py index 27e463d2..4fd4d113 100644 --- a/tests/unit/fake_api.py +++ b/tests/unit/fake_api.py @@ -17,6 +17,8 @@ FAKE_URL = 'myurl' FAKE_PATH = '/path' FAKE_VOLUME_NAME = 'perfectcherryblossom' FAKE_NODE_ID = '24ifsmvkjbyhk' +FAKE_SECRET_ID = 'epdyrw4tsi03xy3deu8g8ly6o' +FAKE_SECRET_NAME = 'super_secret' # Each method is prefixed with HTTP method (get, post...) 
# for clarity and readability @@ -512,6 +514,12 @@ def post_fake_network_disconnect(): return 200, None +def post_fake_secret(): + status_code = 200 + response = {'ID': FAKE_SECRET_ID} + return status_code, response + + # Maps real api url to fake response callback prefix = 'http+docker://localhost' if constants.IS_WINDOWS_PLATFORM: @@ -643,4 +651,6 @@ fake_responses = { CURRENT_VERSION, prefix, FAKE_NETWORK_ID ), 'POST'): post_fake_network_disconnect, + '{1}/{0}/secrets/create'.format(CURRENT_VERSION, prefix): + post_fake_secret, } diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py index e85001db..5825b6ec 100644 --- a/tests/unit/fake_api_client.py +++ b/tests/unit/fake_api_client.py @@ -40,6 +40,7 @@ def make_fake_api_client(overrides=None): fake_api.post_fake_create_container()[1], 'create_host_config.side_effect': api_client.create_host_config, 'create_network.return_value': fake_api.post_fake_network()[1], + 'create_secret.return_value': fake_api.post_fake_secret()[1], 'exec_create.return_value': fake_api.post_fake_exec_create()[1], 'exec_start.return_value': fake_api.post_fake_exec_start()[1], 'images.return_value': fake_api.get_fake_images()[1], diff --git a/tests/unit/models_secrets_test.py b/tests/unit/models_secrets_test.py new file mode 100644 index 00000000..4ccf4c63 --- /dev/null +++ b/tests/unit/models_secrets_test.py @@ -0,0 +1,11 @@ +import unittest + +from .fake_api_client import make_fake_client +from .fake_api import FAKE_SECRET_NAME + + +class CreateServiceTest(unittest.TestCase): + def test_secrets_repr(self): + client = make_fake_client() + secret = client.secrets.create(name="super_secret", data="secret") + assert secret.__repr__() == "".format(FAKE_SECRET_NAME) From 2403774e76b279ecdd7238d35f04def6fb1ca8b8 Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Fri, 2 Apr 2021 02:30:22 +0200 Subject: [PATCH 146/211] Upgrade cryptography library to version 3.4.7 Dependabot opened a pull request 
93bcc0497d8302aa2d78bd7ef756fc2ff3fd0912 to upgrade cryptography from 2.3 to 3.2. However, only `requirements.txt` was updated. The extra requirements were kept outdated. This commit was made to update the library to the last version. Fix #2791 Signed-off-by: Felipe Ruhland --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index f86a7bd7..1d0be30a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ appdirs==1.4.3 asn1crypto==0.22.0 backports.ssl-match-hostname==3.5.0.1 cffi==1.14.4 -cryptography==3.2 +cryptography==3.4.7 enum34==1.1.6 idna==2.5 ipaddress==1.0.18 diff --git a/setup.py b/setup.py index b86016ef..b692eabd 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ extras_require = { # https://github.com/pypa/pip/issues/4391). Once that's fixed, instead of # installing the extra dependencies, install the following instead: # 'requests[security] >= 2.5.2, != 2.11.0, != 2.12.2' - 'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=1.3.4', 'idna>=2.0.0'], + 'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=3.4.7', 'idna>=2.0.0'], # Only required when connecting using the ssh:// protocol 'ssh': ['paramiko>=2.4.2'], From a34dd8b1a987bfd98882197c909a24a963b68a8f Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Mon, 5 Apr 2021 14:57:52 +0200 Subject: [PATCH 147/211] Fix images low-level documentation examples I realize that the documentation of low-level `images` was outdated when answering issue #2798 The issue can reproduce it with a simple test: ```py In [1]: import docker In [2]: client = docker.from_env() In [3]: client.pull --------------------------------------------------------------------------- AttributeError Traceback (most recent call last) in ----> 1 client.pull ~/docker-py/docker/client.py in __getattr__(self, name) 219 "object APIClient. 
See the low-level API section of the " 220 "documentation for more details.") --> 221 raise AttributeError(' '.join(s)) 222 223 AttributeError: 'DockerClient' object has no attribute 'pull' In Docker SDK for Python 2.0, this method is now on the object APIClient. See the low-level API section of the documentation for more details. In [4]: client.push --------------------------------------------------------------------------- AttributeError Traceback (most recent call last) in ----> 1 client.push ~/docker-py/docker/client.py in __getattr__(self, name) 219 "object APIClient. See the low-level API section of the " 220 "documentation for more details.") --> 221 raise AttributeError(' '.join(s)) 222 223 AttributeError: 'DockerClient' object has no attribute 'push' In Docker SDK for Python 2.0, this method is now on the object APIClient. See the low-level API section of the documentation for more details. In [5]: client.tag --------------------------------------------------------------------------- AttributeError Traceback (most recent call last) in ----> 1 client.tag ~/docker-py/docker/client.py in __getattr__(self, name) 219 "object APIClient. See the low-level API section of the " 220 "documentation for more details.") --> 221 raise AttributeError(' '.join(s)) 222 223 AttributeError: 'DockerClient' object has no attribute 'tag' In Docker SDK for Python 2.0, this method is now on the object APIClient. See the low-level API section of the documentation for more details. In [6]: client.get_image --------------------------------------------------------------------------- AttributeError Traceback (most recent call last) in ----> 1 client.get_image ~/docker-py/docker/client.py in __getattr__(self, name) 219 "object APIClient. 
See the low-level API section of the " 220 "documentation for more details.") --> 221 raise AttributeError(' '.join(s)) 222 223 AttributeError: 'DockerClient' object has no attribute 'get_image' In Docker SDK for Python 2.0, this method is now on the object APIClient. See the low-level API section of the documentation for more details. In [7]: client.api.get_image Out[7]: > In [8]: client.api.tag Out[8]: > In [9]: client.api.pull Out[9]: > In [10]: client.api.push Out[10]: > ``` Signed-off-by: Felipe Ruhland --- docker/api/image.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/api/image.py b/docker/api/image.py index 56c5448e..db806c49 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -31,7 +31,7 @@ class ImageApiMixin(object): Example: - >>> image = cli.get_image("busybox:latest") + >>> image = client.api.get_image("busybox:latest") >>> f = open('/tmp/busybox-latest.tar', 'wb') >>> for chunk in image: >>> f.write(chunk) @@ -379,7 +379,7 @@ class ImageApiMixin(object): Example: - >>> for line in cli.pull('busybox', stream=True, decode=True): + >>> for line in client.api.pull('busybox', stream=True, decode=True): ... print(json.dumps(line, indent=4)) { "status": "Pulling image (latest) from busybox", @@ -458,7 +458,7 @@ class ImageApiMixin(object): If the server returns an error. Example: - >>> for line in cli.push('yourname/app', stream=True, decode=True): + >>> for line in client.api.push('yourname/app', stream=True, decode=True): ... 
print(line) {'status': 'Pushing repository yourname/app (1 tags)'} {'status': 'Pushing','progressDetail': {}, 'id': '511136ea3c5a'} @@ -549,7 +549,7 @@ class ImageApiMixin(object): Example: - >>> client.tag('ubuntu', 'localhost:5000/ubuntu', 'latest', + >>> client.api.tag('ubuntu', 'localhost:5000/ubuntu', 'latest', force=True) """ params = { From ac9ae1f249c7635b3fd12b12d293f1c9848aaef5 Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Mon, 5 Apr 2021 15:31:43 +0200 Subject: [PATCH 148/211] Fix containers low-level documentation examples I realize that low-level documentation has outdated examples, so I created issue #2800 to fix that Signed-off-by: Felipe Ruhland --- docker/api/container.py | 50 ++++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 754b5dc6..369eba95 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -244,9 +244,9 @@ class ContainerApiMixin(object): .. code-block:: python - container_id = cli.create_container( + container_id = client.api.create_container( 'busybox', 'ls', ports=[1111, 2222], - host_config=cli.create_host_config(port_bindings={ + host_config=client.api.create_host_config(port_bindings={ 1111: 4567, 2222: None }) @@ -258,22 +258,22 @@ class ContainerApiMixin(object): .. code-block:: python - cli.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)}) + client.api.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)}) Or without host port assignment: .. code-block:: python - cli.create_host_config(port_bindings={1111: ('127.0.0.1',)}) + client.api.create_host_config(port_bindings={1111: ('127.0.0.1',)}) If you wish to use UDP instead of TCP (default), you need to declare ports as such in both the config and host config: .. 
code-block:: python - container_id = cli.create_container( + container_id = client.api.create_container( 'busybox', 'ls', ports=[(1111, 'udp'), 2222], - host_config=cli.create_host_config(port_bindings={ + host_config=client.api.create_host_config(port_bindings={ '1111/udp': 4567, 2222: None }) ) @@ -283,7 +283,7 @@ class ContainerApiMixin(object): .. code-block:: python - cli.create_host_config(port_bindings={ + client.api.create_host_config(port_bindings={ 1111: [1234, 4567] }) @@ -291,7 +291,7 @@ class ContainerApiMixin(object): .. code-block:: python - cli.create_host_config(port_bindings={ + client.api.create_host_config(port_bindings={ 1111: [ ('192.168.0.100', 1234), ('192.168.0.101', 1234) @@ -307,9 +307,9 @@ class ContainerApiMixin(object): .. code-block:: python - container_id = cli.create_container( + container_id = client.api.create_container( 'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'], - host_config=cli.create_host_config(binds={ + host_config=client.api.create_host_config(binds={ '/home/user1/': { 'bind': '/mnt/vol2', 'mode': 'rw', @@ -326,9 +326,9 @@ class ContainerApiMixin(object): .. code-block:: python - container_id = cli.create_container( + container_id = client.api.create_container( 'busybox', 'ls', volumes=['/mnt/vol1', '/mnt/vol2'], - host_config=cli.create_host_config(binds=[ + host_config=client.api.create_host_config(binds=[ '/home/user1/:/mnt/vol2', '/var/www:/mnt/vol1:ro', ]) @@ -346,15 +346,15 @@ class ContainerApiMixin(object): .. 
code-block:: python - networking_config = docker_client.create_networking_config({ - 'network1': docker_client.create_endpoint_config( + networking_config = client.api.create_networking_config({ + 'network1': client.api.create_endpoint_config( ipv4_address='172.28.0.124', aliases=['foo', 'bar'], links=['container2'] ) }) - ctnr = docker_client.create_container( + ctnr = client.api.create_container( img, command, networking_config=networking_config ) @@ -581,7 +581,7 @@ class ContainerApiMixin(object): Example: - >>> cli.create_host_config(privileged=True, cap_drop=['MKNOD'], + >>> client.api.create_host_config(privileged=True, cap_drop=['MKNOD'], volumes_from=['nostalgic_newton']) {'CapDrop': ['MKNOD'], 'LxcConf': None, 'Privileged': True, 'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False} @@ -612,11 +612,11 @@ class ContainerApiMixin(object): Example: - >>> docker_client.create_network('network1') - >>> networking_config = docker_client.create_networking_config({ - 'network1': docker_client.create_endpoint_config() + >>> client.api.create_network('network1') + >>> networking_config = client.api.create_networking_config({ + 'network1': client.api.create_endpoint_config() }) - >>> container = docker_client.create_container( + >>> container = client.api.create_container( img, command, networking_config=networking_config ) @@ -650,7 +650,7 @@ class ContainerApiMixin(object): Example: - >>> endpoint_config = client.create_endpoint_config( + >>> endpoint_config = client.api.create_endpoint_config( aliases=['web', 'app'], links={'app_db': 'db', 'another': None}, ipv4_address='132.65.0.123' @@ -729,7 +729,7 @@ class ContainerApiMixin(object): >>> c = docker.APIClient() >>> f = open('./sh_bin.tar', 'wb') - >>> bits, stat = c.get_archive(container, '/bin/sh') + >>> bits, stat = c.api.get_archive(container, '/bin/sh') >>> print(stat) {'name': 'sh', 'size': 1075464, 'mode': 493, 'mtime': '2018-10-01T15:37:48-07:00', 'linkTarget': ''} @@ -916,7 +916,7 @@ class 
ContainerApiMixin(object): .. code-block:: python - >>> cli.port('7174d6347063', 80) + >>> client.api.port('7174d6347063', 80) [{'HostIp': '0.0.0.0', 'HostPort': '80'}] """ res = self._get(self._url("/containers/{0}/json", container)) @@ -1095,10 +1095,10 @@ class ContainerApiMixin(object): Example: - >>> container = cli.create_container( + >>> container = client.api.create_container( ... image='busybox:latest', ... command='/bin/sleep 30') - >>> cli.start(container=container.get('Id')) + >>> client.api.start(container=container.get('Id')) """ if args or kwargs: raise errors.DeprecatedMethod( From 8945fda6be44f99cdbd68e5912eca4dffbb13acc Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Tue, 6 Apr 2021 16:01:16 +0200 Subject: [PATCH 149/211] Update maintainers Signed-off-by: Anca Iordache --- MAINTAINERS | 14 +++++++++++++- setup.py | 4 ++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/MAINTAINERS b/MAINTAINERS index b857d13d..b74cb28f 100644 --- a/MAINTAINERS +++ b/MAINTAINERS @@ -11,7 +11,8 @@ [Org] [Org."Core maintainers"] people = [ - "shin-", + "aiordache", + "ulyssessouza", ] [Org.Alumni] people = [ @@ -20,6 +21,7 @@ "dnephin", "mnowster", "mpetazzoni", + "shin-", ] [people] @@ -35,6 +37,11 @@ Email = "aanand@docker.com" GitHub = "aanand" + [people.aiordache] + Name = "Anca Iordache" + Email = "anca.iordache@docker.com" + GitHub = "aiordache" + [people.bfirsh] Name = "Ben Firshman" Email = "b@fir.sh" @@ -59,3 +66,8 @@ Name = "Joffrey F" Email = "joffrey@docker.com" GitHub = "shin-" + + [people.ulyssessouza] + Name = "Ulysses Domiciano Souza" + Email = "ulysses.souza@docker.com" + GitHub = "ulyssessouza" diff --git a/setup.py b/setup.py index b692eabd..ec1a51de 100644 --- a/setup.py +++ b/setup.py @@ -80,6 +80,6 @@ setup( 'Topic :: Utilities', 'License :: OSI Approved :: Apache Software License', ], - maintainer='Joffrey F', - maintainer_email='joffrey@docker.com', + maintainer='Ulysses Souza', + maintainer_email='ulysses.souza@docker.com', 
) From 4b44fa7e5db98af52fb9269422d05b9aa7e03f5c Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Tue, 6 Apr 2021 20:32:05 +0200 Subject: [PATCH 150/211] Fix volumes low-level documentation examples I realize that low-level documentation has outdated examples, so I created issue #2800 to fix that Signed-off-by: Felipe Ruhland --- docker/api/volume.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/api/volume.py b/docker/api/volume.py index 900a6086..9604554a 100644 --- a/docker/api/volume.py +++ b/docker/api/volume.py @@ -21,7 +21,7 @@ class VolumeApiMixin(object): Example: - >>> cli.volumes() + >>> client.api.volumes() {u'Volumes': [{u'Driver': u'local', u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', u'Name': u'foobar'}, @@ -56,7 +56,7 @@ class VolumeApiMixin(object): Example: - >>> volume = cli.create_volume(name='foobar', driver='local', + >>> volume = client.api.create_volume(name='foobar', driver='local', driver_opts={'foo': 'bar', 'baz': 'false'}, labels={"key": "value"}) >>> print(volume) @@ -104,7 +104,7 @@ class VolumeApiMixin(object): Example: - >>> cli.inspect_volume('foobar') + >>> client.api.inspect_volume('foobar') {u'Driver': u'local', u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', u'Name': u'foobar'} From 50a0ff596fde9cea5acb5250a07de16ca584d0a1 Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Tue, 6 Apr 2021 20:27:07 +0200 Subject: [PATCH 151/211] Fix network low-level documentation examples I realize that low-level documentation has outdated examples, so I created issue #2800 to fix that Signed-off-by: Felipe Ruhland --- docker/api/network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/api/network.py b/docker/api/network.py index 139c2d1a..18419932 100644 --- a/docker/api/network.py +++ b/docker/api/network.py @@ -75,7 +75,7 @@ class NetworkApiMixin(object): Example: A network using the bridge driver: - >>> client.create_network("network1", driver="bridge") + 
>>> client.api.create_network("network1", driver="bridge") You can also create more advanced networks with custom IPAM configurations. For example, setting the subnet to @@ -90,7 +90,7 @@ class NetworkApiMixin(object): >>> ipam_config = docker.types.IPAMConfig( pool_configs=[ipam_pool] ) - >>> docker_client.create_network("network1", driver="bridge", + >>> client.api.create_network("network1", driver="bridge", ipam=ipam_config) """ if options is not None and not isinstance(options, dict): From f53e615e0fd4b7becf9d72c73b8a9e021d473f62 Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Wed, 7 Apr 2021 21:44:24 +0200 Subject: [PATCH 152/211] Update API and Engine versions The Makefile and `docker/constants.py` were with old versions, so I updated them to the current one Signed-off-by: Felipe Ruhland --- Makefile | 4 ++-- docker/constants.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 60d99842..78a0d334 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ -TEST_API_VERSION ?= 1.39 -TEST_ENGINE_VERSION ?= 19.03.13 +TEST_API_VERSION ?= 1.41 +TEST_ENGINE_VERSION ?= 20.10.05 .PHONY: all all: test diff --git a/docker/constants.py b/docker/constants.py index 43fce613..9cd58b67 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -1,7 +1,7 @@ import sys from .version import version -DEFAULT_DOCKER_API_VERSION = '1.39' +DEFAULT_DOCKER_API_VERSION = '1.41' MINIMUM_DOCKER_API_VERSION = '1.21' DEFAULT_TIMEOUT_SECONDS = 60 STREAM_HEADER_SIZE_BYTES = 8 From 7ac8b56730c70e3b61ad628e7512082a4468e4f3 Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Wed, 7 Apr 2021 22:11:52 +0200 Subject: [PATCH 153/211] Add `CapabilityAdd` and `CapabilityDrop` to ContainerSpec Docker Engine v1.41 added `CapAdd` and `CapDrop` as part of the ContainerSpec, and `docker-py` should do the same. ``` GET /services now returns CapAdd and CapDrop as part of the ContainerSpec. 
GET /services/{id} now returns CapAdd and CapDrop as part of the ContainerSpec. POST /services/create now accepts CapAdd and CapDrop as part of the ContainerSpec. POST /services/{id}/update now accepts CapAdd and CapDrop as part of the ContainerSpec. GET /tasks now returns CapAdd and CapDrop as part of the ContainerSpec. GET /tasks/{id} now returns CapAdd and CapDrop as part of the ContainerSpec. ``` I added capabilities on docstrings, `service.create` init method and create tests for that. That change was mention in issue #2802. Signed-off-by: Felipe Ruhland --- docker/models/services.py | 6 ++++ docker/types/services.py | 19 +++++++++++- tests/integration/api_service_test.py | 30 ++++++++++++++++++ tests/integration/models_services_test.py | 38 +++++++++++++++++++++++ 4 files changed, 92 insertions(+), 1 deletion(-) diff --git a/docker/models/services.py b/docker/models/services.py index a29ff132..200dd333 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -213,6 +213,10 @@ class ServiceCollection(Collection): to the service. privileges (Privileges): Security options for the service's containers. + cap_add (:py:class:`list`): A list of kernel capabilities to add to + the default set for the container. + cap_drop (:py:class:`list`): A list of kernel capabilities to drop + from the default set for the container. Returns: :py:class:`Service`: The created service. @@ -277,6 +281,8 @@ class ServiceCollection(Collection): # kwargs to copy straight over to ContainerSpec CONTAINER_SPEC_KWARGS = [ 'args', + 'cap_add', + 'cap_drop', 'command', 'configs', 'dns_config', diff --git a/docker/types/services.py b/docker/types/services.py index 29498e97..8e87c7b4 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -112,13 +112,18 @@ class ContainerSpec(dict): containers. Only used for Windows containers. init (boolean): Run an init inside the container that forwards signals and reaps processes. 
+ cap_add (:py:class:`list`): A list of kernel capabilities to add to the + default set for the container. + cap_drop (:py:class:`list`): A list of kernel capabilities to drop from + the default set for the container. """ def __init__(self, image, command=None, args=None, hostname=None, env=None, workdir=None, user=None, labels=None, mounts=None, stop_grace_period=None, secrets=None, tty=None, groups=None, open_stdin=None, read_only=None, stop_signal=None, healthcheck=None, hosts=None, dns_config=None, configs=None, - privileges=None, isolation=None, init=None): + privileges=None, isolation=None, init=None, cap_add=None, + cap_drop=None): self['Image'] = image if isinstance(command, six.string_types): @@ -188,6 +193,18 @@ class ContainerSpec(dict): if init is not None: self['Init'] = init + if cap_add is not None: + if not isinstance(cap_add, list): + raise TypeError('cap_add must be a list') + + self['CapabilityAdd'] = cap_add + + if cap_drop is not None: + if not isinstance(cap_drop, list): + raise TypeError('cap_drop must be a list') + + self['CapabilityDrop'] = cap_drop + class Mount(dict): """ diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index 1bee46e5..57077e62 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -1358,3 +1358,33 @@ class ServiceTest(BaseAPIIntegrationTest): self.client.update_service(*args, **kwargs) else: raise + + @requires_api_version('1.41') + def test_create_service_cap_add(self): + name = self.get_service_name() + container_spec = docker.types.ContainerSpec( + TEST_IMG, ['echo', 'hello'], cap_add=['CAP_SYSLOG'] + ) + task_tmpl = docker.types.TaskTemplate(container_spec) + svc_id = self.client.create_service(task_tmpl, name=name) + assert self.client.inspect_service(svc_id) + services = self.client.services(filters={'name': name}) + assert len(services) == 1 + assert services[0]['ID'] == svc_id['ID'] + spec = 
services[0]['Spec']['TaskTemplate']['ContainerSpec'] + assert 'CAP_SYSLOG' in spec['CapabilityAdd'] + + @requires_api_version('1.41') + def test_create_service_cap_drop(self): + name = self.get_service_name() + container_spec = docker.types.ContainerSpec( + TEST_IMG, ['echo', 'hello'], cap_drop=['CAP_SYSLOG'] + ) + task_tmpl = docker.types.TaskTemplate(container_spec) + svc_id = self.client.create_service(task_tmpl, name=name) + assert self.client.inspect_service(svc_id) + services = self.client.services(filters={'name': name}) + assert len(services) == 1 + assert services[0]['ID'] == svc_id['ID'] + spec = services[0]['Spec']['TaskTemplate']['ContainerSpec'] + assert 'CAP_SYSLOG' in spec['CapabilityDrop'] diff --git a/tests/integration/models_services_test.py b/tests/integration/models_services_test.py index 36caa851..982842b3 100644 --- a/tests/integration/models_services_test.py +++ b/tests/integration/models_services_test.py @@ -333,3 +333,41 @@ class ServiceTest(unittest.TestCase): assert service.force_update() service.reload() assert service.version > initial_version + + @helpers.requires_api_version('1.41') + def test_create_cap_add(self): + client = docker.from_env(version=TEST_API_VERSION) + name = helpers.random_name() + service = client.services.create( + name=name, + labels={'foo': 'bar'}, + image="alpine", + command="sleep 300", + container_labels={'container': 'label'}, + cap_add=["CAP_SYSLOG"] + ) + assert service.name == name + assert service.attrs['Spec']['Labels']['foo'] == 'bar' + container_spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec'] + assert "alpine" in container_spec['Image'] + assert container_spec['Labels'] == {'container': 'label'} + assert "CAP_SYSLOG" in container_spec["CapabilityAdd"] + + @helpers.requires_api_version('1.41') + def test_create_cap_drop(self): + client = docker.from_env(version=TEST_API_VERSION) + name = helpers.random_name() + service = client.services.create( + name=name, + labels={'foo': 'bar'}, + 
image="alpine", + command="sleep 300", + container_labels={'container': 'label'}, + cap_drop=["CAP_SYSLOG"] + ) + assert service.name == name + assert service.attrs['Spec']['Labels']['foo'] == 'bar' + container_spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec'] + assert "alpine" in container_spec['Image'] + assert container_spec['Labels'] == {'container': 'label'} + assert "CAP_SYSLOG" in container_spec["CapabilityDrop"] From 13c316de692fb21521df5e019c65f9241f7ab52a Mon Sep 17 00:00:00 2001 From: Felipe Ruhland Date: Wed, 7 Apr 2021 22:55:23 +0200 Subject: [PATCH 154/211] Fix swarm low-level documentation examples I realize that low-level documentation has outdated examples, so I created issue #2800 to fix that Signed-off-by: Felipe Ruhland --- docker/api/swarm.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index 897f08e4..420529ac 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -58,10 +58,10 @@ class SwarmApiMixin(object): Example: - >>> spec = client.create_swarm_spec( + >>> spec = client.api.create_swarm_spec( snapshot_interval=5000, log_entries_for_slow_followers=1200 ) - >>> client.init_swarm( + >>> client.api.init_swarm( advertise_addr='eth0', listen_addr='0.0.0.0:5000', force_new_cluster=False, swarm_spec=spec ) @@ -354,8 +354,8 @@ class SwarmApiMixin(object): Example: - >>> key = client.get_unlock_key() - >>> client.unlock_node(key) + >>> key = client.api.get_unlock_key() + >>> client.unlock_swarm(key) """ if isinstance(key, dict): @@ -396,7 +396,7 @@ class SwarmApiMixin(object): 'Role': 'manager', 'Labels': {'foo': 'bar'} } - >>> client.update_node(node_id='24ifsmvkjbyhk', version=8, + >>> client.api.update_node(node_id='24ifsmvkjbyhk', version=8, node_spec=node_spec) """ From d58ca9720725219fd25a4145b8b5adbe1ed2ebc5 Mon Sep 17 00:00:00 2001 From: Roger Camargo Date: Thu, 3 Jun 2021 09:33:24 -0300 Subject: [PATCH 155/211] [DOCS] Update the Image.save 
documentation with a working example. Issue #836 Signed-off-by: Roger Camargo --- docker/models/images.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/models/images.py b/docker/models/images.py index e6355885..28cfc93c 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -84,9 +84,9 @@ class Image(Model): Example: - >>> image = cli.get_image("busybox:latest") + >>> image = cli.images.get("busybox:latest") >>> f = open('/tmp/busybox-latest.tar', 'wb') - >>> for chunk in image: + >>> for chunk in image.save(): >>> f.write(chunk) >>> f.close() """ From f42a81dca2aa7a152677ee1a7d5e14248e9a6e76 Mon Sep 17 00:00:00 2001 From: Sebastiano Mariani Date: Thu, 3 Jun 2021 15:51:52 -0700 Subject: [PATCH 156/211] Add the possibility to set a templating driver when creating a new Docker config Signed-off-by: Sebastiano Mariani --- docker/api/config.py | 9 +++++++-- tests/integration/api_config_test.py | 13 +++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/docker/api/config.py b/docker/api/config.py index 93e5168f..303be682 100644 --- a/docker/api/config.py +++ b/docker/api/config.py @@ -6,8 +6,9 @@ from .. 
import utils class ConfigApiMixin(object): + # TODO: The templating field is only available starting from API v 1.37 @utils.minimum_version('1.30') - def create_config(self, name, data, labels=None): + def create_config(self, name, data, labels=None, templating=None): """ Create a config @@ -15,6 +16,9 @@ class ConfigApiMixin(object): name (string): Name of the config data (bytes): Config data to be stored labels (dict): A mapping of labels to assign to the config + templating (dict): dictionary containing the name of the + templating driver to be used expressed as + { name: } Returns (dict): ID of the newly created config """ @@ -27,7 +31,8 @@ class ConfigApiMixin(object): body = { 'Data': data, 'Name': name, - 'Labels': labels + 'Labels': labels, + 'Templating': templating } url = self._url('/configs/create') diff --git a/tests/integration/api_config_test.py b/tests/integration/api_config_test.py index 0ffd7675..7b7d9c18 100644 --- a/tests/integration/api_config_test.py +++ b/tests/integration/api_config_test.py @@ -70,3 +70,16 @@ class ConfigAPITest(BaseAPIIntegrationTest): data = self.client.configs(filters={'name': ['favorite_character']}) assert len(data) == 1 assert data[0]['ID'] == config_id['ID'] + + @requires_api_version('1.37') + def test_create_config_with_templating(self): + config_id = self.client.create_config( + 'favorite_character', 'sakuya izayoi', + templating={ 'name': 'golang'} + ) + self.tmp_configs.append(config_id) + assert 'ID' in config_id + data = self.client.inspect_config(config_id) + assert data['Spec']['Name'] == 'favorite_character' + assert 'Templating' in data['Spec'] + assert data['Spec']['Templating']['Name'] == 'golang' From 5fcc293ba268a89ea1535114d36fbdcb73ec3d88 Mon Sep 17 00:00:00 2001 From: Anthony Sottile Date: Mon, 5 Jul 2021 18:24:23 -0400 Subject: [PATCH 157/211] use python3.6+ constructs Signed-off-by: Anthony Sottile --- docker/api/build.py | 12 +-- docker/api/client.py | 20 ++-- docker/api/config.py | 7 +- 
docker/api/container.py | 8 +- docker/api/daemon.py | 2 +- docker/api/exec_api.py | 6 +- docker/api/image.py | 10 +- docker/api/network.py | 2 +- docker/api/plugin.py | 6 +- docker/api/secret.py | 7 +- docker/api/service.py | 4 +- docker/api/swarm.py | 4 +- docker/api/volume.py | 2 +- docker/auth.py | 38 ++++--- docker/client.py | 4 +- docker/constants.py | 2 +- docker/context/api.py | 6 +- docker/context/config.py | 4 +- docker/context/context.py | 4 +- docker/credentials/store.py | 27 ++--- docker/errors.py | 26 ++--- docker/models/configs.py | 2 +- docker/models/images.py | 14 ++- docker/models/plugins.py | 5 +- docker/models/resource.py | 9 +- docker/models/secrets.py | 2 +- docker/models/swarm.py | 2 +- docker/tls.py | 2 +- docker/transport/basehttpadapter.py | 2 +- docker/transport/npipeconn.py | 17 ++- docker/transport/npipesocket.py | 8 +- docker/transport/sshconn.py | 27 +++-- docker/transport/ssladapter.py | 4 +- docker/transport/unixconn.py | 26 ++--- docker/types/base.py | 5 +- docker/types/containers.py | 42 ++++---- docker/types/daemon.py | 4 +- docker/types/healthcheck.py | 8 +- docker/types/services.py | 20 ++-- docker/utils/build.py | 25 ++--- docker/utils/config.py | 6 +- docker/utils/decorators.py | 2 +- docker/utils/fnmatch.py | 2 +- docker/utils/json_stream.py | 13 +-- docker/utils/ports.py | 2 +- docker/utils/socket.py | 14 ++- docker/utils/utils.py | 32 +++--- docker/version.py | 2 +- docs/conf.py | 19 ++-- scripts/versions.py | 4 +- setup.py | 1 - tests/helpers.py | 11 +- tests/integration/api_build_test.py | 19 ++-- tests/integration/api_client_test.py | 2 +- tests/integration/api_config_test.py | 4 +- tests/integration/api_container_test.py | 42 ++++---- tests/integration/api_exec_test.py | 2 +- tests/integration/api_image_test.py | 6 +- tests/integration/api_network_test.py | 2 +- tests/integration/api_secret_test.py | 4 +- tests/integration/api_service_test.py | 32 +++--- tests/integration/api_swarm_test.py | 4 +- 
tests/integration/base.py | 4 +- tests/integration/conftest.py | 6 +- tests/integration/credentials/store_test.py | 7 +- tests/integration/credentials/utils_test.py | 2 +- tests/integration/models_images_test.py | 22 ++-- tests/integration/regression_test.py | 10 +- tests/ssh/api_build_test.py | 19 ++-- tests/ssh/base.py | 2 +- tests/unit/api_container_test.py | 27 +++-- tests/unit/api_exec_test.py | 10 +- tests/unit/api_image_test.py | 2 +- tests/unit/api_network_test.py | 20 ++-- tests/unit/api_test.py | 44 ++++---- tests/unit/api_volume_test.py | 4 +- tests/unit/auth_test.py | 22 ++-- tests/unit/client_test.py | 2 +- tests/unit/dockertypes_test.py | 4 +- tests/unit/errors_test.py | 2 +- tests/unit/fake_api.py | 100 ++++++++--------- tests/unit/fake_api_client.py | 4 +- tests/unit/models_resources_test.py | 2 +- tests/unit/models_secrets_test.py | 2 +- tests/unit/models_services_test.py | 8 +- tests/unit/ssladapter_test.py | 38 +++---- tests/unit/swarm_test.py | 2 - tests/unit/utils_build_test.py | 112 ++++++++++---------- tests/unit/utils_config_test.py | 2 +- tests/unit/utils_json_stream_test.py | 12 +-- tests/unit/utils_proxy_test.py | 7 +- tests/unit/utils_test.py | 34 +++--- 92 files changed, 524 insertions(+), 658 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index 365129a0..aac43c46 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -12,7 +12,7 @@ from .. 
import utils log = logging.getLogger(__name__) -class BuildApiMixin(object): +class BuildApiMixin: def build(self, path=None, tag=None, quiet=False, fileobj=None, nocache=False, rm=False, timeout=None, custom_context=False, encoding=None, pull=False, @@ -132,7 +132,7 @@ class BuildApiMixin(object): for key in container_limits.keys(): if key not in constants.CONTAINER_LIMITS_KEYS: raise errors.DockerException( - 'Invalid container_limits key {0}'.format(key) + f'Invalid container_limits key {key}' ) if custom_context: @@ -150,7 +150,7 @@ class BuildApiMixin(object): dockerignore = os.path.join(path, '.dockerignore') exclude = None if os.path.exists(dockerignore): - with open(dockerignore, 'r') as f: + with open(dockerignore) as f: exclude = list(filter( lambda x: x != '' and x[0] != '#', [l.strip() for l in f.read().splitlines()] @@ -313,7 +313,7 @@ class BuildApiMixin(object): auth_data[auth.INDEX_URL] = auth_data.get(auth.INDEX_NAME, {}) log.debug( - 'Sending auth config ({0})'.format( + 'Sending auth config ({})'.format( ', '.join(repr(k) for k in auth_data.keys()) ) ) @@ -344,9 +344,9 @@ def process_dockerfile(dockerfile, path): if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or os.path.relpath(abs_dockerfile, path).startswith('..')): # Dockerfile not in context - read data to insert into tar later - with open(abs_dockerfile, 'r') as df: + with open(abs_dockerfile) as df: return ( - '.dockerfile.{0:x}'.format(random.getrandbits(160)), + f'.dockerfile.{random.getrandbits(160):x}', df.read() ) diff --git a/docker/api/client.py b/docker/api/client.py index ee9ad9c3..f0cb39b8 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -107,7 +107,7 @@ class APIClient( user_agent=DEFAULT_USER_AGENT, num_pools=None, credstore_env=None, use_ssh_client=False, max_pool_size=DEFAULT_MAX_POOL_SIZE): - super(APIClient, self).__init__() + super().__init__() if tls and not base_url: raise TLSParameterError( @@ -199,7 +199,7 @@ class APIClient( 
self._version = version if not isinstance(self._version, str): raise DockerException( - 'Version parameter must be a string or None. Found {0}'.format( + 'Version parameter must be a string or None. Found {}'.format( type(version).__name__ ) ) @@ -219,7 +219,7 @@ class APIClient( ) except Exception as e: raise DockerException( - 'Error while fetching server API version: {0}'.format(e) + f'Error while fetching server API version: {e}' ) def _set_request_timeout(self, kwargs): @@ -248,7 +248,7 @@ class APIClient( for arg in args: if not isinstance(arg, str): raise ValueError( - 'Expected a string but found {0} ({1}) ' + 'Expected a string but found {} ({}) ' 'instead'.format(arg, type(arg)) ) @@ -256,11 +256,11 @@ class APIClient( args = map(quote_f, args) if kwargs.get('versioned_api', True): - return '{0}/v{1}{2}'.format( + return '{}/v{}{}'.format( self.base_url, self._version, pathfmt.format(*args) ) else: - return '{0}{1}'.format(self.base_url, pathfmt.format(*args)) + return f'{self.base_url}{pathfmt.format(*args)}' def _raise_for_status(self, response): """Raises stored :class:`APIError`, if one occurred.""" @@ -341,8 +341,7 @@ class APIClient( if response.raw._fp.chunked: if decode: - for chunk in json_stream(self._stream_helper(response, False)): - yield chunk + yield from json_stream(self._stream_helper(response, False)) else: reader = response.raw while not reader.closed: @@ -398,8 +397,7 @@ class APIClient( def _stream_raw_result(self, response, chunk_size=1, decode=True): ''' Stream result for TTY-enabled container and raw binary data''' self._raise_for_status(response) - for out in response.iter_content(chunk_size, decode): - yield out + yield from response.iter_content(chunk_size, decode) def _read_from_socket(self, response, stream, tty=True, demux=False): socket = self._get_raw_response_socket(response) @@ -477,7 +475,7 @@ class APIClient( def get_adapter(self, url): try: - return super(APIClient, self).get_adapter(url) + return 
super().get_adapter(url) except requests.exceptions.InvalidSchema as e: if self._custom_adapter: return self._custom_adapter diff --git a/docker/api/config.py b/docker/api/config.py index 93e5168f..8cf74e1a 100644 --- a/docker/api/config.py +++ b/docker/api/config.py @@ -1,11 +1,9 @@ import base64 -import six - from .. import utils -class ConfigApiMixin(object): +class ConfigApiMixin: @utils.minimum_version('1.30') def create_config(self, name, data, labels=None): """ @@ -22,8 +20,7 @@ class ConfigApiMixin(object): data = data.encode('utf-8') data = base64.b64encode(data) - if six.PY3: - data = data.decode('ascii') + data = data.decode('ascii') body = { 'Data': data, 'Name': name, diff --git a/docker/api/container.py b/docker/api/container.py index 369eba95..83fcd4f6 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1,7 +1,5 @@ from datetime import datetime -import six - from .. import errors from .. import utils from ..constants import DEFAULT_DATA_CHUNK_SIZE @@ -12,7 +10,7 @@ from ..types import HostConfig from ..types import NetworkingConfig -class ContainerApiMixin(object): +class ContainerApiMixin: @utils.check_resource('container') def attach(self, container, stdout=True, stderr=True, stream=False, logs=False, demux=False): @@ -408,7 +406,7 @@ class ContainerApiMixin(object): :py:class:`docker.errors.APIError` If the server returns an error. 
""" - if isinstance(volumes, six.string_types): + if isinstance(volumes, str): volumes = [volumes, ] if isinstance(environment, dict): @@ -790,7 +788,7 @@ class ContainerApiMixin(object): url = self._url("/containers/{0}/kill", container) params = {} if signal is not None: - if not isinstance(signal, six.string_types): + if not isinstance(signal, str): signal = int(signal) params['signal'] = signal res = self._post(url, params=params) diff --git a/docker/api/daemon.py b/docker/api/daemon.py index 6b719268..a8572132 100644 --- a/docker/api/daemon.py +++ b/docker/api/daemon.py @@ -4,7 +4,7 @@ from datetime import datetime from .. import auth, types, utils -class DaemonApiMixin(object): +class DaemonApiMixin: @utils.minimum_version('1.25') def df(self): """ diff --git a/docker/api/exec_api.py b/docker/api/exec_api.py index 4c49ac33..496308a0 100644 --- a/docker/api/exec_api.py +++ b/docker/api/exec_api.py @@ -1,10 +1,8 @@ -import six - from .. import errors from .. import utils -class ExecApiMixin(object): +class ExecApiMixin: @utils.check_resource('container') def exec_create(self, container, cmd, stdout=True, stderr=True, stdin=False, tty=False, privileged=False, user='', @@ -45,7 +43,7 @@ class ExecApiMixin(object): 'Setting environment for exec is not supported in API < 1.25' ) - if isinstance(cmd, six.string_types): + if isinstance(cmd, str): cmd = utils.split_command(cmd) if isinstance(environment, dict): diff --git a/docker/api/image.py b/docker/api/image.py index 772101f4..772d8895 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -1,15 +1,13 @@ import logging import os -import six - from .. 
import auth, errors, utils from ..constants import DEFAULT_DATA_CHUNK_SIZE log = logging.getLogger(__name__) -class ImageApiMixin(object): +class ImageApiMixin: @utils.check_resource('image') def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE): @@ -130,7 +128,7 @@ class ImageApiMixin(object): params = _import_image_params( repository, tag, image, - src=(src if isinstance(src, six.string_types) else None), + src=(src if isinstance(src, str) else None), changes=changes ) headers = {'Content-Type': 'application/tar'} @@ -139,7 +137,7 @@ class ImageApiMixin(object): return self._result( self._post(u, data=None, params=params) ) - elif isinstance(src, six.string_types): # from file path + elif isinstance(src, str): # from file path with open(src, 'rb') as f: return self._result( self._post( @@ -571,7 +569,7 @@ class ImageApiMixin(object): def is_file(src): try: return ( - isinstance(src, six.string_types) and + isinstance(src, str) and os.path.isfile(src) ) except TypeError: # a data string will make isfile() raise a TypeError diff --git a/docker/api/network.py b/docker/api/network.py index 139c2d1a..0b76bf32 100644 --- a/docker/api/network.py +++ b/docker/api/network.py @@ -4,7 +4,7 @@ from ..utils import version_lt from .. import utils -class NetworkApiMixin(object): +class NetworkApiMixin: def networks(self, names=None, ids=None, filters=None): """ List networks. Similar to the ``docker network ls`` command. diff --git a/docker/api/plugin.py b/docker/api/plugin.py index f6c0b133..57110f11 100644 --- a/docker/api/plugin.py +++ b/docker/api/plugin.py @@ -1,9 +1,7 @@ -import six - from .. 
import auth, utils -class PluginApiMixin(object): +class PluginApiMixin: @utils.minimum_version('1.25') @utils.check_resource('name') def configure_plugin(self, name, options): @@ -21,7 +19,7 @@ class PluginApiMixin(object): url = self._url('/plugins/{0}/set', name) data = options if isinstance(data, dict): - data = ['{0}={1}'.format(k, v) for k, v in six.iteritems(data)] + data = [f'{k}={v}' for k, v in data.items()] res = self._post_json(url, data=data) self._raise_for_status(res) return True diff --git a/docker/api/secret.py b/docker/api/secret.py index e57952b5..cd440b95 100644 --- a/docker/api/secret.py +++ b/docker/api/secret.py @@ -1,12 +1,10 @@ import base64 -import six - from .. import errors from .. import utils -class SecretApiMixin(object): +class SecretApiMixin: @utils.minimum_version('1.25') def create_secret(self, name, data, labels=None, driver=None): """ @@ -25,8 +23,7 @@ class SecretApiMixin(object): data = data.encode('utf-8') data = base64.b64encode(data) - if six.PY3: - data = data.decode('ascii') + data = data.decode('ascii') body = { 'Data': data, 'Name': name, diff --git a/docker/api/service.py b/docker/api/service.py index e9027bfa..371f541e 100644 --- a/docker/api/service.py +++ b/docker/api/service.py @@ -45,7 +45,7 @@ def _check_api_features(version, task_template, update_config, endpoint_spec, if task_template is not None: if 'ForceUpdate' in task_template and utils.version_lt( version, '1.25'): - raise_version_error('force_update', '1.25') + raise_version_error('force_update', '1.25') if task_template.get('Placement'): if utils.version_lt(version, '1.30'): @@ -113,7 +113,7 @@ def _merge_task_template(current, override): return merged -class ServiceApiMixin(object): +class ServiceApiMixin: @utils.minimum_version('1.24') def create_service( self, task_template, name=None, labels=None, mode=None, diff --git a/docker/api/swarm.py b/docker/api/swarm.py index 897f08e4..2ec1aea5 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ 
-1,5 +1,5 @@ import logging -from six.moves import http_client +import http.client as http_client from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE from .. import errors from .. import types @@ -8,7 +8,7 @@ from .. import utils log = logging.getLogger(__name__) -class SwarmApiMixin(object): +class SwarmApiMixin: def create_swarm_spec(self, *args, **kwargs): """ diff --git a/docker/api/volume.py b/docker/api/volume.py index 900a6086..c6b26fe3 100644 --- a/docker/api/volume.py +++ b/docker/api/volume.py @@ -2,7 +2,7 @@ from .. import errors from .. import utils -class VolumeApiMixin(object): +class VolumeApiMixin: def volumes(self, filters=None): """ List volumes currently registered by the docker daemon. Similar to the diff --git a/docker/auth.py b/docker/auth.py index 6a07ea20..4fa798fc 100644 --- a/docker/auth.py +++ b/docker/auth.py @@ -2,14 +2,12 @@ import base64 import json import logging -import six - from . import credentials from . import errors from .utils import config INDEX_NAME = 'docker.io' -INDEX_URL = 'https://index.{0}/v1/'.format(INDEX_NAME) +INDEX_URL = f'https://index.{INDEX_NAME}/v1/' TOKEN_USERNAME = '' log = logging.getLogger(__name__) @@ -18,13 +16,13 @@ log = logging.getLogger(__name__) def resolve_repository_name(repo_name): if '://' in repo_name: raise errors.InvalidRepository( - 'Repository name cannot contain a scheme ({0})'.format(repo_name) + f'Repository name cannot contain a scheme ({repo_name})' ) index_name, remote_name = split_repo_name(repo_name) if index_name[0] == '-' or index_name[-1] == '-': raise errors.InvalidRepository( - 'Invalid index name ({0}). Cannot begin or end with a' + 'Invalid index name ({}). 
Cannot begin or end with a' ' hyphen.'.format(index_name) ) return resolve_index_name(index_name), remote_name @@ -98,10 +96,10 @@ class AuthConfig(dict): """ conf = {} - for registry, entry in six.iteritems(entries): + for registry, entry in entries.items(): if not isinstance(entry, dict): log.debug( - 'Config entry for key {0} is not auth config'.format( + 'Config entry for key {} is not auth config'.format( registry ) ) @@ -111,14 +109,14 @@ class AuthConfig(dict): # keys is not formatted properly. if raise_on_error: raise errors.InvalidConfigFile( - 'Invalid configuration for registry {0}'.format( + 'Invalid configuration for registry {}'.format( registry ) ) return {} if 'identitytoken' in entry: log.debug( - 'Found an IdentityToken entry for registry {0}'.format( + 'Found an IdentityToken entry for registry {}'.format( registry ) ) @@ -132,7 +130,7 @@ class AuthConfig(dict): # a valid value in the auths config. # https://github.com/docker/compose/issues/3265 log.debug( - 'Auth data for {0} is absent. Client might be using a ' + 'Auth data for {} is absent. Client might be using a ' 'credentials store instead.'.format(registry) ) conf[registry] = {} @@ -140,7 +138,7 @@ class AuthConfig(dict): username, password = decode_auth(entry['auth']) log.debug( - 'Found entry (registry={0}, username={1})' + 'Found entry (registry={}, username={})' .format(repr(registry), repr(username)) ) @@ -170,7 +168,7 @@ class AuthConfig(dict): try: with open(config_file) as f: config_dict = json.load(f) - except (IOError, KeyError, ValueError) as e: + except (OSError, KeyError, ValueError) as e: # Likely missing new Docker config file or it's in an # unknown format, continue to attempt to read old location # and format. 
@@ -230,7 +228,7 @@ class AuthConfig(dict): store_name = self.get_credential_store(registry) if store_name is not None: log.debug( - 'Using credentials store "{0}"'.format(store_name) + f'Using credentials store "{store_name}"' ) cfg = self._resolve_authconfig_credstore(registry, store_name) if cfg is not None: @@ -239,15 +237,15 @@ class AuthConfig(dict): # Default to the public index server registry = resolve_index_name(registry) if registry else INDEX_NAME - log.debug("Looking for auth entry for {0}".format(repr(registry))) + log.debug(f"Looking for auth entry for {repr(registry)}") if registry in self.auths: - log.debug("Found {0}".format(repr(registry))) + log.debug(f"Found {repr(registry)}") return self.auths[registry] - for key, conf in six.iteritems(self.auths): + for key, conf in self.auths.items(): if resolve_index_name(key) == registry: - log.debug("Found {0}".format(repr(key))) + log.debug(f"Found {repr(key)}") return conf log.debug("No entry found") @@ -258,7 +256,7 @@ class AuthConfig(dict): # The ecosystem is a little schizophrenic with index.docker.io VS # docker.io - in that case, it seems the full URL is necessary. 
registry = INDEX_URL - log.debug("Looking for auth entry for {0}".format(repr(registry))) + log.debug(f"Looking for auth entry for {repr(registry)}") store = self._get_store_instance(credstore_name) try: data = store.get(registry) @@ -278,7 +276,7 @@ class AuthConfig(dict): return None except credentials.StoreError as e: raise errors.DockerException( - 'Credentials store error: {0}'.format(repr(e)) + f'Credentials store error: {repr(e)}' ) def _get_store_instance(self, name): @@ -329,7 +327,7 @@ def convert_to_hostname(url): def decode_auth(auth): - if isinstance(auth, six.string_types): + if isinstance(auth, str): auth = auth.encode('ascii') s = base64.b64decode(auth) login, pwd = s.split(b':', 1) diff --git a/docker/client.py b/docker/client.py index 5add5d73..4dbd846f 100644 --- a/docker/client.py +++ b/docker/client.py @@ -13,7 +13,7 @@ from .models.volumes import VolumeCollection from .utils import kwargs_from_env -class DockerClient(object): +class DockerClient: """ A client for communicating with a Docker server. 
@@ -212,7 +212,7 @@ class DockerClient(object): close.__doc__ = APIClient.close.__doc__ def __getattr__(self, name): - s = ["'DockerClient' object has no attribute '{}'".format(name)] + s = [f"'DockerClient' object has no attribute '{name}'"] # If a user calls a method on APIClient, they if hasattr(APIClient, name): s.append("In Docker SDK for Python 2.0, this method is now on the " diff --git a/docker/constants.py b/docker/constants.py index 43fce613..218e4915 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -28,7 +28,7 @@ INSECURE_REGISTRY_DEPRECATION_WARNING = \ IS_WINDOWS_PLATFORM = (sys.platform == 'win32') WINDOWS_LONGPATH_PREFIX = '\\\\?\\' -DEFAULT_USER_AGENT = "docker-sdk-python/{0}".format(version) +DEFAULT_USER_AGENT = f"docker-sdk-python/{version}" DEFAULT_NUM_POOLS = 25 # The OpenSSH server default value for MaxSessions is 10 which means we can diff --git a/docker/context/api.py b/docker/context/api.py index c45115bc..380e8c4c 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -9,7 +9,7 @@ from docker.context.config import write_context_name_to_docker_config from docker.context import Context -class ContextAPI(object): +class ContextAPI: """Context API. Contains methods for context management: create, list, remove, get, inspect. 
@@ -109,7 +109,7 @@ class ContextAPI(object): if filename == METAFILE: try: data = json.load( - open(os.path.join(dirname, filename), "r")) + open(os.path.join(dirname, filename))) names.append(data["Name"]) except Exception as e: raise errors.ContextException( @@ -138,7 +138,7 @@ class ContextAPI(object): err = write_context_name_to_docker_config(name) if err: raise errors.ContextException( - 'Failed to set current context: {}'.format(err)) + f'Failed to set current context: {err}') @classmethod def remove_context(cls, name): diff --git a/docker/context/config.py b/docker/context/config.py index baf54f79..d761aef1 100644 --- a/docker/context/config.py +++ b/docker/context/config.py @@ -15,7 +15,7 @@ def get_current_context_name(): docker_cfg_path = find_config_file() if docker_cfg_path: try: - with open(docker_cfg_path, "r") as f: + with open(docker_cfg_path) as f: name = json.load(f).get("currentContext", "default") except Exception: return "default" @@ -29,7 +29,7 @@ def write_context_name_to_docker_config(name=None): config = {} if docker_cfg_path: try: - with open(docker_cfg_path, "r") as f: + with open(docker_cfg_path) as f: config = json.load(f) except Exception as e: return e diff --git a/docker/context/context.py b/docker/context/context.py index f4aff6b0..dbaa01cb 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -94,7 +94,7 @@ class Context: try: with open(meta_file) as f: metadata = json.load(f) - except (IOError, KeyError, ValueError) as e: + except (OSError, KeyError, ValueError) as e: # unknown format raise Exception("""Detected corrupted meta file for context {} : {}""".format(name, e)) @@ -171,7 +171,7 @@ class Context: rmtree(self.tls_path) def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" def __str__(self): return json.dumps(self.__call__(), indent=2) diff --git a/docker/credentials/store.py b/docker/credentials/store.py index 
00178889..e55976f1 100644 --- a/docker/credentials/store.py +++ b/docker/credentials/store.py @@ -2,15 +2,13 @@ import errno import json import subprocess -import six - from . import constants from . import errors from .utils import create_environment_dict from .utils import find_executable -class Store(object): +class Store: def __init__(self, program, environment=None): """ Create a store object that acts as an interface to perform the basic operations for storing, retrieving @@ -30,7 +28,7 @@ class Store(object): """ Retrieve credentials for `server`. If no credentials are found, a `StoreError` will be raised. """ - if not isinstance(server, six.binary_type): + if not isinstance(server, bytes): server = server.encode('utf-8') data = self._execute('get', server) result = json.loads(data.decode('utf-8')) @@ -41,7 +39,7 @@ class Store(object): # raise CredentialsNotFound if result['Username'] == '' and result['Secret'] == '': raise errors.CredentialsNotFound( - 'No matching credentials in {}'.format(self.program) + f'No matching credentials in {self.program}' ) return result @@ -61,7 +59,7 @@ class Store(object): """ Erase credentials for `server`. Raises a `StoreError` if an error occurs. 
""" - if not isinstance(server, six.binary_type): + if not isinstance(server, bytes): server = server.encode('utf-8') self._execute('erase', server) @@ -75,20 +73,9 @@ class Store(object): output = None env = create_environment_dict(self.environment) try: - if six.PY3: - output = subprocess.check_output( - [self.exe, subcmd], input=data_input, env=env, - ) - else: - process = subprocess.Popen( - [self.exe, subcmd], stdin=subprocess.PIPE, - stdout=subprocess.PIPE, env=env, - ) - output, _ = process.communicate(data_input) - if process.returncode != 0: - raise subprocess.CalledProcessError( - returncode=process.returncode, cmd='', output=output - ) + output = subprocess.check_output( + [self.exe, subcmd], input=data_input, env=env, + ) except subprocess.CalledProcessError as e: raise errors.process_store_error(e, self.program) except OSError as e: diff --git a/docker/errors.py b/docker/errors.py index ab30a290..ba952562 100644 --- a/docker/errors.py +++ b/docker/errors.py @@ -38,25 +38,25 @@ class APIError(requests.exceptions.HTTPError, DockerException): def __init__(self, message, response=None, explanation=None): # requests 1.2 supports response as a keyword argument, but # requests 1.1 doesn't - super(APIError, self).__init__(message) + super().__init__(message) self.response = response self.explanation = explanation def __str__(self): - message = super(APIError, self).__str__() + message = super().__str__() if self.is_client_error(): - message = '{0} Client Error for {1}: {2}'.format( + message = '{} Client Error for {}: {}'.format( self.response.status_code, self.response.url, self.response.reason) elif self.is_server_error(): - message = '{0} Server Error for {1}: {2}'.format( + message = '{} Server Error for {}: {}'.format( self.response.status_code, self.response.url, self.response.reason) if self.explanation: - message = '{0} ("{1}")'.format(message, self.explanation) + message = f'{message} ("{self.explanation}")' return message @@ -133,11 +133,11 @@ class 
ContainerError(DockerException): self.image = image self.stderr = stderr - err = ": {}".format(stderr) if stderr is not None else "" + err = f": {stderr}" if stderr is not None else "" msg = ("Command '{}' in image '{}' returned non-zero exit " "status {}{}").format(command, image, exit_status, err) - super(ContainerError, self).__init__(msg) + super().__init__(msg) class StreamParseError(RuntimeError): @@ -147,7 +147,7 @@ class StreamParseError(RuntimeError): class BuildError(DockerException): def __init__(self, reason, build_log): - super(BuildError, self).__init__(reason) + super().__init__(reason) self.msg = reason self.build_log = build_log @@ -157,8 +157,8 @@ class ImageLoadError(DockerException): def create_unexpected_kwargs_error(name, kwargs): - quoted_kwargs = ["'{}'".format(k) for k in sorted(kwargs)] - text = ["{}() ".format(name)] + quoted_kwargs = [f"'{k}'" for k in sorted(kwargs)] + text = [f"{name}() "] if len(quoted_kwargs) == 1: text.append("got an unexpected keyword argument ") else: @@ -172,7 +172,7 @@ class MissingContextParameter(DockerException): self.param = param def __str__(self): - return ("missing parameter: {}".format(self.param)) + return (f"missing parameter: {self.param}") class ContextAlreadyExists(DockerException): @@ -180,7 +180,7 @@ class ContextAlreadyExists(DockerException): self.name = name def __str__(self): - return ("context {} already exists".format(self.name)) + return (f"context {self.name} already exists") class ContextException(DockerException): @@ -196,4 +196,4 @@ class ContextNotFound(DockerException): self.name = name def __str__(self): - return ("context '{}' not found".format(self.name)) + return (f"context '{self.name}' not found") diff --git a/docker/models/configs.py b/docker/models/configs.py index 7f23f650..3588c8b5 100644 --- a/docker/models/configs.py +++ b/docker/models/configs.py @@ -7,7 +7,7 @@ class Config(Model): id_attribute = 'ID' def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, 
self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" @property def name(self): diff --git a/docker/models/images.py b/docker/models/images.py index 28cfc93c..46f8efee 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -2,8 +2,6 @@ import itertools import re import warnings -import six - from ..api import APIClient from ..constants import DEFAULT_DATA_CHUNK_SIZE from ..errors import BuildError, ImageLoadError, InvalidArgument @@ -17,7 +15,7 @@ class Image(Model): An image on the server. """ def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, "', '".join(self.tags)) + return "<{}: '{}'>".format(self.__class__.__name__, "', '".join(self.tags)) @property def labels(self): @@ -93,10 +91,10 @@ class Image(Model): img = self.id if named: img = self.tags[0] if self.tags else img - if isinstance(named, six.string_types): + if isinstance(named, str): if named not in self.tags: raise InvalidArgument( - "{} is not a valid tag for this image".format(named) + f"{named} is not a valid tag for this image" ) img = named @@ -127,7 +125,7 @@ class RegistryData(Model): Image metadata stored on the registry, including available platforms. """ def __init__(self, image_name, *args, **kwargs): - super(RegistryData, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.image_name = image_name @property @@ -180,7 +178,7 @@ class RegistryData(Model): parts = platform.split('/') if len(parts) > 3 or len(parts) < 1: raise InvalidArgument( - '"{0}" is not a valid platform descriptor'.format(platform) + f'"{platform}" is not a valid platform descriptor' ) platform = {'os': parts[0]} if len(parts) > 2: @@ -277,7 +275,7 @@ class ImageCollection(Collection): If neither ``path`` nor ``fileobj`` is specified. 
""" resp = self.client.api.build(**kwargs) - if isinstance(resp, six.string_types): + if isinstance(resp, str): return self.get(resp) last_event = None image_id = None diff --git a/docker/models/plugins.py b/docker/models/plugins.py index ae5851c9..37ecefbe 100644 --- a/docker/models/plugins.py +++ b/docker/models/plugins.py @@ -7,7 +7,7 @@ class Plugin(Model): A plugin on the server. """ def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" @property def name(self): @@ -117,8 +117,7 @@ class Plugin(Model): if remote is None: remote = self.name privileges = self.client.api.plugin_privileges(remote) - for d in self.client.api.upgrade_plugin(self.name, remote, privileges): - yield d + yield from self.client.api.upgrade_plugin(self.name, remote, privileges) self.reload() diff --git a/docker/models/resource.py b/docker/models/resource.py index ed3900af..dec2349f 100644 --- a/docker/models/resource.py +++ b/docker/models/resource.py @@ -1,5 +1,4 @@ - -class Model(object): +class Model: """ A base class for representing a single object on the server. """ @@ -18,13 +17,13 @@ class Model(object): self.attrs = {} def __repr__(self): - return "<%s: %s>" % (self.__class__.__name__, self.short_id) + return f"<{self.__class__.__name__}: {self.short_id}>" def __eq__(self, other): return isinstance(other, self.__class__) and self.id == other.id def __hash__(self): - return hash("%s:%s" % (self.__class__.__name__, self.id)) + return hash(f"{self.__class__.__name__}:{self.id}") @property def id(self): @@ -49,7 +48,7 @@ class Model(object): self.attrs = new_model.attrs -class Collection(object): +class Collection: """ A base class for representing all objects of a particular type on the server. 
diff --git a/docker/models/secrets.py b/docker/models/secrets.py index e2ee88af..da01d44c 100644 --- a/docker/models/secrets.py +++ b/docker/models/secrets.py @@ -7,7 +7,7 @@ class Secret(Model): id_attribute = 'ID' def __repr__(self): - return "<%s: '%s'>" % (self.__class__.__name__, self.name) + return f"<{self.__class__.__name__}: '{self.name}'>" @property def name(self): diff --git a/docker/models/swarm.py b/docker/models/swarm.py index 755c17db..b0b1a2ef 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -11,7 +11,7 @@ class Swarm(Model): id_attribute = 'ID' def __init__(self, *args, **kwargs): - super(Swarm, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if self.client: try: self.reload() diff --git a/docker/tls.py b/docker/tls.py index 1b297ab6..067d5563 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -5,7 +5,7 @@ from . import errors from .transport import SSLHTTPAdapter -class TLSConfig(object): +class TLSConfig: """ TLS configuration. diff --git a/docker/transport/basehttpadapter.py b/docker/transport/basehttpadapter.py index 4d819b66..dfbb193b 100644 --- a/docker/transport/basehttpadapter.py +++ b/docker/transport/basehttpadapter.py @@ -3,6 +3,6 @@ import requests.adapters class BaseHTTPAdapter(requests.adapters.HTTPAdapter): def close(self): - super(BaseHTTPAdapter, self).close() + super().close() if hasattr(self, 'pools'): self.pools.clear() diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py index 70d8519d..df67f212 100644 --- a/docker/transport/npipeconn.py +++ b/docker/transport/npipeconn.py @@ -1,14 +1,11 @@ -import six +import queue import requests.adapters from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants from .npipesocket import NpipeSocket -if six.PY3: - import http.client as httplib -else: - import httplib +import http.client as httplib try: import requests.packages.urllib3 as urllib3 @@ -18,9 +15,9 @@ except ImportError: RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer -class NpipeHTTPConnection(httplib.HTTPConnection, object): +class NpipeHTTPConnection(httplib.HTTPConnection): def __init__(self, npipe_path, timeout=60): - super(NpipeHTTPConnection, self).__init__( + super().__init__( 'localhost', timeout=timeout ) self.npipe_path = npipe_path @@ -35,7 +32,7 @@ class NpipeHTTPConnection(httplib.HTTPConnection, object): class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, npipe_path, timeout=60, maxsize=10): - super(NpipeHTTPConnectionPool, self).__init__( + super().__init__( 'localhost', timeout=timeout, maxsize=maxsize ) self.npipe_path = npipe_path @@ -57,7 +54,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): except AttributeError: # self.pool is None raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") - except six.moves.queue.Empty: + except queue.Empty: if self.block: raise urllib3.exceptions.EmptyPoolError( self, @@ -85,7 +82,7 @@ class NpipeHTTPAdapter(BaseHTTPAdapter): self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(NpipeHTTPAdapter, self).__init__() + super().__init__() def get_connection(self, url, proxies=None): with self.pools.lock: diff --git a/docker/transport/npipesocket.py b/docker/transport/npipesocket.py index 176b5c87..766372ae 100644 --- a/docker/transport/npipesocket.py +++ b/docker/transport/npipesocket.py @@ -2,7 +2,6 @@ import functools import time import io -import six import win32file import win32pipe @@ -24,7 +23,7 @@ def check_closed(f): return wrapped -class NpipeSocket(object): +class NpipeSocket: """ Partial implementation of the socket API over windows named pipes. 
This implementation is only designed to be used as a client socket, and server-specific methods (bind, listen, accept...) are not @@ -128,9 +127,6 @@ class NpipeSocket(object): @check_closed def recv_into(self, buf, nbytes=0): - if six.PY2: - return self._recv_into_py2(buf, nbytes) - readbuf = buf if not isinstance(buf, memoryview): readbuf = memoryview(buf) @@ -195,7 +191,7 @@ class NpipeFileIOBase(io.RawIOBase): self.sock = npipe_socket def close(self): - super(NpipeFileIOBase, self).close() + super().close() self.sock = None def fileno(self): diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index fb5c6bbe..3ca45c4c 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -1,6 +1,7 @@ import paramiko +import queue +import urllib.parse import requests.adapters -import six import logging import os import signal @@ -10,10 +11,7 @@ import subprocess from docker.transport.basehttpadapter import BaseHTTPAdapter from .. import constants -if six.PY3: - import http.client as httplib -else: - import httplib +import http.client as httplib try: import requests.packages.urllib3 as urllib3 @@ -25,7 +23,7 @@ RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer class SSHSocket(socket.socket): def __init__(self, host): - super(SSHSocket, self).__init__( + super().__init__( socket.AF_INET, socket.SOCK_STREAM) self.host = host self.port = None @@ -90,8 +88,7 @@ class SSHSocket(socket.socket): def makefile(self, mode): if not self.proc: self.connect() - if six.PY3: - self.proc.stdout.channel = self + self.proc.stdout.channel = self return self.proc.stdout @@ -103,9 +100,9 @@ class SSHSocket(socket.socket): self.proc.terminate() -class SSHConnection(httplib.HTTPConnection, object): +class SSHConnection(httplib.HTTPConnection): def __init__(self, ssh_transport=None, timeout=60, host=None): - super(SSHConnection, self).__init__( + super().__init__( 'localhost', timeout=timeout ) self.ssh_transport = ssh_transport @@ -129,7 +126,7 
@@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): scheme = 'ssh' def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None): - super(SSHConnectionPool, self).__init__( + super().__init__( 'localhost', timeout=timeout, maxsize=maxsize ) self.ssh_transport = None @@ -152,7 +149,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): except AttributeError: # self.pool is None raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.") - except six.moves.queue.Empty: + except queue.Empty: if self.block: raise urllib3.exceptions.EmptyPoolError( self, @@ -188,12 +185,12 @@ class SSHHTTPAdapter(BaseHTTPAdapter): self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(SSHHTTPAdapter, self).__init__() + super().__init__() def _create_paramiko_client(self, base_url): logging.getLogger("paramiko").setLevel(logging.WARNING) self.ssh_client = paramiko.SSHClient() - base_url = six.moves.urllib_parse.urlparse(base_url) + base_url = urllib.parse.urlparse(base_url) self.ssh_params = { "hostname": base_url.hostname, "port": base_url.port, @@ -252,6 +249,6 @@ class SSHHTTPAdapter(BaseHTTPAdapter): return pool def close(self): - super(SSHHTTPAdapter, self).close() + super().close() if self.ssh_client: self.ssh_client.close() diff --git a/docker/transport/ssladapter.py b/docker/transport/ssladapter.py index 12de76cd..31e3014e 100644 --- a/docker/transport/ssladapter.py +++ b/docker/transport/ssladapter.py @@ -36,7 +36,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter): self.ssl_version = ssl_version self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint - super(SSLHTTPAdapter, self).__init__(**kwargs) + super().__init__(**kwargs) def init_poolmanager(self, connections, maxsize, block=False): kwargs = { @@ -59,7 +59,7 @@ class SSLHTTPAdapter(BaseHTTPAdapter): But we still need to take care of when there is a proxy poolmanager """ - conn = super(SSLHTTPAdapter, 
self).get_connection(*args, **kwargs) + conn = super().get_connection(*args, **kwargs) if conn.assert_hostname != self.assert_hostname: conn.assert_hostname = self.assert_hostname return conn diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index 3e040c5a..adb6f18a 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -1,7 +1,6 @@ -import six import requests.adapters import socket -from six.moves import http_client as httplib +import http.client as httplib from docker.transport.basehttpadapter import BaseHTTPAdapter from .. import constants @@ -15,21 +14,10 @@ except ImportError: RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer -class UnixHTTPResponse(httplib.HTTPResponse, object): - def __init__(self, sock, *args, **kwargs): - disable_buffering = kwargs.pop('disable_buffering', False) - if six.PY2: - # FIXME: We may need to disable buffering on Py3 as well, - # but there's no clear way to do it at the moment. See: - # https://github.com/docker/docker-py/issues/1799 - kwargs['buffering'] = not disable_buffering - super(UnixHTTPResponse, self).__init__(sock, *args, **kwargs) - - -class UnixHTTPConnection(httplib.HTTPConnection, object): +class UnixHTTPConnection(httplib.HTTPConnection): def __init__(self, base_url, unix_socket, timeout=60): - super(UnixHTTPConnection, self).__init__( + super().__init__( 'localhost', timeout=timeout ) self.base_url = base_url @@ -44,7 +32,7 @@ class UnixHTTPConnection(httplib.HTTPConnection, object): self.sock = sock def putheader(self, header, *values): - super(UnixHTTPConnection, self).putheader(header, *values) + super().putheader(header, *values) if header == 'Connection' and 'Upgrade' in values: self.disable_buffering = True @@ -52,12 +40,12 @@ class UnixHTTPConnection(httplib.HTTPConnection, object): if self.disable_buffering: kwargs['disable_buffering'] = True - return UnixHTTPResponse(sock, *args, **kwargs) + return httplib.HTTPResponse(sock, *args, **kwargs) 
class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): def __init__(self, base_url, socket_path, timeout=60, maxsize=10): - super(UnixHTTPConnectionPool, self).__init__( + super().__init__( 'localhost', timeout=timeout, maxsize=maxsize ) self.base_url = base_url @@ -89,7 +77,7 @@ class UnixHTTPAdapter(BaseHTTPAdapter): self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(UnixHTTPAdapter, self).__init__() + super().__init__() def get_connection(self, url, proxies=None): with self.pools.lock: diff --git a/docker/types/base.py b/docker/types/base.py index 68910623..8851f1e2 100644 --- a/docker/types/base.py +++ b/docker/types/base.py @@ -1,7 +1,4 @@ -import six - - class DictType(dict): def __init__(self, init): - for k, v in six.iteritems(init): + for k, v in init.items(): self[k] = v diff --git a/docker/types/containers.py b/docker/types/containers.py index 9fa4656a..f1b60b2d 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -1,5 +1,3 @@ -import six - from .. 
import errors from ..utils.utils import ( convert_port_bindings, convert_tmpfs_mounts, convert_volume_binds, @@ -10,7 +8,7 @@ from .base import DictType from .healthcheck import Healthcheck -class LogConfigTypesEnum(object): +class LogConfigTypesEnum: _values = ( 'json-file', 'syslog', @@ -61,7 +59,7 @@ class LogConfig(DictType): if config and not isinstance(config, dict): raise ValueError("LogConfig.config must be a dictionary") - super(LogConfig, self).__init__({ + super().__init__({ 'Type': log_driver_type, 'Config': config }) @@ -117,13 +115,13 @@ class Ulimit(DictType): name = kwargs.get('name', kwargs.get('Name')) soft = kwargs.get('soft', kwargs.get('Soft')) hard = kwargs.get('hard', kwargs.get('Hard')) - if not isinstance(name, six.string_types): + if not isinstance(name, str): raise ValueError("Ulimit.name must be a string") if soft and not isinstance(soft, int): raise ValueError("Ulimit.soft must be an integer") if hard and not isinstance(hard, int): raise ValueError("Ulimit.hard must be an integer") - super(Ulimit, self).__init__({ + super().__init__({ 'Name': name, 'Soft': soft, 'Hard': hard @@ -184,7 +182,7 @@ class DeviceRequest(DictType): if driver is None: driver = '' - elif not isinstance(driver, six.string_types): + elif not isinstance(driver, str): raise ValueError('DeviceRequest.driver must be a string') if count is None: count = 0 @@ -203,7 +201,7 @@ class DeviceRequest(DictType): elif not isinstance(options, dict): raise ValueError('DeviceRequest.options must be a dict') - super(DeviceRequest, self).__init__({ + super().__init__({ 'Driver': driver, 'Count': count, 'DeviceIDs': device_ids, @@ -297,7 +295,7 @@ class HostConfig(dict): self['MemorySwappiness'] = mem_swappiness if shm_size is not None: - if isinstance(shm_size, six.string_types): + if isinstance(shm_size, str): shm_size = parse_bytes(shm_size) self['ShmSize'] = shm_size @@ -358,7 +356,7 @@ class HostConfig(dict): self['Devices'] = parse_devices(devices) if group_add: - 
self['GroupAdd'] = [six.text_type(grp) for grp in group_add] + self['GroupAdd'] = [str(grp) for grp in group_add] if dns is not None: self['Dns'] = dns @@ -378,11 +376,11 @@ class HostConfig(dict): if not isinstance(sysctls, dict): raise host_config_type_error('sysctls', sysctls, 'dict') self['Sysctls'] = {} - for k, v in six.iteritems(sysctls): - self['Sysctls'][k] = six.text_type(v) + for k, v in sysctls.items(): + self['Sysctls'][k] = str(v) if volumes_from is not None: - if isinstance(volumes_from, six.string_types): + if isinstance(volumes_from, str): volumes_from = volumes_from.split(',') self['VolumesFrom'] = volumes_from @@ -404,7 +402,7 @@ class HostConfig(dict): if isinstance(lxc_conf, dict): formatted = [] - for k, v in six.iteritems(lxc_conf): + for k, v in lxc_conf.items(): formatted.append({'Key': k, 'Value': str(v)}) lxc_conf = formatted @@ -559,7 +557,7 @@ class HostConfig(dict): self["PidsLimit"] = pids_limit if isolation: - if not isinstance(isolation, six.string_types): + if not isinstance(isolation, str): raise host_config_type_error('isolation', isolation, 'string') if version_lt(version, '1.24'): raise host_config_version_error('isolation', '1.24') @@ -609,7 +607,7 @@ class HostConfig(dict): self['CpuPercent'] = cpu_percent if nano_cpus: - if not isinstance(nano_cpus, six.integer_types): + if not isinstance(nano_cpus, int): raise host_config_type_error('nano_cpus', nano_cpus, 'int') if version_lt(version, '1.25'): raise host_config_version_error('nano_cpus', '1.25') @@ -699,17 +697,17 @@ class ContainerConfig(dict): 'version 1.29' ) - if isinstance(command, six.string_types): + if isinstance(command, str): command = split_command(command) - if isinstance(entrypoint, six.string_types): + if isinstance(entrypoint, str): entrypoint = split_command(entrypoint) if isinstance(environment, dict): environment = format_environment(environment) if isinstance(labels, list): - labels = dict((lbl, six.text_type('')) for lbl in labels) + labels = {lbl: '' 
for lbl in labels} if isinstance(ports, list): exposed_ports = {} @@ -720,10 +718,10 @@ class ContainerConfig(dict): if len(port_definition) == 2: proto = port_definition[1] port = port_definition[0] - exposed_ports['{0}/{1}'.format(port, proto)] = {} + exposed_ports[f'{port}/{proto}'] = {} ports = exposed_ports - if isinstance(volumes, six.string_types): + if isinstance(volumes, str): volumes = [volumes, ] if isinstance(volumes, list): @@ -752,7 +750,7 @@ class ContainerConfig(dict): 'Hostname': hostname, 'Domainname': domainname, 'ExposedPorts': ports, - 'User': six.text_type(user) if user is not None else None, + 'User': str(user) if user is not None else None, 'Tty': tty, 'OpenStdin': stdin_open, 'StdinOnce': stdin_once, diff --git a/docker/types/daemon.py b/docker/types/daemon.py index af3e5bcb..10e81014 100644 --- a/docker/types/daemon.py +++ b/docker/types/daemon.py @@ -8,7 +8,7 @@ except ImportError: from ..errors import DockerException -class CancellableStream(object): +class CancellableStream: """ Stream wrapper for real-time events, logs, etc. from the server. 
@@ -32,7 +32,7 @@ class CancellableStream(object): return next(self._stream) except urllib3.exceptions.ProtocolError: raise StopIteration - except socket.error: + except OSError: raise StopIteration next = __next__ diff --git a/docker/types/healthcheck.py b/docker/types/healthcheck.py index 9815018d..dfc88a97 100644 --- a/docker/types/healthcheck.py +++ b/docker/types/healthcheck.py @@ -1,7 +1,5 @@ from .base import DictType -import six - class Healthcheck(DictType): """ @@ -31,7 +29,7 @@ class Healthcheck(DictType): """ def __init__(self, **kwargs): test = kwargs.get('test', kwargs.get('Test')) - if isinstance(test, six.string_types): + if isinstance(test, str): test = ["CMD-SHELL", test] interval = kwargs.get('interval', kwargs.get('Interval')) @@ -39,7 +37,7 @@ class Healthcheck(DictType): retries = kwargs.get('retries', kwargs.get('Retries')) start_period = kwargs.get('start_period', kwargs.get('StartPeriod')) - super(Healthcheck, self).__init__({ + super().__init__({ 'Test': test, 'Interval': interval, 'Timeout': timeout, @@ -53,7 +51,7 @@ class Healthcheck(DictType): @test.setter def test(self, value): - if isinstance(value, six.string_types): + if isinstance(value, str): value = ["CMD-SHELL", value] self['Test'] = value diff --git a/docker/types/services.py b/docker/types/services.py index 29498e97..a6dd76e3 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -1,5 +1,3 @@ -import six - from .. 
import errors from ..constants import IS_WINDOWS_PLATFORM from ..utils import ( @@ -121,7 +119,7 @@ class ContainerSpec(dict): privileges=None, isolation=None, init=None): self['Image'] = image - if isinstance(command, six.string_types): + if isinstance(command, str): command = split_command(command) self['Command'] = command self['Args'] = args @@ -151,7 +149,7 @@ class ContainerSpec(dict): if mounts is not None: parsed_mounts = [] for mount in mounts: - if isinstance(mount, six.string_types): + if isinstance(mount, str): parsed_mounts.append(Mount.parse_mount_string(mount)) else: # If mount already parsed @@ -224,7 +222,7 @@ class Mount(dict): self['Source'] = source if type not in ('bind', 'volume', 'tmpfs', 'npipe'): raise errors.InvalidArgument( - 'Unsupported mount type: "{}"'.format(type) + f'Unsupported mount type: "{type}"' ) self['Type'] = type self['ReadOnly'] = read_only @@ -260,7 +258,7 @@ class Mount(dict): elif type == 'tmpfs': tmpfs_opts = {} if tmpfs_mode: - if not isinstance(tmpfs_mode, six.integer_types): + if not isinstance(tmpfs_mode, int): raise errors.InvalidArgument( 'tmpfs_mode must be an integer' ) @@ -280,7 +278,7 @@ class Mount(dict): parts = string.split(':') if len(parts) > 3: raise errors.InvalidArgument( - 'Invalid mount format "{0}"'.format(string) + f'Invalid mount format "{string}"' ) if len(parts) == 1: return cls(target=parts[0], source=None) @@ -347,7 +345,7 @@ def _convert_generic_resources_dict(generic_resources): ' (found {})'.format(type(generic_resources)) ) resources = [] - for kind, value in six.iteritems(generic_resources): + for kind, value in generic_resources.items(): resource_type = None if isinstance(value, int): resource_type = 'DiscreteResourceSpec' @@ -443,7 +441,7 @@ class RollbackConfig(UpdateConfig): pass -class RestartConditionTypesEnum(object): +class RestartConditionTypesEnum: _values = ( 'none', 'on-failure', @@ -474,7 +472,7 @@ class RestartPolicy(dict): max_attempts=0, window=0): if condition not in 
self.condition_types._values: raise TypeError( - 'Invalid RestartPolicy condition {0}'.format(condition) + f'Invalid RestartPolicy condition {condition}' ) self['Condition'] = condition @@ -533,7 +531,7 @@ def convert_service_ports(ports): ) result = [] - for k, v in six.iteritems(ports): + for k, v in ports.items(): port_spec = { 'Protocol': 'tcp', 'PublishedPort': k diff --git a/docker/utils/build.py b/docker/utils/build.py index 5787cab0..ac060434 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -4,8 +4,6 @@ import re import tarfile import tempfile -import six - from .fnmatch import fnmatch from ..constants import IS_WINDOWS_PLATFORM @@ -69,7 +67,7 @@ def create_archive(root, files=None, fileobj=None, gzip=False, t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj) if files is None: files = build_file_list(root) - extra_names = set(e[0] for e in extra_files) + extra_names = {e[0] for e in extra_files} for path in files: if path in extra_names: # Extra files override context files with the same name @@ -95,9 +93,9 @@ def create_archive(root, files=None, fileobj=None, gzip=False, try: with open(full_path, 'rb') as f: t.addfile(i, f) - except IOError: - raise IOError( - 'Can not read file in context: {}'.format(full_path) + except OSError: + raise OSError( + f'Can not read file in context: {full_path}' ) else: # Directories, FIFOs, symlinks... don't need to be read. 
@@ -119,12 +117,8 @@ def mkbuildcontext(dockerfile): t = tarfile.open(mode='w', fileobj=f) if isinstance(dockerfile, io.StringIO): dfinfo = tarfile.TarInfo('Dockerfile') - if six.PY3: - raise TypeError('Please use io.BytesIO to create in-memory ' - 'Dockerfiles with Python 3') - else: - dfinfo.size = len(dockerfile.getvalue()) - dockerfile.seek(0) + raise TypeError('Please use io.BytesIO to create in-memory ' + 'Dockerfiles with Python 3') elif isinstance(dockerfile, io.BytesIO): dfinfo = tarfile.TarInfo('Dockerfile') dfinfo.size = len(dockerfile.getvalue()) @@ -154,7 +148,7 @@ def walk(root, patterns, default=True): # Heavily based on # https://github.com/moby/moby/blob/master/pkg/fileutils/fileutils.go -class PatternMatcher(object): +class PatternMatcher: def __init__(self, patterns): self.patterns = list(filter( lambda p: p.dirs, [Pattern(p) for p in patterns] @@ -212,13 +206,12 @@ class PatternMatcher(object): break if skip: continue - for sub in rec_walk(cur): - yield sub + yield from rec_walk(cur) return rec_walk(root) -class Pattern(object): +class Pattern: def __init__(self, pattern_str): self.exclusion = False if pattern_str.startswith('!'): diff --git a/docker/utils/config.py b/docker/utils/config.py index 82a0e2a5..8e24959a 100644 --- a/docker/utils/config.py +++ b/docker/utils/config.py @@ -18,11 +18,11 @@ def find_config_file(config_path=None): os.path.join(home_dir(), LEGACY_DOCKER_CONFIG_FILENAME), # 4 ])) - log.debug("Trying paths: {0}".format(repr(paths))) + log.debug(f"Trying paths: {repr(paths)}") for path in paths: if os.path.exists(path): - log.debug("Found file at path: {0}".format(path)) + log.debug(f"Found file at path: {path}") return path log.debug("No config file found") @@ -57,7 +57,7 @@ def load_general_config(config_path=None): try: with open(config_file) as f: return json.load(f) - except (IOError, ValueError) as e: + except (OSError, ValueError) as e: # In the case of a legacy `.dockercfg` file, we won't # be able to load any JSON 
data. log.debug(e) diff --git a/docker/utils/decorators.py b/docker/utils/decorators.py index c975d4b4..cf1baf49 100644 --- a/docker/utils/decorators.py +++ b/docker/utils/decorators.py @@ -27,7 +27,7 @@ def minimum_version(version): def wrapper(self, *args, **kwargs): if utils.version_lt(self._version, version): raise errors.InvalidVersion( - '{0} is not available for version < {1}'.format( + '{} is not available for version < {}'.format( f.__name__, version ) ) diff --git a/docker/utils/fnmatch.py b/docker/utils/fnmatch.py index cc940a2e..90e9f60f 100644 --- a/docker/utils/fnmatch.py +++ b/docker/utils/fnmatch.py @@ -108,7 +108,7 @@ def translate(pat): stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff - res = '%s[%s]' % (res, stuff) + res = f'{res}[{stuff}]' else: res = res + re.escape(c) diff --git a/docker/utils/json_stream.py b/docker/utils/json_stream.py index addffdf2..f384175f 100644 --- a/docker/utils/json_stream.py +++ b/docker/utils/json_stream.py @@ -1,11 +1,6 @@ -from __future__ import absolute_import -from __future__ import unicode_literals - import json import json.decoder -import six - from ..errors import StreamParseError @@ -20,7 +15,7 @@ def stream_as_text(stream): instead of byte streams. """ for data in stream: - if not isinstance(data, six.text_type): + if not isinstance(data, str): data = data.decode('utf-8', 'replace') yield data @@ -46,8 +41,8 @@ def json_stream(stream): return split_buffer(stream, json_splitter, json_decoder.decode) -def line_splitter(buffer, separator=u'\n'): - index = buffer.find(six.text_type(separator)) +def line_splitter(buffer, separator='\n'): + index = buffer.find(str(separator)) if index == -1: return None return buffer[:index + 1], buffer[index + 1:] @@ -61,7 +56,7 @@ def split_buffer(stream, splitter=None, decoder=lambda a: a): of the input. 
""" splitter = splitter or line_splitter - buffered = six.text_type('') + buffered = '' for data in stream_as_text(stream): buffered += data diff --git a/docker/utils/ports.py b/docker/utils/ports.py index 10b19d74..e8139366 100644 --- a/docker/utils/ports.py +++ b/docker/utils/ports.py @@ -49,7 +49,7 @@ def port_range(start, end, proto, randomly_available_port=False): if not end: return [start + proto] if randomly_available_port: - return ['{}-{}'.format(start, end) + proto] + return [f'{start}-{end}' + proto] return [str(port) + proto for port in range(int(start), int(end) + 1)] diff --git a/docker/utils/socket.py b/docker/utils/socket.py index 7ba95055..4a2076ec 100644 --- a/docker/utils/socket.py +++ b/docker/utils/socket.py @@ -4,8 +4,6 @@ import select import socket as pysocket import struct -import six - try: from ..transport import NpipeSocket except ImportError: @@ -27,16 +25,16 @@ def read(socket, n=4096): recoverable_errors = (errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK) - if six.PY3 and not isinstance(socket, NpipeSocket): + if not isinstance(socket, NpipeSocket): select.select([socket], [], []) try: if hasattr(socket, 'recv'): return socket.recv(n) - if six.PY3 and isinstance(socket, getattr(pysocket, 'SocketIO')): + if isinstance(socket, getattr(pysocket, 'SocketIO')): return socket.read(n) return os.read(socket.fileno(), n) - except EnvironmentError as e: + except OSError as e: if e.errno not in recoverable_errors: raise @@ -46,7 +44,7 @@ def read_exactly(socket, n): Reads exactly n bytes from socket Raises SocketError if there isn't enough data """ - data = six.binary_type() + data = bytes() while len(data) < n: next_data = read(socket, n - len(data)) if not next_data: @@ -134,7 +132,7 @@ def consume_socket_output(frames, demux=False): if demux is False: # If the streams are multiplexed, the generator returns strings, that # we just need to concatenate. 
- return six.binary_type().join(frames) + return bytes().join(frames) # If the streams are demultiplexed, the generator yields tuples # (stdout, stderr) @@ -166,4 +164,4 @@ def demux_adaptor(stream_id, data): elif stream_id == STDERR: return (None, data) else: - raise ValueError('{0} is not a valid stream'.format(stream_id)) + raise ValueError(f'{stream_id} is not a valid stream') diff --git a/docker/utils/utils.py b/docker/utils/utils.py index f703cbd3..f7c3dd7d 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -136,13 +136,13 @@ def convert_volume_binds(binds): mode = 'rw' result.append( - str('{0}:{1}:{2}').format(k, bind, mode) + f'{k}:{bind}:{mode}' ) else: if isinstance(v, bytes): v = v.decode('utf-8') result.append( - str('{0}:{1}:rw').format(k, v) + f'{k}:{v}:rw' ) return result @@ -233,14 +233,14 @@ def parse_host(addr, is_win32=False, tls=False): if proto not in ('tcp', 'unix', 'npipe', 'ssh'): raise errors.DockerException( - "Invalid bind address protocol: {}".format(addr) + f"Invalid bind address protocol: {addr}" ) if proto == 'tcp' and not parsed_url.netloc: # "tcp://" is exceptionally disallowed by convention; # omitting a hostname for other protocols is fine raise errors.DockerException( - 'Invalid bind address format: {}'.format(addr) + f'Invalid bind address format: {addr}' ) if any([ @@ -248,7 +248,7 @@ def parse_host(addr, is_win32=False, tls=False): parsed_url.password ]): raise errors.DockerException( - 'Invalid bind address format: {}'.format(addr) + f'Invalid bind address format: {addr}' ) if parsed_url.path and proto == 'ssh': @@ -285,8 +285,8 @@ def parse_host(addr, is_win32=False, tls=False): proto = 'http+unix' if proto in ('http+unix', 'npipe'): - return "{}://{}".format(proto, path).rstrip('/') - return '{0}://{1}:{2}{3}'.format(proto, host, port, path).rstrip('/') + return f"{proto}://{path}".rstrip('/') + return f'{proto}://{host}:{port}{path}'.rstrip('/') def parse_devices(devices): @@ -297,7 +297,7 @@ def 
parse_devices(devices): continue if not isinstance(device, str): raise errors.DockerException( - 'Invalid device type {0}'.format(type(device)) + f'Invalid device type {type(device)}' ) device_mapping = device.split(':') if device_mapping: @@ -408,7 +408,7 @@ def parse_bytes(s): digits = float(digits_part) except ValueError: raise errors.DockerException( - 'Failed converting the string value for memory ({0}) to' + 'Failed converting the string value for memory ({}) to' ' an integer.'.format(digits_part) ) @@ -416,7 +416,7 @@ def parse_bytes(s): s = int(digits * units[suffix]) else: raise errors.DockerException( - 'The specified value for memory ({0}) should specify the' + 'The specified value for memory ({}) should specify the' ' units. The postfix should be one of the `b` `k` `m` `g`' ' characters'.format(s) ) @@ -428,7 +428,7 @@ def normalize_links(links): if isinstance(links, dict): links = iter(links.items()) - return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)] + return [f'{k}:{v}' if v else k for k, v in sorted(links)] def parse_env_file(env_file): @@ -438,7 +438,7 @@ def parse_env_file(env_file): """ environment = {} - with open(env_file, 'r') as f: + with open(env_file) as f: for line in f: if line[0] == '#': @@ -454,7 +454,7 @@ def parse_env_file(env_file): environment[k] = v else: raise errors.DockerException( - 'Invalid line in environment file {0}:\n{1}'.format( + 'Invalid line in environment file {}:\n{}'.format( env_file, line)) return environment @@ -471,7 +471,7 @@ def format_environment(environment): if isinstance(value, bytes): value = value.decode('utf-8') - return u'{key}={value}'.format(key=key, value=value) + return f'{key}={value}' return [format_env(*var) for var in iter(environment.items())] @@ -479,11 +479,11 @@ def format_extra_hosts(extra_hosts, task=False): # Use format dictated by Swarm API if container is part of a task if task: return [ - '{} {}'.format(v, k) for k, v in sorted(iter(extra_hosts.items())) + f'{v} 
{k}' for k, v in sorted(iter(extra_hosts.items())) ] return [ - '{}:{}'.format(k, v) for k, v in sorted(iter(extra_hosts.items())) + f'{k}:{v}' for k, v in sorted(iter(extra_hosts.items())) ] diff --git a/docker/version.py b/docker/version.py index bc09e637..35541041 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ version = "4.5.0-dev" -version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) +version_info = tuple(int(d) for d in version.split("-")[0].split(".")) diff --git a/docs/conf.py b/docs/conf.py index f46d1f76..2b0a7195 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # docker-sdk-python documentation build configuration file, created by # sphinx-quickstart on Wed Sep 14 15:48:58 2016. @@ -60,21 +59,21 @@ source_suffix = ['.rst', '.md'] master_doc = 'index' # General information about the project. -project = u'Docker SDK for Python' +project = 'Docker SDK for Python' year = datetime.datetime.now().year -copyright = u'%d Docker Inc' % year -author = u'Docker Inc' +copyright = '%d Docker Inc' % year +author = 'Docker Inc' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # -with open('../docker/version.py', 'r') as vfile: +with open('../docker/version.py') as vfile: exec(vfile.read()) # The full version, including alpha/beta/rc tags. release = version # The short X.Y version. -version = '{}.{}'.format(version_info[0], version_info[1]) +version = f'{version_info[0]}.{version_info[1]}' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -283,8 +282,8 @@ latex_elements = { # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - (master_doc, 'docker-sdk-python.tex', u'docker-sdk-python Documentation', - u'Docker Inc.', 'manual'), + (master_doc, 'docker-sdk-python.tex', 'docker-sdk-python Documentation', + 'Docker Inc.', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -325,7 +324,7 @@ latex_documents = [ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation', + (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation', [author], 1) ] @@ -340,7 +339,7 @@ man_pages = [ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'docker-sdk-python', u'docker-sdk-python Documentation', + (master_doc, 'docker-sdk-python', 'docker-sdk-python Documentation', author, 'docker-sdk-python', 'One line description of project.', 'Miscellaneous'), ] diff --git a/scripts/versions.py b/scripts/versions.py index 4bdcb74d..75e5355e 100755 --- a/scripts/versions.py +++ b/scripts/versions.py @@ -52,8 +52,8 @@ class Version(namedtuple('_Version', 'major minor patch stage edition')): return (int(self.major), int(self.minor), int(self.patch)) + stage def __str__(self): - stage = '-{}'.format(self.stage) if self.stage else '' - edition = '-{}'.format(self.edition) if self.edition else '' + stage = f'-{self.stage}' if self.stage else '' + edition = f'-{self.edition}' if self.edition else '' return '.'.join(map(str, self[:3])) + edition + stage diff --git a/setup.py b/setup.py index ec1a51de..a966fea2 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -from __future__ import print_function import codecs import os diff --git a/tests/helpers.py b/tests/helpers.py index f344e1c3..63cbe2e6 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -11,7 +11,6 @@ import time import docker import paramiko import pytest 
-import six def make_tree(dirs, files): @@ -54,7 +53,7 @@ def requires_api_version(version): return pytest.mark.skipif( docker.utils.version_lt(test_version, version), - reason="API version is too low (< {0})".format(version) + reason=f"API version is too low (< {version})" ) @@ -86,7 +85,7 @@ def wait_on_condition(condition, delay=0.1, timeout=40): def random_name(): - return u'dockerpytest_{0:x}'.format(random.getrandbits(64)) + return f'dockerpytest_{random.getrandbits(64):x}' def force_leave_swarm(client): @@ -105,11 +104,11 @@ def force_leave_swarm(client): def swarm_listen_addr(): - return '0.0.0.0:{0}'.format(random.randrange(10000, 25000)) + return f'0.0.0.0:{random.randrange(10000, 25000)}' def assert_cat_socket_detached_with_keys(sock, inputs): - if six.PY3 and hasattr(sock, '_sock'): + if hasattr(sock, '_sock'): sock = sock._sock for i in inputs: @@ -128,7 +127,7 @@ def assert_cat_socket_detached_with_keys(sock, inputs): # of the daemon no longer cause this to raise an error. 
try: sock.sendall(b'make sure the socket is closed\n') - except socket.error: + except OSError: return sock.sendall(b"make sure the socket is closed\n") diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index b830a106..ef48e12e 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -7,7 +7,6 @@ from docker import errors from docker.utils.proxy import ProxyConfig import pytest -import six from .base import BaseAPIIntegrationTest, TEST_IMG from ..helpers import random_name, requires_api_version, requires_experimental @@ -71,9 +70,8 @@ class BuildTest(BaseAPIIntegrationTest): assert len(logs) > 0 def test_build_from_stringio(self): - if six.PY3: - return - script = io.StringIO(six.text_type('\n').join([ + return + script = io.StringIO('\n'.join([ 'FROM busybox', 'RUN mkdir -p /tmp/test', 'EXPOSE 8080', @@ -83,8 +81,7 @@ class BuildTest(BaseAPIIntegrationTest): stream = self.client.build(fileobj=script) logs = '' for chunk in stream: - if six.PY3: - chunk = chunk.decode('utf-8') + chunk = chunk.decode('utf-8') logs += chunk assert logs != '' @@ -135,8 +132,7 @@ class BuildTest(BaseAPIIntegrationTest): self.client.wait(c) logs = self.client.logs(c) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert sorted(list(filter(None, logs.split('\n')))) == sorted([ '/test/#file.txt', @@ -340,8 +336,7 @@ class BuildTest(BaseAPIIntegrationTest): assert self.client.inspect_image(img_name) ctnr = self.run_container(img_name, 'cat /hosts-file') logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert '127.0.0.1\textrahost.local.test' in logs assert '127.0.0.1\thello.world.test' in logs @@ -376,7 +371,7 @@ class BuildTest(BaseAPIIntegrationTest): snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' script = io.BytesIO(b'\n'.join([ b'FROM busybox', - 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') + 
f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8') ])) stream = self.client.build( @@ -440,7 +435,7 @@ class BuildTest(BaseAPIIntegrationTest): @requires_api_version('1.32') @requires_experimental(until=None) def test_build_invalid_platform(self): - script = io.BytesIO('FROM busybox\n'.encode('ascii')) + script = io.BytesIO(b'FROM busybox\n') with pytest.raises(errors.APIError) as excinfo: stream = self.client.build(fileobj=script, platform='foobar') diff --git a/tests/integration/api_client_test.py b/tests/integration/api_client_test.py index 9e348f3e..d1622fa8 100644 --- a/tests/integration/api_client_test.py +++ b/tests/integration/api_client_test.py @@ -72,6 +72,6 @@ class UnixconnTest(unittest.TestCase): client.close() del client - assert len(w) == 0, "No warnings produced: {0}".format( + assert len(w) == 0, "No warnings produced: {}".format( w[0].message ) diff --git a/tests/integration/api_config_test.py b/tests/integration/api_config_test.py index 0ffd7675..72cbb431 100644 --- a/tests/integration/api_config_test.py +++ b/tests/integration/api_config_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import docker import pytest @@ -31,7 +29,7 @@ class ConfigAPITest(BaseAPIIntegrationTest): def test_create_config_unicode_data(self): config_id = self.client.create_config( - 'favorite_character', u'いざよいさくや' + 'favorite_character', 'いざよいさくや' ) self.tmp_configs.append(config_id) assert 'ID' in config_id diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 3087045b..9da2cfbf 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -34,7 +34,7 @@ class ListContainersTest(BaseAPIIntegrationTest): assert len(retrieved) == 1 retrieved = retrieved[0] assert 'Command' in retrieved - assert retrieved['Command'] == str('true') + assert retrieved['Command'] == 'true' assert 'Image' in retrieved assert re.search(r'alpine:.*', retrieved['Image']) assert 'Status' in retrieved @@ 
-104,10 +104,10 @@ class CreateContainerTest(BaseAPIIntegrationTest): assert self.client.wait(container3_id)['StatusCode'] == 0 logs = self.client.logs(container3_id).decode('utf-8') - assert '{0}_NAME='.format(link_env_prefix1) in logs - assert '{0}_ENV_FOO=1'.format(link_env_prefix1) in logs - assert '{0}_NAME='.format(link_env_prefix2) in logs - assert '{0}_ENV_FOO=1'.format(link_env_prefix2) in logs + assert f'{link_env_prefix1}_NAME=' in logs + assert f'{link_env_prefix1}_ENV_FOO=1' in logs + assert f'{link_env_prefix2}_NAME=' in logs + assert f'{link_env_prefix2}_ENV_FOO=1' in logs def test_create_with_restart_policy(self): container = self.client.create_container( @@ -487,7 +487,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): ) class VolumeBindTest(BaseAPIIntegrationTest): def setUp(self): - super(VolumeBindTest, self).setUp() + super().setUp() self.mount_dest = '/mnt' @@ -618,7 +618,7 @@ class ArchiveTest(BaseAPIIntegrationTest): def test_get_file_archive_from_container(self): data = 'The Maid and the Pocket Watch of Blood' ctnr = self.client.create_container( - TEST_IMG, 'sh -c "echo {0} > /vol1/data.txt"'.format(data), + TEST_IMG, f'sh -c "echo {data} > /vol1/data.txt"', volumes=['/vol1'] ) self.tmp_containers.append(ctnr) @@ -636,7 +636,7 @@ class ArchiveTest(BaseAPIIntegrationTest): def test_get_file_stat_from_container(self): data = 'The Maid and the Pocket Watch of Blood' ctnr = self.client.create_container( - TEST_IMG, 'sh -c "echo -n {0} > /vol1/data.txt"'.format(data), + TEST_IMG, f'sh -c "echo -n {data} > /vol1/data.txt"', volumes=['/vol1'] ) self.tmp_containers.append(ctnr) @@ -655,7 +655,7 @@ class ArchiveTest(BaseAPIIntegrationTest): test_file.seek(0) ctnr = self.client.create_container( TEST_IMG, - 'cat {0}'.format( + 'cat {}'.format( os.path.join('/vol1/', os.path.basename(test_file.name)) ), volumes=['/vol1'] @@ -701,7 +701,7 @@ class RenameContainerTest(BaseAPIIntegrationTest): if version == '1.5.0': assert name == inspect['Name'] 
else: - assert '/{0}'.format(name) == inspect['Name'] + assert f'/{name}' == inspect['Name'] class StartContainerTest(BaseAPIIntegrationTest): @@ -807,7 +807,7 @@ class LogsTest(BaseAPIIntegrationTest): def test_logs(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo {0}'.format(snippet) + TEST_IMG, f'echo {snippet}' ) id = container['Id'] self.tmp_containers.append(id) @@ -821,7 +821,7 @@ class LogsTest(BaseAPIIntegrationTest): snippet = '''Line1 Line2''' container = self.client.create_container( - TEST_IMG, 'echo "{0}"'.format(snippet) + TEST_IMG, f'echo "{snippet}"' ) id = container['Id'] self.tmp_containers.append(id) @@ -834,7 +834,7 @@ Line2''' def test_logs_streaming_and_follow(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo {0}'.format(snippet) + TEST_IMG, f'echo {snippet}' ) id = container['Id'] self.tmp_containers.append(id) @@ -854,7 +854,7 @@ Line2''' def test_logs_streaming_and_follow_and_cancel(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'sh -c "echo \\"{0}\\" && sleep 3"'.format(snippet) + TEST_IMG, f'sh -c "echo \\"{snippet}\\" && sleep 3"' ) id = container['Id'] self.tmp_containers.append(id) @@ -872,7 +872,7 @@ Line2''' def test_logs_with_dict_instead_of_id(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo {0}'.format(snippet) + TEST_IMG, f'echo {snippet}' ) id = container['Id'] self.tmp_containers.append(id) @@ -885,7 +885,7 @@ Line2''' def test_logs_with_tail_0(self): snippet = 'Flowering Nights (Sakuya Iyazoi)' container = self.client.create_container( - TEST_IMG, 'echo "{0}"'.format(snippet) + TEST_IMG, f'echo "{snippet}"' ) id = container['Id'] self.tmp_containers.append(id) @@ -899,7 +899,7 @@ Line2''' def test_logs_with_until(self): snippet = 'Shanghai Teahouse (Hong Meiling)' container = 
self.client.create_container( - TEST_IMG, 'echo "{0}"'.format(snippet) + TEST_IMG, f'echo "{snippet}"' ) self.tmp_containers.append(container) @@ -1095,7 +1095,7 @@ class ContainerTopTest(BaseAPIIntegrationTest): self.client.start(container) res = self.client.top(container) if not IS_WINDOWS_PLATFORM: - assert res['Titles'] == [u'PID', u'USER', u'TIME', u'COMMAND'] + assert res['Titles'] == ['PID', 'USER', 'TIME', 'COMMAND'] assert len(res['Processes']) == 1 assert res['Processes'][0][-1] == 'sleep 60' self.client.kill(container) @@ -1113,7 +1113,7 @@ class ContainerTopTest(BaseAPIIntegrationTest): self.client.start(container) res = self.client.top(container, '-eopid,user') - assert res['Titles'] == [u'PID', u'USER'] + assert res['Titles'] == ['PID', 'USER'] assert len(res['Processes']) == 1 assert res['Processes'][0][10] == 'sleep 60' @@ -1203,7 +1203,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): def test_run_container_reading_socket(self): line = 'hi there and stuff and things, words!' 
# `echo` appends CRLF, `printf` doesn't - command = "printf '{0}'".format(line) + command = f"printf '{line}'" container = self.client.create_container(TEST_IMG, command, detach=True, tty=False) self.tmp_containers.append(container) @@ -1487,7 +1487,7 @@ class LinkTest(BaseAPIIntegrationTest): # Remove link linked_name = self.client.inspect_container(container2_id)['Name'][1:] - link_name = '%s/%s' % (linked_name, link_alias) + link_name = f'{linked_name}/{link_alias}' self.client.remove_container(link_name, link=True) # Link is gone diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index 554e8629..4d7748f5 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -239,7 +239,7 @@ class ExecDemuxTest(BaseAPIIntegrationTest): ) def setUp(self): - super(ExecDemuxTest, self).setUp() + super().setUp() self.container = self.client.create_container( TEST_IMG, 'cat', detach=True, stdin_open=True ) diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index d5f89893..e30de46c 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -265,7 +265,7 @@ class ImportImageTest(BaseAPIIntegrationTest): output = self.client.load_image(data) assert any([ line for line in output - if 'Loaded image: {}'.format(test_img) in line.get('stream', '') + if f'Loaded image: {test_img}' in line.get('stream', '') ]) @contextlib.contextmanager @@ -284,7 +284,7 @@ class ImportImageTest(BaseAPIIntegrationTest): thread.setDaemon(True) thread.start() - yield 'http://%s:%s' % (socket.gethostname(), server.server_address[1]) + yield f'http://{socket.gethostname()}:{server.server_address[1]}' server.shutdown() @@ -350,7 +350,7 @@ class SaveLoadImagesTest(BaseAPIIntegrationTest): result = self.client.load_image(f.read()) success = False - result_line = 'Loaded image: {}\n'.format(TEST_IMG) + result_line = f'Loaded image: {TEST_IMG}\n' for data in result: print(data) 
if 'stream' in data: diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py index af22da8d..25681384 100644 --- a/tests/integration/api_network_test.py +++ b/tests/integration/api_network_test.py @@ -9,7 +9,7 @@ from .base import BaseAPIIntegrationTest, TEST_IMG class TestNetworks(BaseAPIIntegrationTest): def tearDown(self): self.client.leave_swarm(force=True) - super(TestNetworks, self).tearDown() + super().tearDown() def create_network(self, *args, **kwargs): net_name = random_name() diff --git a/tests/integration/api_secret_test.py b/tests/integration/api_secret_test.py index b3d93b8f..fd985434 100644 --- a/tests/integration/api_secret_test.py +++ b/tests/integration/api_secret_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import docker import pytest @@ -31,7 +29,7 @@ class SecretAPITest(BaseAPIIntegrationTest): def test_create_secret_unicode_data(self): secret_id = self.client.create_secret( - 'favorite_character', u'いざよいさくや' + 'favorite_character', 'いざよいさくや' ) self.tmp_secrets.append(secret_id) assert 'ID' in secret_id diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index 1bee46e5..19a6f154 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import random import time @@ -30,10 +28,10 @@ class ServiceTest(BaseAPIIntegrationTest): self.client.remove_service(service['ID']) except docker.errors.APIError: pass - super(ServiceTest, self).tearDown() + super().tearDown() def get_service_name(self): - return 'dockerpytest_{0:x}'.format(random.getrandbits(64)) + return f'dockerpytest_{random.getrandbits(64):x}' def get_service_container(self, service_name, attempts=20, interval=0.5, include_stopped=False): @@ -54,7 +52,7 @@ class ServiceTest(BaseAPIIntegrationTest): def create_simple_service(self, name=None, labels=None): if name: - name = 'dockerpytest_{0}'.format(name) + name = f'dockerpytest_{name}' 
else: name = self.get_service_name() @@ -403,20 +401,20 @@ class ServiceTest(BaseAPIIntegrationTest): node_id = self.client.nodes()[0]['ID'] container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) task_tmpl = docker.types.TaskTemplate( - container_spec, placement=['node.id=={}'.format(node_id)] + container_spec, placement=[f'node.id=={node_id}'] ) name = self.get_service_name() svc_id = self.client.create_service(task_tmpl, name=name) svc_info = self.client.inspect_service(svc_id) assert 'Placement' in svc_info['Spec']['TaskTemplate'] assert (svc_info['Spec']['TaskTemplate']['Placement'] == - {'Constraints': ['node.id=={}'.format(node_id)]}) + {'Constraints': [f'node.id=={node_id}']}) def test_create_service_with_placement_object(self): node_id = self.client.nodes()[0]['ID'] container_spec = docker.types.ContainerSpec(TEST_IMG, ['true']) placemt = docker.types.Placement( - constraints=['node.id=={}'.format(node_id)] + constraints=[f'node.id=={node_id}'] ) task_tmpl = docker.types.TaskTemplate( container_spec, placement=placemt @@ -508,7 +506,7 @@ class ServiceTest(BaseAPIIntegrationTest): assert port['TargetPort'] == 1990 assert port['Protocol'] == 'udp' else: - self.fail('Invalid port specification: {0}'.format(port)) + self.fail(f'Invalid port specification: {port}') assert len(ports) == 3 @@ -670,14 +668,14 @@ class ServiceTest(BaseAPIIntegrationTest): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat /run/secrets/{0}'.format(secret_name) + container, f'cat /run/secrets/{secret_name}' ) assert self.client.exec_start(exec_id) == secret_data @requires_api_version('1.25') def test_create_service_with_unicode_secret(self): secret_name = 'favorite_touhou' - secret_data = u'東方花映塚' + secret_data = '東方花映塚' secret_id = self.client.create_secret(secret_name, secret_data) self.tmp_secrets.append(secret_id) secret_ref = docker.types.SecretReference(secret_id, secret_name) @@ -695,7 +693,7 
@@ class ServiceTest(BaseAPIIntegrationTest): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat /run/secrets/{0}'.format(secret_name) + container, f'cat /run/secrets/{secret_name}' ) container_secret = self.client.exec_start(exec_id) container_secret = container_secret.decode('utf-8') @@ -722,14 +720,14 @@ class ServiceTest(BaseAPIIntegrationTest): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat /{0}'.format(config_name) + container, f'cat /{config_name}' ) assert self.client.exec_start(exec_id) == config_data @requires_api_version('1.30') def test_create_service_with_unicode_config(self): config_name = 'favorite_touhou' - config_data = u'東方花映塚' + config_data = '東方花映塚' config_id = self.client.create_config(config_name, config_data) self.tmp_configs.append(config_id) config_ref = docker.types.ConfigReference(config_id, config_name) @@ -747,7 +745,7 @@ class ServiceTest(BaseAPIIntegrationTest): container = self.get_service_container(name) assert container is not None exec_id = self.client.exec_create( - container, 'cat /{0}'.format(config_name) + container, f'cat /{config_name}' ) container_config = self.client.exec_start(exec_id) container_config = container_config.decode('utf-8') @@ -1136,7 +1134,7 @@ class ServiceTest(BaseAPIIntegrationTest): assert port['TargetPort'] == 1990 assert port['Protocol'] == 'udp' else: - self.fail('Invalid port specification: {0}'.format(port)) + self.fail(f'Invalid port specification: {port}') assert len(ports) == 3 @@ -1163,7 +1161,7 @@ class ServiceTest(BaseAPIIntegrationTest): assert port['TargetPort'] == 1990 assert port['Protocol'] == 'udp' else: - self.fail('Invalid port specification: {0}'.format(port)) + self.fail(f'Invalid port specification: {port}') assert len(ports) == 3 diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index 
f1cbc264..48c0592c 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -8,7 +8,7 @@ from .base import BaseAPIIntegrationTest class SwarmTest(BaseAPIIntegrationTest): def setUp(self): - super(SwarmTest, self).setUp() + super().setUp() force_leave_swarm(self.client) self._unlock_key = None @@ -19,7 +19,7 @@ class SwarmTest(BaseAPIIntegrationTest): except docker.errors.APIError: pass force_leave_swarm(self.client) - super(SwarmTest, self).tearDown() + super().tearDown() @requires_api_version('1.24') def test_init_swarm_simple(self): diff --git a/tests/integration/base.py b/tests/integration/base.py index a7613f69..031079c9 100644 --- a/tests/integration/base.py +++ b/tests/integration/base.py @@ -75,11 +75,11 @@ class BaseAPIIntegrationTest(BaseIntegrationTest): """ def setUp(self): - super(BaseAPIIntegrationTest, self).setUp() + super().setUp() self.client = self.get_client_instance() def tearDown(self): - super(BaseAPIIntegrationTest, self).tearDown() + super().tearDown() self.client.close() @staticmethod diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index ec48835d..ae945955 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import sys import warnings @@ -17,11 +15,11 @@ def setup_test_session(): try: c.inspect_image(TEST_IMG) except docker.errors.NotFound: - print("\npulling {0}".format(TEST_IMG), file=sys.stderr) + print(f"\npulling {TEST_IMG}", file=sys.stderr) for data in c.pull(TEST_IMG, stream=True, decode=True): status = data.get("status") progress = data.get("progress") - detail = "{0} - {1}".format(status, progress) + detail = f"{status} - {progress}" print(detail, file=sys.stderr) # Double make sure we now have busybox diff --git a/tests/integration/credentials/store_test.py b/tests/integration/credentials/store_test.py index dd543e24..d0cfd541 100644 --- a/tests/integration/credentials/store_test.py 
+++ b/tests/integration/credentials/store_test.py @@ -3,7 +3,6 @@ import random import sys import pytest -import six from distutils.spawn import find_executable from docker.credentials import ( @@ -12,7 +11,7 @@ from docker.credentials import ( ) -class TestStore(object): +class TestStore: def teardown_method(self): for server in self.tmp_keys: try: @@ -33,7 +32,7 @@ class TestStore(object): self.store = Store(DEFAULT_OSX_STORE) def get_random_servername(self): - res = 'pycreds_test_{:x}'.format(random.getrandbits(32)) + res = f'pycreds_test_{random.getrandbits(32):x}' self.tmp_keys.append(res) return res @@ -61,7 +60,7 @@ class TestStore(object): def test_unicode_strings(self): key = self.get_random_servername() - key = six.u(key) + key = key self.store.store(server=key, username='user', secret='pass') data = self.store.get(key) assert data diff --git a/tests/integration/credentials/utils_test.py b/tests/integration/credentials/utils_test.py index ad55f321..d7b2a1a4 100644 --- a/tests/integration/credentials/utils_test.py +++ b/tests/integration/credentials/utils_test.py @@ -5,7 +5,7 @@ from docker.credentials.utils import create_environment_dict try: from unittest import mock except ImportError: - import mock + from unittest import mock @mock.patch.dict(os.environ) diff --git a/tests/integration/models_images_test.py b/tests/integration/models_images_test.py index 0d60f37b..94aa2010 100644 --- a/tests/integration/models_images_test.py +++ b/tests/integration/models_images_test.py @@ -13,8 +13,8 @@ class ImageCollectionTest(BaseIntegrationTest): def test_build(self): client = docker.from_env(version=TEST_API_VERSION) image, _ = client.images.build(fileobj=io.BytesIO( - "FROM alpine\n" - "CMD echo hello world".encode('ascii') + b"FROM alpine\n" + b"CMD echo hello world" )) self.tmp_imgs.append(image.id) assert client.containers.run(image) == b"hello world\n" @@ -24,8 +24,8 @@ class ImageCollectionTest(BaseIntegrationTest): client = 
docker.from_env(version=TEST_API_VERSION) with pytest.raises(docker.errors.BuildError) as cm: client.images.build(fileobj=io.BytesIO( - "FROM alpine\n" - "RUN exit 1".encode('ascii') + b"FROM alpine\n" + b"RUN exit 1" )) assert ( "The command '/bin/sh -c exit 1' returned a non-zero code: 1" @@ -36,8 +36,8 @@ class ImageCollectionTest(BaseIntegrationTest): client = docker.from_env(version=TEST_API_VERSION) image, _ = client.images.build( tag='some-tag', fileobj=io.BytesIO( - "FROM alpine\n" - "CMD echo hello world".encode('ascii') + b"FROM alpine\n" + b"CMD echo hello world" ) ) self.tmp_imgs.append(image.id) @@ -47,8 +47,8 @@ class ImageCollectionTest(BaseIntegrationTest): client = docker.from_env(version=TEST_API_VERSION) image, _ = client.images.build( tag='dup-txt-tag', fileobj=io.BytesIO( - "FROM alpine\n" - "CMD echo Successfully built abcd1234".encode('ascii') + b"FROM alpine\n" + b"CMD echo Successfully built abcd1234" ) ) self.tmp_imgs.append(image.id) @@ -119,7 +119,7 @@ class ImageCollectionTest(BaseIntegrationTest): self.tmp_imgs.append(additional_tag) image.reload() with tempfile.TemporaryFile() as f: - stream = image.save(named='{}:latest'.format(additional_tag)) + stream = image.save(named=f'{additional_tag}:latest') for chunk in stream: f.write(chunk) @@ -129,7 +129,7 @@ class ImageCollectionTest(BaseIntegrationTest): assert len(result) == 1 assert result[0].id == image.id - assert '{}:latest'.format(additional_tag) in result[0].tags + assert f'{additional_tag}:latest' in result[0].tags def test_save_name_error(self): client = docker.from_env(version=TEST_API_VERSION) @@ -143,7 +143,7 @@ class ImageTest(BaseIntegrationTest): def test_tag_and_remove(self): repo = 'dockersdk.tests.images.test_tag' tag = 'some-tag' - identifier = '{}:{}'.format(repo, tag) + identifier = f'{repo}:{tag}' client = docker.from_env(version=TEST_API_VERSION) image = client.images.pull('alpine:latest') diff --git a/tests/integration/regression_test.py 
b/tests/integration/regression_test.py index a63883c4..deb9aff1 100644 --- a/tests/integration/regression_test.py +++ b/tests/integration/regression_test.py @@ -2,7 +2,6 @@ import io import random import docker -import six from .base import BaseAPIIntegrationTest, TEST_IMG import pytest @@ -39,8 +38,7 @@ class TestRegressions(BaseAPIIntegrationTest): self.client.start(ctnr) self.client.wait(ctnr) logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert logs == '1000\n' def test_792_explicit_port_protocol(self): @@ -56,10 +54,10 @@ class TestRegressions(BaseAPIIntegrationTest): self.client.start(ctnr) assert self.client.port( ctnr, 2000 - )[0]['HostPort'] == six.text_type(tcp_port) + )[0]['HostPort'] == str(tcp_port) assert self.client.port( ctnr, '2000/tcp' - )[0]['HostPort'] == six.text_type(tcp_port) + )[0]['HostPort'] == str(tcp_port) assert self.client.port( ctnr, '2000/udp' - )[0]['HostPort'] == six.text_type(udp_port) + )[0]['HostPort'] == str(udp_port) diff --git a/tests/ssh/api_build_test.py b/tests/ssh/api_build_test.py index b830a106..ef48e12e 100644 --- a/tests/ssh/api_build_test.py +++ b/tests/ssh/api_build_test.py @@ -7,7 +7,6 @@ from docker import errors from docker.utils.proxy import ProxyConfig import pytest -import six from .base import BaseAPIIntegrationTest, TEST_IMG from ..helpers import random_name, requires_api_version, requires_experimental @@ -71,9 +70,8 @@ class BuildTest(BaseAPIIntegrationTest): assert len(logs) > 0 def test_build_from_stringio(self): - if six.PY3: - return - script = io.StringIO(six.text_type('\n').join([ + return + script = io.StringIO('\n'.join([ 'FROM busybox', 'RUN mkdir -p /tmp/test', 'EXPOSE 8080', @@ -83,8 +81,7 @@ class BuildTest(BaseAPIIntegrationTest): stream = self.client.build(fileobj=script) logs = '' for chunk in stream: - if six.PY3: - chunk = chunk.decode('utf-8') + chunk = chunk.decode('utf-8') logs += chunk assert logs != '' @@ -135,8 +132,7 @@ class 
BuildTest(BaseAPIIntegrationTest): self.client.wait(c) logs = self.client.logs(c) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert sorted(list(filter(None, logs.split('\n')))) == sorted([ '/test/#file.txt', @@ -340,8 +336,7 @@ class BuildTest(BaseAPIIntegrationTest): assert self.client.inspect_image(img_name) ctnr = self.run_container(img_name, 'cat /hosts-file') logs = self.client.logs(ctnr) - if six.PY3: - logs = logs.decode('utf-8') + logs = logs.decode('utf-8') assert '127.0.0.1\textrahost.local.test' in logs assert '127.0.0.1\thello.world.test' in logs @@ -376,7 +371,7 @@ class BuildTest(BaseAPIIntegrationTest): snippet = 'Ancient Temple (Mystic Oriental Dream ~ Ancient Temple)' script = io.BytesIO(b'\n'.join([ b'FROM busybox', - 'RUN sh -c ">&2 echo \'{0}\'"'.format(snippet).encode('utf-8') + f'RUN sh -c ">&2 echo \'{snippet}\'"'.encode('utf-8') ])) stream = self.client.build( @@ -440,7 +435,7 @@ class BuildTest(BaseAPIIntegrationTest): @requires_api_version('1.32') @requires_experimental(until=None) def test_build_invalid_platform(self): - script = io.BytesIO('FROM busybox\n'.encode('ascii')) + script = io.BytesIO(b'FROM busybox\n') with pytest.raises(errors.APIError) as excinfo: stream = self.client.build(fileobj=script, platform='foobar') diff --git a/tests/ssh/base.py b/tests/ssh/base.py index c723d823..4825227f 100644 --- a/tests/ssh/base.py +++ b/tests/ssh/base.py @@ -79,7 +79,7 @@ class BaseAPIIntegrationTest(BaseIntegrationTest): cls.client.pull(TEST_IMG) def tearDown(self): - super(BaseAPIIntegrationTest, self).tearDown() + super().tearDown() self.client.close() @staticmethod diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index 8a0577e7..1ebd37df 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import datetime import json import signal @@ -7,7 +5,6 @@ import signal import docker from docker.api import 
APIClient import pytest -import six from . import fake_api from ..helpers import requires_api_version @@ -19,7 +16,7 @@ from .api_test import ( try: from unittest import mock except ImportError: - import mock + from unittest import mock def fake_inspect_container_tty(self, container): @@ -771,7 +768,7 @@ class CreateContainerTest(BaseAPIClientTest): def test_create_container_with_device_requests(self): client = APIClient(version='1.40') fake_api.fake_responses.setdefault( - '{0}/v1.40/containers/create'.format(fake_api.prefix), + f'{fake_api.prefix}/v1.40/containers/create', fake_api.post_fake_create_container, ) client.create_container( @@ -831,8 +828,8 @@ class CreateContainerTest(BaseAPIClientTest): def test_create_container_with_labels_dict(self): labels_dict = { - six.text_type('foo'): six.text_type('1'), - six.text_type('bar'): six.text_type('2'), + 'foo': '1', + 'bar': '2', } self.client.create_container( @@ -848,12 +845,12 @@ class CreateContainerTest(BaseAPIClientTest): def test_create_container_with_labels_list(self): labels_list = [ - six.text_type('foo'), - six.text_type('bar'), + 'foo', + 'bar', ] labels_dict = { - six.text_type('foo'): six.text_type(), - six.text_type('bar'): six.text_type(), + 'foo': '', + 'bar': '', } self.client.create_container( @@ -1013,11 +1010,11 @@ class CreateContainerTest(BaseAPIClientTest): def test_create_container_with_unicode_envvars(self): envvars_dict = { - 'foo': u'☃', + 'foo': '☃', } expected = [ - u'foo=☃' + 'foo=☃' ] self.client.create_container( @@ -1138,7 +1135,7 @@ class ContainerTest(BaseAPIClientTest): stream=False ) - assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + assert logs == b'Flowering Nights\n(Sakuya Iyazoi)\n' def test_logs_with_dict_instead_of_id(self): with mock.patch('docker.api.client.APIClient.inspect_container', @@ -1154,7 +1151,7 @@ class ContainerTest(BaseAPIClientTest): stream=False ) - assert logs == 'Flowering Nights\n(Sakuya Iyazoi)\n'.encode('ascii') + assert logs 
== b'Flowering Nights\n(Sakuya Iyazoi)\n' def test_log_streaming(self): with mock.patch('docker.api.client.APIClient.inspect_container', diff --git a/tests/unit/api_exec_test.py b/tests/unit/api_exec_test.py index a9d2dd5b..45042508 100644 --- a/tests/unit/api_exec_test.py +++ b/tests/unit/api_exec_test.py @@ -11,7 +11,7 @@ class ExecTest(BaseAPIClientTest): self.client.exec_create(fake_api.FAKE_CONTAINER_ID, ['ls', '-1']) args = fake_request.call_args - assert 'POST' == args[0][0], url_prefix + 'containers/{0}/exec'.format( + assert 'POST' == args[0][0], url_prefix + 'containers/{}/exec'.format( fake_api.FAKE_CONTAINER_ID ) @@ -32,7 +32,7 @@ class ExecTest(BaseAPIClientTest): self.client.exec_start(fake_api.FAKE_EXEC_ID) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{0}/start'.format( + assert args[0][1] == url_prefix + 'exec/{}/start'.format( fake_api.FAKE_EXEC_ID ) @@ -51,7 +51,7 @@ class ExecTest(BaseAPIClientTest): self.client.exec_start(fake_api.FAKE_EXEC_ID, detach=True) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{0}/start'.format( + assert args[0][1] == url_prefix + 'exec/{}/start'.format( fake_api.FAKE_EXEC_ID ) @@ -68,7 +68,7 @@ class ExecTest(BaseAPIClientTest): self.client.exec_inspect(fake_api.FAKE_EXEC_ID) args = fake_request.call_args - assert args[0][1] == url_prefix + 'exec/{0}/json'.format( + assert args[0][1] == url_prefix + 'exec/{}/json'.format( fake_api.FAKE_EXEC_ID ) @@ -77,7 +77,7 @@ class ExecTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'exec/{0}/resize'.format(fake_api.FAKE_EXEC_ID), + url_prefix + f'exec/{fake_api.FAKE_EXEC_ID}/resize', params={'h': 20, 'w': 60}, timeout=DEFAULT_TIMEOUT_SECONDS ) diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py index 0b60df43..843c11b8 100644 --- a/tests/unit/api_image_test.py +++ b/tests/unit/api_image_test.py @@ -11,7 +11,7 @@ from .api_test import ( try: from unittest import mock except 
ImportError: - import mock + from unittest import mock class ImageTest(BaseAPIClientTest): diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py index 758f0132..84d65449 100644 --- a/tests/unit/api_network_test.py +++ b/tests/unit/api_network_test.py @@ -1,14 +1,12 @@ import json -import six - from .api_test import BaseAPIClientTest, url_prefix, response from docker.types import IPAMConfig, IPAMPool try: from unittest import mock except ImportError: - import mock + from unittest import mock class NetworkTest(BaseAPIClientTest): @@ -103,16 +101,16 @@ class NetworkTest(BaseAPIClientTest): self.client.remove_network(network_id) args = delete.call_args - assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id) + assert args[0][0] == url_prefix + f'networks/{network_id}' def test_inspect_network(self): network_id = 'abc12345' network_name = 'foo' network_data = { - six.u('name'): network_name, - six.u('id'): network_id, - six.u('driver'): 'bridge', - six.u('containers'): {}, + 'name': network_name, + 'id': network_id, + 'driver': 'bridge', + 'containers': {}, } network_response = response(status_code=200, content=network_data) @@ -123,7 +121,7 @@ class NetworkTest(BaseAPIClientTest): assert result == network_data args = get.call_args - assert args[0][0] == url_prefix + 'networks/{0}'.format(network_id) + assert args[0][0] == url_prefix + f'networks/{network_id}' def test_connect_container_to_network(self): network_id = 'abc12345' @@ -141,7 +139,7 @@ class NetworkTest(BaseAPIClientTest): ) assert post.call_args[0][0] == ( - url_prefix + 'networks/{0}/connect'.format(network_id) + url_prefix + f'networks/{network_id}/connect' ) assert json.loads(post.call_args[1]['data']) == { @@ -164,7 +162,7 @@ class NetworkTest(BaseAPIClientTest): container={'Id': container_id}, net_id=network_id) assert post.call_args[0][0] == ( - url_prefix + 'networks/{0}/disconnect'.format(network_id) + url_prefix + f'networks/{network_id}/disconnect' ) assert 
json.loads(post.call_args[1]['data']) == { 'Container': container_id diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index cb14b74e..dfc38164 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -10,11 +10,12 @@ import tempfile import threading import time import unittest +import socketserver +import http.server import docker import pytest import requests -import six from docker.api import APIClient from docker.constants import DEFAULT_DOCKER_API_VERSION from requests.packages import urllib3 @@ -24,7 +25,7 @@ from . import fake_api try: from unittest import mock except ImportError: - import mock + from unittest import mock DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS @@ -34,7 +35,7 @@ def response(status_code=200, content='', headers=None, reason=None, elapsed=0, request=None, raw=None): res = requests.Response() res.status_code = status_code - if not isinstance(content, six.binary_type): + if not isinstance(content, bytes): content = json.dumps(content).encode('ascii') res._content = content res.headers = requests.structures.CaseInsensitiveDict(headers or {}) @@ -60,7 +61,7 @@ def fake_resp(method, url, *args, **kwargs): elif (url, method) in fake_api.fake_responses: key = (url, method) if not key: - raise Exception('{0} {1}'.format(method, url)) + raise Exception(f'{method} {url}') status_code, content = fake_api.fake_responses[key]() return response(status_code=status_code, content=content) @@ -85,11 +86,11 @@ def fake_delete(self, url, *args, **kwargs): def fake_read_from_socket(self, response, stream, tty=False, demux=False): - return six.binary_type() + return bytes() -url_base = '{0}/'.format(fake_api.prefix) -url_prefix = '{0}v{1}/'.format( +url_base = f'{fake_api.prefix}/' +url_prefix = '{}v{}/'.format( url_base, docker.constants.DEFAULT_DOCKER_API_VERSION) @@ -133,20 +134,20 @@ class DockerApiTest(BaseAPIClientTest): def test_url_valid_resource(self): url = self.client._url('/hello/{0}/world', 'somename') 
- assert url == '{0}{1}'.format(url_prefix, 'hello/somename/world') + assert url == '{}{}'.format(url_prefix, 'hello/somename/world') url = self.client._url( '/hello/{0}/world/{1}', 'somename', 'someothername' ) - assert url == '{0}{1}'.format( + assert url == '{}{}'.format( url_prefix, 'hello/somename/world/someothername' ) url = self.client._url('/hello/{0}/world', 'some?name') - assert url == '{0}{1}'.format(url_prefix, 'hello/some%3Fname/world') + assert url == '{}{}'.format(url_prefix, 'hello/some%3Fname/world') url = self.client._url("/images/{0}/push", "localhost:5000/image") - assert url == '{0}{1}'.format( + assert url == '{}{}'.format( url_prefix, 'images/localhost:5000/image/push' ) @@ -156,13 +157,13 @@ class DockerApiTest(BaseAPIClientTest): def test_url_no_resource(self): url = self.client._url('/simple') - assert url == '{0}{1}'.format(url_prefix, 'simple') + assert url == '{}{}'.format(url_prefix, 'simple') def test_url_unversioned_api(self): url = self.client._url( '/hello/{0}/world', 'somename', versioned_api=False ) - assert url == '{0}{1}'.format(url_base, 'hello/somename/world') + assert url == '{}{}'.format(url_base, 'hello/somename/world') def test_version(self): self.client.version() @@ -184,13 +185,13 @@ class DockerApiTest(BaseAPIClientTest): def test_retrieve_server_version(self): client = APIClient(version="auto") - assert isinstance(client._version, six.string_types) + assert isinstance(client._version, str) assert not (client._version == "auto") client.close() def test_auto_retrieve_server_version(self): version = self.client._retrieve_server_version() - assert isinstance(version, six.string_types) + assert isinstance(version, str) def test_info(self): self.client.info() @@ -337,8 +338,7 @@ class DockerApiTest(BaseAPIClientTest): def test_stream_helper_decoding(self): status_code, content = fake_api.fake_responses[url_prefix + 'events']() content_str = json.dumps(content) - if six.PY3: - content_str = content_str.encode('utf-8') + 
content_str = content_str.encode('utf-8') body = io.BytesIO(content_str) # mock a stream interface @@ -405,7 +405,7 @@ class UnixSocketStreamTest(unittest.TestCase): while not self.stop_server: try: connection, client_address = self.server_socket.accept() - except socket.error: + except OSError: # Probably no connection to accept yet time.sleep(0.01) continue @@ -489,7 +489,7 @@ class TCPSocketStreamTest(unittest.TestCase): @classmethod def setup_class(cls): - cls.server = six.moves.socketserver.ThreadingTCPServer( + cls.server = socketserver.ThreadingTCPServer( ('', 0), cls.get_handler_class()) cls.thread = threading.Thread(target=cls.server.serve_forever) cls.thread.setDaemon(True) @@ -508,7 +508,7 @@ class TCPSocketStreamTest(unittest.TestCase): stdout_data = cls.stdout_data stderr_data = cls.stderr_data - class Handler(six.moves.BaseHTTPServer.BaseHTTPRequestHandler, object): + class Handler(http.server.BaseHTTPRequestHandler): def do_POST(self): resp_data = self.get_resp_data() self.send_response(101) @@ -534,7 +534,7 @@ class TCPSocketStreamTest(unittest.TestCase): data += stderr_data return data else: - raise Exception('Unknown path {0}'.format(path)) + raise Exception(f'Unknown path {path}') @staticmethod def frame_header(stream, data): @@ -632,7 +632,7 @@ class UserAgentTest(unittest.TestCase): class DisableSocketTest(unittest.TestCase): - class DummySocket(object): + class DummySocket: def __init__(self, timeout=60): self.timeout = timeout diff --git a/tests/unit/api_volume_test.py b/tests/unit/api_volume_test.py index 7850c224..a8d9193f 100644 --- a/tests/unit/api_volume_test.py +++ b/tests/unit/api_volume_test.py @@ -104,7 +104,7 @@ class VolumeTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][0] == 'GET' - assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name) + assert args[0][1] == f'{url_prefix}volumes/{name}' def test_remove_volume(self): name = 'perfectcherryblossom' @@ -112,4 +112,4 @@ class 
VolumeTest(BaseAPIClientTest): args = fake_request.call_args assert args[0][0] == 'DELETE' - assert args[0][1] == '{0}volumes/{1}'.format(url_prefix, name) + assert args[0][1] == f'{url_prefix}volumes/{name}' diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index aac89109..8bd2e165 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import base64 import json import os @@ -15,7 +13,7 @@ import pytest try: from unittest import mock except ImportError: - import mock + from unittest import mock class RegressionTest(unittest.TestCase): @@ -239,7 +237,7 @@ class LoadConfigTest(unittest.TestCase): cfg_path = os.path.join(folder, '.dockercfg') auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') with open(cfg_path, 'w') as f: - f.write('auth = {0}\n'.format(auth_)) + f.write(f'auth = {auth_}\n') f.write('email = sakuya@scarlet.net') cfg = auth.load_config(cfg_path) @@ -297,13 +295,13 @@ class LoadConfigTest(unittest.TestCase): self.addCleanup(shutil.rmtree, folder) dockercfg_path = os.path.join(folder, - '.{0}.dockercfg'.format( + '.{}.dockercfg'.format( random.randrange(100000))) registry = 'https://your.private.registry.io' auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') config = { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -329,7 +327,7 @@ class LoadConfigTest(unittest.TestCase): auth_ = base64.b64encode(b'sakuya:izayoi').decode('ascii') config = { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -357,7 +355,7 @@ class LoadConfigTest(unittest.TestCase): config = { 'auths': { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -386,7 +384,7 @@ class LoadConfigTest(unittest.TestCase): config = { 'auths': { registry: { - 'auth': '{0}'.format(auth_), + 'auth': f'{auth_}', 'email': 'sakuya@scarlet.net' } } @@ -794,9 +792,9 @@ 
class InMemoryStore(credentials.Store): } def list(self): - return dict( - [(k, v['Username']) for k, v in self.__store.items()] - ) + return { + k: v['Username'] for k, v in self.__store.items() + } def erase(self, server): del self.__store[server] diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index ad88e845..d647d3a1 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -15,7 +15,7 @@ from . import fake_api try: from unittest import mock except ImportError: - import mock + from unittest import mock TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs') POOL_SIZE = 20 diff --git a/tests/unit/dockertypes_test.py b/tests/unit/dockertypes_test.py index 0689d07b..a0a171be 100644 --- a/tests/unit/dockertypes_test.py +++ b/tests/unit/dockertypes_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import unittest import pytest @@ -15,7 +13,7 @@ from docker.types.services import convert_service_ports try: from unittest import mock except: # noqa: E722 - import mock + from unittest import mock def create_host_config(*args, **kwargs): diff --git a/tests/unit/errors_test.py b/tests/unit/errors_test.py index 54c2ba8f..f8c3a666 100644 --- a/tests/unit/errors_test.py +++ b/tests/unit/errors_test.py @@ -126,7 +126,7 @@ class ContainerErrorTest(unittest.TestCase): err = ContainerError(container, exit_status, command, image, stderr) msg = ("Command '{}' in image '{}' returned non-zero exit status {}" - ).format(command, image, exit_status, stderr) + ).format(command, image, exit_status) assert str(err) == msg def test_container_with_stderr(self): diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py index 4fd4d113..4c933295 100644 --- a/tests/unit/fake_api.py +++ b/tests/unit/fake_api.py @@ -2,7 +2,7 @@ from docker import constants from . 
import fake_stat -CURRENT_VERSION = 'v{0}'.format(constants.DEFAULT_DOCKER_API_VERSION) +CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}' FAKE_CONTAINER_ID = '3cc2351ab11b' FAKE_IMAGE_ID = 'e9aa60c60128' @@ -526,96 +526,96 @@ if constants.IS_WINDOWS_PLATFORM: prefix = 'http+docker://localnpipe' fake_responses = { - '{0}/version'.format(prefix): + f'{prefix}/version': get_fake_version, - '{1}/{0}/version'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/version': get_fake_version, - '{1}/{0}/info'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/info': get_fake_info, - '{1}/{0}/auth'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/auth': post_fake_auth, - '{1}/{0}/_ping'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/_ping': get_fake_ping, - '{1}/{0}/images/search'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/search': get_fake_search, - '{1}/{0}/images/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/json': get_fake_images, - '{1}/{0}/images/test_image/history'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/history': get_fake_image_history, - '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/create': post_fake_import_image, - '{1}/{0}/containers/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/json': get_fake_containers, - '{1}/{0}/containers/3cc2351ab11b/start'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start': post_fake_start_container, - '{1}/{0}/containers/3cc2351ab11b/resize'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize': post_fake_resize_container, - '{1}/{0}/containers/3cc2351ab11b/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json': get_fake_inspect_container, - 
'{1}/{0}/containers/3cc2351ab11b/rename'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename': post_fake_rename_container, - '{1}/{0}/images/e9aa60c60128/tag'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag': post_fake_tag_image, - '{1}/{0}/containers/3cc2351ab11b/wait'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait': get_fake_wait, - '{1}/{0}/containers/3cc2351ab11b/logs'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs': get_fake_logs, - '{1}/{0}/containers/3cc2351ab11b/changes'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes': get_fake_diff, - '{1}/{0}/containers/3cc2351ab11b/export'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export': get_fake_export, - '{1}/{0}/containers/3cc2351ab11b/update'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update': post_fake_update_container, - '{1}/{0}/containers/3cc2351ab11b/exec'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec': post_fake_exec_create, - '{1}/{0}/exec/d5d177f121dc/start'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start': post_fake_exec_start, - '{1}/{0}/exec/d5d177f121dc/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json': get_fake_exec_inspect, - '{1}/{0}/exec/d5d177f121dc/resize'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize': post_fake_exec_resize, - '{1}/{0}/containers/3cc2351ab11b/stats'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats': get_fake_stats, - '{1}/{0}/containers/3cc2351ab11b/top'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top': 
get_fake_top, - '{1}/{0}/containers/3cc2351ab11b/stop'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop': post_fake_stop_container, - '{1}/{0}/containers/3cc2351ab11b/kill'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill': post_fake_kill_container, - '{1}/{0}/containers/3cc2351ab11b/pause'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause': post_fake_pause_container, - '{1}/{0}/containers/3cc2351ab11b/unpause'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause': post_fake_unpause_container, - '{1}/{0}/containers/3cc2351ab11b/restart'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart': post_fake_restart_container, - '{1}/{0}/containers/3cc2351ab11b'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b': delete_fake_remove_container, - '{1}/{0}/images/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/create': post_fake_image_create, - '{1}/{0}/images/e9aa60c60128'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128': delete_fake_remove_image, - '{1}/{0}/images/e9aa60c60128/get'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get': get_fake_get_image, - '{1}/{0}/images/load'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/load': post_fake_load_image, - '{1}/{0}/images/test_image/json'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/json': get_fake_inspect_image, - '{1}/{0}/images/test_image/insert'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/insert': get_fake_insert_image, - '{1}/{0}/images/test_image/push'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/images/test_image/push': post_fake_push, - 
'{1}/{0}/commit'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/commit': post_fake_commit, - '{1}/{0}/containers/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/containers/create': post_fake_create_container, - '{1}/{0}/build'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/build': post_fake_build_container, - '{1}/{0}/events'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/events': get_fake_events, - ('{1}/{0}/volumes'.format(CURRENT_VERSION, prefix), 'GET'): + (f'{prefix}/{CURRENT_VERSION}/volumes', 'GET'): get_fake_volume_list, - ('{1}/{0}/volumes/create'.format(CURRENT_VERSION, prefix), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/volumes/create', 'POST'): get_fake_volume, ('{1}/{0}/volumes/{2}'.format( CURRENT_VERSION, prefix, FAKE_VOLUME_NAME @@ -629,11 +629,11 @@ fake_responses = { CURRENT_VERSION, prefix, FAKE_NODE_ID ), 'POST'): post_fake_update_node, - ('{1}/{0}/swarm/join'.format(CURRENT_VERSION, prefix), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/swarm/join', 'POST'): post_fake_join_swarm, - ('{1}/{0}/networks'.format(CURRENT_VERSION, prefix), 'GET'): + (f'{prefix}/{CURRENT_VERSION}/networks', 'GET'): get_fake_network_list, - ('{1}/{0}/networks/create'.format(CURRENT_VERSION, prefix), 'POST'): + (f'{prefix}/{CURRENT_VERSION}/networks/create', 'POST'): post_fake_network, ('{1}/{0}/networks/{2}'.format( CURRENT_VERSION, prefix, FAKE_NETWORK_ID @@ -651,6 +651,6 @@ fake_responses = { CURRENT_VERSION, prefix, FAKE_NETWORK_ID ), 'POST'): post_fake_network_disconnect, - '{1}/{0}/secrets/create'.format(CURRENT_VERSION, prefix): + f'{prefix}/{CURRENT_VERSION}/secrets/create': post_fake_secret, } diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py index 5825b6ec..1663ef12 100644 --- a/tests/unit/fake_api_client.py +++ b/tests/unit/fake_api_client.py @@ -7,7 +7,7 @@ from . 
import fake_api try: from unittest import mock except ImportError: - import mock + from unittest import mock class CopyReturnMagicMock(mock.MagicMock): @@ -15,7 +15,7 @@ class CopyReturnMagicMock(mock.MagicMock): A MagicMock which deep copies every return value. """ def _mock_call(self, *args, **kwargs): - ret = super(CopyReturnMagicMock, self)._mock_call(*args, **kwargs) + ret = super()._mock_call(*args, **kwargs) if isinstance(ret, (dict, list)): ret = copy.deepcopy(ret) return ret diff --git a/tests/unit/models_resources_test.py b/tests/unit/models_resources_test.py index 5af24ee6..11dea294 100644 --- a/tests/unit/models_resources_test.py +++ b/tests/unit/models_resources_test.py @@ -16,7 +16,7 @@ class ModelTest(unittest.TestCase): def test_hash(self): client = make_fake_client() container1 = client.containers.get(FAKE_CONTAINER_ID) - my_set = set([container1]) + my_set = {container1} assert len(my_set) == 1 container2 = client.containers.get(FAKE_CONTAINER_ID) diff --git a/tests/unit/models_secrets_test.py b/tests/unit/models_secrets_test.py index 4ccf4c63..1c261a87 100644 --- a/tests/unit/models_secrets_test.py +++ b/tests/unit/models_secrets_test.py @@ -8,4 +8,4 @@ class CreateServiceTest(unittest.TestCase): def test_secrets_repr(self): client = make_fake_client() secret = client.secrets.create(name="super_secret", data="secret") - assert secret.__repr__() == "".format(FAKE_SECRET_NAME) + assert secret.__repr__() == f"" diff --git a/tests/unit/models_services_test.py b/tests/unit/models_services_test.py index 07bb5897..b9192e42 100644 --- a/tests/unit/models_services_test.py +++ b/tests/unit/models_services_test.py @@ -40,10 +40,10 @@ class CreateServiceKwargsTest(unittest.TestCase): 'update_config': {'update': 'config'}, 'endpoint_spec': {'blah': 'blah'}, } - assert set(task_template.keys()) == set([ + assert set(task_template.keys()) == { 'ContainerSpec', 'Resources', 'RestartPolicy', 'Placement', 'LogDriver', 'Networks' - ]) + } assert 
task_template['Placement'] == { 'Constraints': ['foo=bar'], 'Preferences': ['bar=baz'], @@ -55,7 +55,7 @@ class CreateServiceKwargsTest(unittest.TestCase): 'Options': {'foo': 'bar'} } assert task_template['Networks'] == [{'Target': 'somenet'}] - assert set(task_template['ContainerSpec'].keys()) == set([ + assert set(task_template['ContainerSpec'].keys()) == { 'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User', 'Labels', 'Mounts', 'StopGracePeriod' - ]) + } diff --git a/tests/unit/ssladapter_test.py b/tests/unit/ssladapter_test.py index 73b73360..41a87f20 100644 --- a/tests/unit/ssladapter_test.py +++ b/tests/unit/ssladapter_test.py @@ -32,30 +32,30 @@ class SSLAdapterTest(unittest.TestCase): class MatchHostnameTest(unittest.TestCase): cert = { 'issuer': ( - (('countryName', u'US'),), - (('stateOrProvinceName', u'California'),), - (('localityName', u'San Francisco'),), - (('organizationName', u'Docker Inc'),), - (('organizationalUnitName', u'Docker-Python'),), - (('commonName', u'localhost'),), - (('emailAddress', u'info@docker.com'),) + (('countryName', 'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'San Francisco'),), + (('organizationName', 'Docker Inc'),), + (('organizationalUnitName', 'Docker-Python'),), + (('commonName', 'localhost'),), + (('emailAddress', 'info@docker.com'),) ), 'notAfter': 'Mar 25 23:08:23 2030 GMT', - 'notBefore': u'Mar 25 23:08:23 2016 GMT', - 'serialNumber': u'BD5F894C839C548F', + 'notBefore': 'Mar 25 23:08:23 2016 GMT', + 'serialNumber': 'BD5F894C839C548F', 'subject': ( - (('countryName', u'US'),), - (('stateOrProvinceName', u'California'),), - (('localityName', u'San Francisco'),), - (('organizationName', u'Docker Inc'),), - (('organizationalUnitName', u'Docker-Python'),), - (('commonName', u'localhost'),), - (('emailAddress', u'info@docker.com'),) + (('countryName', 'US'),), + (('stateOrProvinceName', 'California'),), + (('localityName', 'San Francisco'),), + (('organizationName', 'Docker Inc'),), + 
(('organizationalUnitName', 'Docker-Python'),), + (('commonName', 'localhost'),), + (('emailAddress', 'info@docker.com'),) ), 'subjectAltName': ( - ('DNS', u'localhost'), - ('DNS', u'*.gensokyo.jp'), - ('IP Address', u'127.0.0.1'), + ('DNS', 'localhost'), + ('DNS', '*.gensokyo.jp'), + ('IP Address', '127.0.0.1'), ), 'version': 3 } diff --git a/tests/unit/swarm_test.py b/tests/unit/swarm_test.py index 43853802..aee1b9e8 100644 --- a/tests/unit/swarm_test.py +++ b/tests/unit/swarm_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import json from . import fake_api diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py index bc6fb5f4..9f183886 100644 --- a/tests/unit/utils_build_test.py +++ b/tests/unit/utils_build_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os import os.path import shutil @@ -82,7 +80,7 @@ class ExcludePathsTest(unittest.TestCase): assert sorted(paths) == sorted(set(paths)) def test_wildcard_exclude(self): - assert self.exclude(['*']) == set(['Dockerfile', '.dockerignore']) + assert self.exclude(['*']) == {'Dockerfile', '.dockerignore'} def test_exclude_dockerfile_dockerignore(self): """ @@ -99,18 +97,18 @@ class ExcludePathsTest(unittest.TestCase): If we're using a custom Dockerfile, make sure that's not excluded. 
""" - assert self.exclude(['*'], dockerfile='Dockerfile.alt') == set( - ['Dockerfile.alt', '.dockerignore'] - ) + assert self.exclude(['*'], dockerfile='Dockerfile.alt') == { + 'Dockerfile.alt', '.dockerignore' + } assert self.exclude( ['*'], dockerfile='foo/Dockerfile3' - ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) + ) == convert_paths({'foo/Dockerfile3', '.dockerignore'}) # https://github.com/docker/docker-py/issues/1956 assert self.exclude( ['*'], dockerfile='./foo/Dockerfile3' - ) == convert_paths(set(['foo/Dockerfile3', '.dockerignore'])) + ) == convert_paths({'foo/Dockerfile3', '.dockerignore'}) def test_exclude_dockerfile_child(self): includes = self.exclude(['foo/'], dockerfile='foo/Dockerfile3') @@ -119,56 +117,56 @@ class ExcludePathsTest(unittest.TestCase): def test_single_filename(self): assert self.exclude(['a.py']) == convert_paths( - self.all_paths - set(['a.py']) + self.all_paths - {'a.py'} ) def test_single_filename_leading_dot_slash(self): assert self.exclude(['./a.py']) == convert_paths( - self.all_paths - set(['a.py']) + self.all_paths - {'a.py'} ) # As odd as it sounds, a filename pattern with a trailing slash on the # end *will* result in that file being excluded. 
def test_single_filename_trailing_slash(self): assert self.exclude(['a.py/']) == convert_paths( - self.all_paths - set(['a.py']) + self.all_paths - {'a.py'} ) def test_wildcard_filename_start(self): assert self.exclude(['*.py']) == convert_paths( - self.all_paths - set(['a.py', 'b.py', 'cde.py']) + self.all_paths - {'a.py', 'b.py', 'cde.py'} ) def test_wildcard_with_exception(self): assert self.exclude(['*.py', '!b.py']) == convert_paths( - self.all_paths - set(['a.py', 'cde.py']) + self.all_paths - {'a.py', 'cde.py'} ) def test_wildcard_with_wildcard_exception(self): assert self.exclude(['*.*', '!*.go']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'a.py', 'b.py', 'cde.py', 'Dockerfile.alt', - ]) + } ) def test_wildcard_filename_end(self): assert self.exclude(['a.*']) == convert_paths( - self.all_paths - set(['a.py', 'a.go']) + self.all_paths - {'a.py', 'a.go'} ) def test_question_mark(self): assert self.exclude(['?.py']) == convert_paths( - self.all_paths - set(['a.py', 'b.py']) + self.all_paths - {'a.py', 'b.py'} ) def test_single_subdir_single_filename(self): assert self.exclude(['foo/a.py']) == convert_paths( - self.all_paths - set(['foo/a.py']) + self.all_paths - {'foo/a.py'} ) def test_single_subdir_single_filename_leading_slash(self): assert self.exclude(['/foo/a.py']) == convert_paths( - self.all_paths - set(['foo/a.py']) + self.all_paths - {'foo/a.py'} ) def test_exclude_include_absolute_path(self): @@ -176,57 +174,57 @@ class ExcludePathsTest(unittest.TestCase): assert exclude_paths( base, ['/*', '!/*.py'] - ) == set(['a.py', 'b.py']) + ) == {'a.py', 'b.py'} def test_single_subdir_with_path_traversal(self): assert self.exclude(['foo/whoops/../a.py']) == convert_paths( - self.all_paths - set(['foo/a.py']) + self.all_paths - {'foo/a.py'} ) def test_single_subdir_wildcard_filename(self): assert self.exclude(['foo/*.py']) == convert_paths( - self.all_paths - set(['foo/a.py', 'foo/b.py']) + self.all_paths - {'foo/a.py', 'foo/b.py'} ) def 
test_wildcard_subdir_single_filename(self): assert self.exclude(['*/a.py']) == convert_paths( - self.all_paths - set(['foo/a.py', 'bar/a.py']) + self.all_paths - {'foo/a.py', 'bar/a.py'} ) def test_wildcard_subdir_wildcard_filename(self): assert self.exclude(['*/*.py']) == convert_paths( - self.all_paths - set(['foo/a.py', 'foo/b.py', 'bar/a.py']) + self.all_paths - {'foo/a.py', 'foo/b.py', 'bar/a.py'} ) def test_directory(self): assert self.exclude(['foo']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_trailing_slash(self): assert self.exclude(['foo']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo', 'foo/a.py', 'foo/b.py', 'foo/bar', 'foo/bar/a.py', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_single_exception(self): assert self.exclude(['foo', '!foo/bar/a.py']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/a.py', 'foo/b.py', 'foo', 'foo/bar', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_subdir_exception(self): assert self.exclude(['foo', '!foo/bar']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3' - ]) + } ) @pytest.mark.skipif( @@ -234,21 +232,21 @@ class ExcludePathsTest(unittest.TestCase): ) def test_directory_with_subdir_exception_win32_pathsep(self): assert self.exclude(['foo', '!foo\\bar']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/a.py', 'foo/b.py', 'foo', 'foo/Dockerfile3' - ]) + } ) def test_directory_with_wildcard_exception(self): assert self.exclude(['foo', '!foo/*.py']) == convert_paths( - self.all_paths - set([ + self.all_paths - { 'foo/bar', 'foo/bar/a.py', 'foo', 'foo/Dockerfile3' - ]) + } ) def test_subdirectory(self): assert self.exclude(['foo/bar']) == convert_paths( - self.all_paths - set(['foo/bar', 'foo/bar/a.py']) + self.all_paths - {'foo/bar', 'foo/bar/a.py'} ) 
@pytest.mark.skipif( @@ -256,33 +254,33 @@ class ExcludePathsTest(unittest.TestCase): ) def test_subdirectory_win32_pathsep(self): assert self.exclude(['foo\\bar']) == convert_paths( - self.all_paths - set(['foo/bar', 'foo/bar/a.py']) + self.all_paths - {'foo/bar', 'foo/bar/a.py'} ) def test_double_wildcard(self): assert self.exclude(['**/a.py']) == convert_paths( - self.all_paths - set( - ['a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py'] - ) + self.all_paths - { + 'a.py', 'foo/a.py', 'foo/bar/a.py', 'bar/a.py' + } ) assert self.exclude(['foo/**/bar']) == convert_paths( - self.all_paths - set(['foo/bar', 'foo/bar/a.py']) + self.all_paths - {'foo/bar', 'foo/bar/a.py'} ) def test_single_and_double_wildcard(self): assert self.exclude(['**/target/*/*']) == convert_paths( - self.all_paths - set( - ['target/subdir/file.txt', + self.all_paths - { + 'target/subdir/file.txt', 'subdir/target/subdir/file.txt', - 'subdir/subdir2/target/subdir/file.txt'] - ) + 'subdir/subdir2/target/subdir/file.txt' + } ) def test_trailing_double_wildcard(self): assert self.exclude(['subdir/**']) == convert_paths( - self.all_paths - set( - ['subdir/file.txt', + self.all_paths - { + 'subdir/file.txt', 'subdir/target/file.txt', 'subdir/target/subdir/file.txt', 'subdir/subdir2/file.txt', @@ -292,16 +290,16 @@ class ExcludePathsTest(unittest.TestCase): 'subdir/target/subdir', 'subdir/subdir2', 'subdir/subdir2/target', - 'subdir/subdir2/target/subdir'] - ) + 'subdir/subdir2/target/subdir' + } ) def test_double_wildcard_with_exception(self): assert self.exclude(['**', '!bar', '!foo/bar']) == convert_paths( - set([ + { 'foo/bar', 'foo/bar/a.py', 'bar', 'bar/a.py', 'Dockerfile', '.dockerignore', - ]) + } ) def test_include_wildcard(self): @@ -324,7 +322,7 @@ class ExcludePathsTest(unittest.TestCase): assert exclude_paths( base, ['*.md', '!README*.md', 'README-secret.md'] - ) == set(['README.md', 'README-bis.md']) + ) == {'README.md', 'README-bis.md'} def test_parent_directory(self): base = make_tree( @@ 
-340,7 +338,7 @@ class ExcludePathsTest(unittest.TestCase): assert exclude_paths( base, ['../a.py', '/../b.py'] - ) == set(['c.py']) + ) == {'c.py'} class TarTest(unittest.TestCase): @@ -374,14 +372,14 @@ class TarTest(unittest.TestCase): '.dockerignore', ] - expected_names = set([ + expected_names = { 'Dockerfile', '.dockerignore', 'a.go', 'b.py', 'bar', 'bar/a.py', - ]) + } base = make_tree(dirs, files) self.addCleanup(shutil.rmtree, base) @@ -413,7 +411,7 @@ class TarTest(unittest.TestCase): with pytest.raises(IOError) as ei: tar(base) - assert 'Can not read file in context: {}'.format(full_path) in ( + assert f'Can not read file in context: {full_path}' in ( ei.exconly() ) diff --git a/tests/unit/utils_config_test.py b/tests/unit/utils_config_test.py index b0934f95..83e04a14 100644 --- a/tests/unit/utils_config_test.py +++ b/tests/unit/utils_config_test.py @@ -11,7 +11,7 @@ from docker.utils import config try: from unittest import mock except ImportError: - import mock + from unittest import mock class FindConfigFileTest(unittest.TestCase): diff --git a/tests/unit/utils_json_stream_test.py b/tests/unit/utils_json_stream_test.py index f7aefd0f..821ebe42 100644 --- a/tests/unit/utils_json_stream_test.py +++ b/tests/unit/utils_json_stream_test.py @@ -1,11 +1,7 @@ -# encoding: utf-8 -from __future__ import absolute_import -from __future__ import unicode_literals - from docker.utils.json_stream import json_splitter, stream_as_text, json_stream -class TestJsonSplitter(object): +class TestJsonSplitter: def test_json_splitter_no_object(self): data = '{"foo": "bar' @@ -20,7 +16,7 @@ class TestJsonSplitter(object): assert json_splitter(data) == ({'foo': 'bar'}, '{"next": "obj"}') -class TestStreamAsText(object): +class TestStreamAsText: def test_stream_with_non_utf_unicode_character(self): stream = [b'\xed\xf3\xf3'] @@ -28,12 +24,12 @@ class TestStreamAsText(object): assert output == '���' def test_stream_with_utf_character(self): - stream = ['ěĝ'.encode('utf-8')] + 
stream = ['ěĝ'.encode()] output, = stream_as_text(stream) assert output == 'ěĝ' -class TestJsonStream(object): +class TestJsonStream: def test_with_falsy_entries(self): stream = [ diff --git a/tests/unit/utils_proxy_test.py b/tests/unit/utils_proxy_test.py index ff0e14ba..2da60401 100644 --- a/tests/unit/utils_proxy_test.py +++ b/tests/unit/utils_proxy_test.py @@ -1,7 +1,4 @@ -# -*- coding: utf-8 -*- - import unittest -import six from docker.utils.proxy import ProxyConfig @@ -65,7 +62,7 @@ class ProxyConfigTest(unittest.TestCase): # Proxy config is non null, env is None. self.assertSetEqual( set(CONFIG.inject_proxy_environment(None)), - set(['{}={}'.format(k, v) for k, v in six.iteritems(ENV)])) + {f'{k}={v}' for k, v in ENV.items()}) # Proxy config is null, env is None. self.assertIsNone(ProxyConfig().inject_proxy_environment(None), None) @@ -74,7 +71,7 @@ class ProxyConfigTest(unittest.TestCase): # Proxy config is non null, env is non null actual = CONFIG.inject_proxy_environment(env) - expected = ['{}={}'.format(k, v) for k, v in six.iteritems(ENV)] + env + expected = [f'{k}={v}' for k, v in ENV.items()] + env # It's important that the first 8 variables are the ones from the proxy # config, and the last 2 are the ones from the input environment self.assertSetEqual(set(actual[:8]), set(expected[:8])) diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 0d6ff22d..802d9196 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import base64 import json import os @@ -9,7 +7,6 @@ import tempfile import unittest import pytest -import six from docker.api.client import APIClient from docker.constants import IS_WINDOWS_PLATFORM, DEFAULT_DOCKER_API_VERSION from docker.errors import DockerException @@ -195,22 +192,22 @@ class ConverVolumeBindsTest(unittest.TestCase): assert convert_volume_binds(data) == ['/mnt/vol1:/data:rw'] def test_convert_volume_binds_unicode_bytes_input(self): - expected = 
[u'/mnt/지연:/unicode/박:rw'] + expected = ['/mnt/지연:/unicode/박:rw'] data = { - u'/mnt/지연'.encode('utf-8'): { - 'bind': u'/unicode/박'.encode('utf-8'), + '/mnt/지연'.encode(): { + 'bind': '/unicode/박'.encode(), 'mode': 'rw' } } assert convert_volume_binds(data) == expected def test_convert_volume_binds_unicode_unicode_input(self): - expected = [u'/mnt/지연:/unicode/박:rw'] + expected = ['/mnt/지연:/unicode/박:rw'] data = { - u'/mnt/지연': { - 'bind': u'/unicode/박', + '/mnt/지연': { + 'bind': '/unicode/박', 'mode': 'rw' } } @@ -359,14 +356,14 @@ class ParseRepositoryTagTest(unittest.TestCase): ) def test_index_image_sha(self): - assert parse_repository_tag("root@sha256:{0}".format(self.sha)) == ( - "root", "sha256:{0}".format(self.sha) + assert parse_repository_tag(f"root@sha256:{self.sha}") == ( + "root", f"sha256:{self.sha}" ) def test_private_reg_image_sha(self): assert parse_repository_tag( - "url:5000/repo@sha256:{0}".format(self.sha) - ) == ("url:5000/repo", "sha256:{0}".format(self.sha)) + f"url:5000/repo@sha256:{self.sha}" + ) == ("url:5000/repo", f"sha256:{self.sha}") class ParseDeviceTest(unittest.TestCase): @@ -463,20 +460,13 @@ class UtilsTest(unittest.TestCase): def test_decode_json_header(self): obj = {'a': 'b', 'c': 1} data = None - if six.PY3: - data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8')) - else: - data = base64.urlsafe_b64encode(json.dumps(obj)) + data = base64.urlsafe_b64encode(bytes(json.dumps(obj), 'utf-8')) decoded_data = decode_json_header(data) assert obj == decoded_data class SplitCommandTest(unittest.TestCase): def test_split_command_with_unicode(self): - assert split_command(u'echo μμ') == ['echo', 'μμ'] - - @pytest.mark.skipif(six.PY3, reason="shlex doesn't support bytes in py3") - def test_split_command_with_bytes(self): assert split_command('echo μμ') == ['echo', 'μμ'] @@ -626,7 +616,7 @@ class FormatEnvironmentTest(unittest.TestCase): env_dict = { 'ARTIST_NAME': b'\xec\x86\xa1\xec\xa7\x80\xec\x9d\x80' } - assert 
format_environment(env_dict) == [u'ARTIST_NAME=송지은'] + assert format_environment(env_dict) == ['ARTIST_NAME=송지은'] def test_format_env_no_value(self): env_dict = { From 19d6cd8a015f1484e147a0bb9d0b4684c2a6aaac Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Fri, 6 Aug 2021 09:32:42 -0300 Subject: [PATCH 158/211] Bump requests => 2.26.0 Signed-off-by: Ulysses Souza --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 1d0be30a..f6b17fd5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,6 @@ pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 pywin32==227; sys_platform == 'win32' -requests==2.20.0 +requests==2.26.0 urllib3==1.24.3 websocket-client==0.56.0 From 582f6277ce4dfe67b5be5a52b88bbdef3f349e11 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Aug 2021 12:46:56 +0000 Subject: [PATCH 159/211] Bump urllib3 from 1.24.3 to 1.26.5 Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.24.3 to 1.26.5. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.24.3...1.26.5) --- updated-dependencies: - dependency-name: urllib3 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f6b17fd5..42af699b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,5 +13,5 @@ pyOpenSSL==18.0.0 pyparsing==2.2.0 pywin32==227; sys_platform == 'win32' requests==2.26.0 -urllib3==1.24.3 +urllib3==1.26.5 websocket-client==0.56.0 From e0d186d754693feb7d27c2352e455c5febb4a5cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Aug 2021 20:57:04 +0000 Subject: [PATCH 160/211] Bump pywin32 from 227 to 301 Bumps [pywin32](https://github.com/mhammond/pywin32) from 227 to 301. - [Release notes](https://github.com/mhammond/pywin32/releases) - [Changelog](https://github.com/mhammond/pywin32/blob/master/CHANGES.txt) - [Commits](https://github.com/mhammond/pywin32/commits) --- updated-dependencies: - dependency-name: pywin32 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 42af699b..26cbc6fb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,7 @@ paramiko==2.4.2 pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 -pywin32==227; sys_platform == 'win32' +pywin32==301; sys_platform == 'win32' requests==2.26.0 urllib3==1.26.5 websocket-client==0.56.0 From 2fa56879a2f978387d230db087003d79eb2762d0 Mon Sep 17 00:00:00 2001 From: sinarostami Date: Mon, 16 Aug 2021 00:06:45 +0430 Subject: [PATCH 161/211] Improve containers documentation Signed-off-by: sinarostami --- docker/models/containers.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docker/models/containers.py b/docker/models/containers.py index 36cbbc41..957deed4 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -761,6 +761,14 @@ class ContainerCollection(Collection): {'/home/user1/': {'bind': 
'/mnt/vol2', 'mode': 'rw'}, '/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}} + Or a list of strings which each one of its elements specifies a mount volume. + + For example: + + .. code-block:: python + + ['/home/user1/:/mnt/vol2','/var/www:/mnt/vol1'] + volumes_from (:py:class:`list`): List of container names or IDs to get volumes from. working_dir (str): Path to the working directory. From 8da03e01265f229a91aaffb7af2aa2057e08f1b9 Mon Sep 17 00:00:00 2001 From: Shehzaman Date: Thu, 27 May 2021 00:11:38 +0530 Subject: [PATCH 162/211] Put back identityfile parameter Signed-off-by: Shehzaman --- docker/transport/sshconn.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index fb5c6bbe..e08e3a86 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -205,7 +205,6 @@ class SSHHTTPAdapter(BaseHTTPAdapter): with open(ssh_config_file) as f: conf.parse(f) host_config = conf.lookup(base_url.hostname) - self.ssh_conf = host_config if 'proxycommand' in host_config: self.ssh_params["sock"] = paramiko.ProxyCommand( self.ssh_conf['proxycommand'] @@ -213,9 +212,11 @@ class SSHHTTPAdapter(BaseHTTPAdapter): if 'hostname' in host_config: self.ssh_params['hostname'] = host_config['hostname'] if base_url.port is None and 'port' in host_config: - self.ssh_params['port'] = self.ssh_conf['port'] + self.ssh_params['port'] = host_config['port'] if base_url.username is None and 'user' in host_config: - self.ssh_params['username'] = self.ssh_conf['user'] + self.ssh_params['username'] = host_config['user'] + if 'identityfile' in host_config: + self.ssh_params['key_filename'] = host_config['identityfile'] self.ssh_client.load_system_host_keys() self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) From 4a3cddf4bf926f3aa0d46d5f0318dbb212231377 Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Tue, 31 Aug 2021 15:57:32 +0200 Subject: [PATCH 163/211] Update changelog for 
v5.0.0 Signed-off-by: Anca Iordache --- docker/version.py | 2 +- docs/change-log.md | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 35541041..b95a1ede 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.5.0-dev" +version = "5.0.0-dev" version_info = tuple(int(d) for d in version.split("-")[0].split(".")) diff --git a/docs/change-log.md b/docs/change-log.md index 8db3fc58..63a029e1 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,24 @@ Change log ========== +5.0.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/70?closed=1) + +### Breaking changes +- Remove support for Python 2.7 +- Make Python 3.6 the minimum version supported + +### Features +- Add `limit` parameter to image search endpoint + +### Bugfixes +- Fix `KeyError` exception on secret create +- Verify TLS keys loaded from docker contexts +- Update PORT_SPEC regex to allow square brackets for IPv6 addresses +- Fix containers and images documentation examples + 4.4.4 ----- From c5fc19385765b2724285689a94c408cfd486f210 Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Tue, 31 Aug 2021 16:39:50 +0200 Subject: [PATCH 164/211] Update changelog for 5.0.1 release Signed-off-by: Anca Iordache --- docker/version.py | 2 +- docs/change-log.md | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index b95a1ede..5687086f 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "5.0.0-dev" +version = "5.1.0-dev" version_info = tuple(int(d) for d in version.split("-")[0].split(".")) diff --git a/docs/change-log.md b/docs/change-log.md index 63a029e1..441e91de 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,20 @@ Change log ========== +5.0.1 +----- + +[List of PRs / issues for this 
release](https://github.com/docker/docker-py/milestone/74?closed=1) + +### Bugfixes +- Bring back support for ssh identity file +- Cleanup remaining python-2 dependencies +- Fix image save example in docs + +### Miscellaneous +- Bump urllib3 to 1.26.5 +- Bump requests to 2.26.0 + 5.0.0 ----- From f9b85586ca7244ada8b66a4dab1fd324caccbe24 Mon Sep 17 00:00:00 2001 From: Adam Aposhian Date: Tue, 31 Aug 2021 15:02:04 -0600 Subject: [PATCH 165/211] fix(transport): remove disable_buffering option Signed-off-by: Adam Aposhian --- docker/transport/unixconn.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index adb6f18a..1b00762a 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -23,7 +23,6 @@ class UnixHTTPConnection(httplib.HTTPConnection): self.base_url = base_url self.unix_socket = unix_socket self.timeout = timeout - self.disable_buffering = False def connect(self): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) @@ -33,13 +32,8 @@ class UnixHTTPConnection(httplib.HTTPConnection): def putheader(self, header, *values): super().putheader(header, *values) - if header == 'Connection' and 'Upgrade' in values: - self.disable_buffering = True def response_class(self, sock, *args, **kwargs): - if self.disable_buffering: - kwargs['disable_buffering'] = True - return httplib.HTTPResponse(sock, *args, **kwargs) From a9265197d262302d34846e26886347f68c83bb5d Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Wed, 1 Sep 2021 19:23:59 +0200 Subject: [PATCH 166/211] Post-release changelog update Signed-off-by: Anca Iordache --- docs/change-log.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index 441e91de..2ff0774f 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,14 @@ Change log ========== +5.0.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/75?closed=1) + 
+### Bugfixes +- Fix `disable_buffering` regression + 5.0.1 ----- From 63618b5e11e9326ed6e4cad6a0b012b9dc02593f Mon Sep 17 00:00:00 2001 From: Segev Finer Date: Thu, 15 Mar 2018 21:46:24 +0200 Subject: [PATCH 167/211] Fix getting a read timeout for logs/attach with a tty and slow output Fixes #931 Signed-off-by: Segev Finer --- docker/api/client.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker/api/client.py b/docker/api/client.py index f0cb39b8..2667922d 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -397,6 +397,12 @@ class APIClient( def _stream_raw_result(self, response, chunk_size=1, decode=True): ''' Stream result for TTY-enabled container and raw binary data''' self._raise_for_status(response) + + # Disable timeout on the underlying socket to prevent + # Read timed out(s) for long running processes + socket = self._get_raw_response_socket(response) + self._disable_socket_timeout(socket) + yield from response.iter_content(chunk_size, decode) def _read_from_socket(self, response, stream, tty=True, demux=False): From ecace769f5d81b5ea1a25befed8eebe2c723d33e Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Fri, 8 Oct 2021 00:58:26 +0200 Subject: [PATCH 168/211] Post-release changelog update Signed-off-by: Anca Iordache --- docs/change-log.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index 2ff0774f..91f3fe6f 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,21 @@ Change log ========== +5.0.3 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/76?closed=1) + +### Features +- Add `cap_add` and `cap_drop` parameters to service create and ContainerSpec +- Add `templating` parameter to config create + +### Bugfixes +- Fix getting a read timeout for logs/attach with a tty and slow output + +### Miscellaneous +- Fix documentation examples + 5.0.2 ----- From a9de3432103141c7519783ad4d8088797c892914 Mon Sep 17 
00:00:00 2001 From: Hugo van Kemenade Date: Mon, 11 Oct 2021 22:30:36 +0300 Subject: [PATCH 169/211] Add support for Python 3.10 Signed-off-by: Hugo van Kemenade --- .github/workflows/ci.yml | 2 +- setup.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b6925082..a73bcbad 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,7 +8,7 @@ jobs: strategy: max-parallel: 1 matrix: - python-version: [3.6, 3.7, 3.8, 3.9] + python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] steps: - uses: actions/checkout@v2 diff --git a/setup.py b/setup.py index a966fea2..1e76fdb1 100644 --- a/setup.py +++ b/setup.py @@ -75,6 +75,7 @@ setup( 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Software Development', 'Topic :: Utilities', 'License :: OSI Approved :: Apache Software License', From 4150fc4d9d3c9c68dea3a377410182aa33c02c2b Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Mon, 11 Oct 2021 22:30:42 +0300 Subject: [PATCH 170/211] Universal wheels are for code expected to work on both Python 2 and 3 Signed-off-by: Hugo van Kemenade --- setup.cfg | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 907746f0..a37e5521 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,3 @@ -[bdist_wheel] -universal = 1 - [metadata] description_file = README.rst license = Apache License 2.0 From 72bcd1616da7c3d57fd90ec02b2fa7a9255dd08b Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Mon, 11 Oct 2021 22:38:56 +0300 Subject: [PATCH 171/211] Bump pytest (and other dependencies) for Python 3.10 Signed-off-by: Hugo van Kemenade --- test-requirements.txt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 40161bb8..d135792b 100644 --- a/test-requirements.txt +++ 
b/test-requirements.txt @@ -1,7 +1,7 @@ -setuptools==54.1.1 -coverage==4.5.2 -flake8==3.6.0 +setuptools==58.2.0 +coverage==6.0.1 +flake8==4.0.1 mock==1.0.1 -pytest==4.3.1 -pytest-cov==2.6.1 -pytest-timeout==1.3.3 +pytest==6.2.5 +pytest-cov==3.0.0 +pytest-timeout==2.0.1 From bbbc29191a8a430a7024cacd460b7e2d35e0dfb0 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Mon, 11 Oct 2021 22:43:25 +0300 Subject: [PATCH 172/211] Bump minimum paramiko to support Python 3.10 Signed-off-by: Hugo van Kemenade --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 26cbc6fb..d7c11aaa 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ enum34==1.1.6 idna==2.5 ipaddress==1.0.18 packaging==16.8 -paramiko==2.4.2 +paramiko==2.8.0 pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 diff --git a/setup.py b/setup.py index 1e76fdb1..db2d6ebc 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ extras_require = { 'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=3.4.7', 'idna>=2.0.0'], # Only required when connecting using the ssh:// protocol - 'ssh': ['paramiko>=2.4.2'], + 'ssh': ['paramiko>=2.4.3'], } From 4bb99311e2911406dde543117438782a9524feea Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Mon, 11 Oct 2021 22:50:39 +0300 Subject: [PATCH 173/211] Don't install mock backport Signed-off-by: Hugo van Kemenade --- test-requirements.txt | 1 - tests/integration/credentials/utils_test.py | 6 +----- tests/unit/api_container_test.py | 6 +----- tests/unit/api_image_test.py | 6 +----- tests/unit/api_network_test.py | 6 +----- tests/unit/api_test.py | 6 +----- tests/unit/auth_test.py | 7 +------ tests/unit/client_test.py | 6 +----- tests/unit/dockertypes_test.py | 6 +----- tests/unit/fake_api_client.py | 6 +----- tests/unit/utils_config_test.py | 6 +----- 11 files changed, 10 insertions(+), 52 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 
d135792b..ccc97be4 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,7 +1,6 @@ setuptools==58.2.0 coverage==6.0.1 flake8==4.0.1 -mock==1.0.1 pytest==6.2.5 pytest-cov==3.0.0 pytest-timeout==2.0.1 diff --git a/tests/integration/credentials/utils_test.py b/tests/integration/credentials/utils_test.py index d7b2a1a4..acf018d2 100644 --- a/tests/integration/credentials/utils_test.py +++ b/tests/integration/credentials/utils_test.py @@ -1,11 +1,7 @@ import os from docker.credentials.utils import create_environment_dict - -try: - from unittest import mock -except ImportError: - from unittest import mock +from unittest import mock @mock.patch.dict(os.environ) diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index 1ebd37df..a66aea04 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -4,6 +4,7 @@ import signal import docker from docker.api import APIClient +from unittest import mock import pytest from . import fake_api @@ -13,11 +14,6 @@ from .api_test import ( fake_inspect_container, url_base ) -try: - from unittest import mock -except ImportError: - from unittest import mock - def fake_inspect_container_tty(self, container): return fake_inspect_container(self, container, tty=True) diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py index 843c11b8..8fb3e9d9 100644 --- a/tests/unit/api_image_test.py +++ b/tests/unit/api_image_test.py @@ -3,16 +3,12 @@ import pytest from . 
import fake_api from docker import auth +from unittest import mock from .api_test import ( BaseAPIClientTest, fake_request, DEFAULT_TIMEOUT_SECONDS, url_prefix, fake_resolve_authconfig ) -try: - from unittest import mock -except ImportError: - from unittest import mock - class ImageTest(BaseAPIClientTest): def test_image_viz(self): diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py index 84d65449..8afab737 100644 --- a/tests/unit/api_network_test.py +++ b/tests/unit/api_network_test.py @@ -2,11 +2,7 @@ import json from .api_test import BaseAPIClientTest, url_prefix, response from docker.types import IPAMConfig, IPAMPool - -try: - from unittest import mock -except ImportError: - from unittest import mock +from unittest import mock class NetworkTest(BaseAPIClientTest): diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index dfc38164..3234e55b 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -19,14 +19,10 @@ import requests from docker.api import APIClient from docker.constants import DEFAULT_DOCKER_API_VERSION from requests.packages import urllib3 +from unittest import mock from . 
import fake_api -try: - from unittest import mock -except ImportError: - from unittest import mock - DEFAULT_TIMEOUT_SECONDS = docker.constants.DEFAULT_TIMEOUT_SECONDS diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index 8bd2e165..ea953af0 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -8,14 +8,9 @@ import tempfile import unittest from docker import auth, credentials, errors +from unittest import mock import pytest -try: - from unittest import mock -except ImportError: - from unittest import mock - - class RegressionTest(unittest.TestCase): def test_803_urlsafe_encode(self): auth_data = { diff --git a/tests/unit/client_test.py b/tests/unit/client_test.py index d647d3a1..e7c7eec8 100644 --- a/tests/unit/client_test.py +++ b/tests/unit/client_test.py @@ -9,14 +9,10 @@ from docker.constants import ( DEFAULT_MAX_POOL_SIZE, IS_WINDOWS_PLATFORM ) from docker.utils import kwargs_from_env +from unittest import mock from . import fake_api -try: - from unittest import mock -except ImportError: - from unittest import mock - TEST_CERT_DIR = os.path.join(os.path.dirname(__file__), 'testdata/certs') POOL_SIZE = 20 diff --git a/tests/unit/dockertypes_test.py b/tests/unit/dockertypes_test.py index a0a171be..76a99a62 100644 --- a/tests/unit/dockertypes_test.py +++ b/tests/unit/dockertypes_test.py @@ -9,11 +9,7 @@ from docker.types import ( IPAMPool, LogConfig, Mount, ServiceMode, Ulimit, ) from docker.types.services import convert_service_ports - -try: - from unittest import mock -except: # noqa: E722 - from unittest import mock +from unittest import mock def create_host_config(*args, **kwargs): diff --git a/tests/unit/fake_api_client.py b/tests/unit/fake_api_client.py index 1663ef12..95cf63b4 100644 --- a/tests/unit/fake_api_client.py +++ b/tests/unit/fake_api_client.py @@ -2,13 +2,9 @@ import copy import docker from docker.constants import DEFAULT_DOCKER_API_VERSION +from unittest import mock from . 
import fake_api -try: - from unittest import mock -except ImportError: - from unittest import mock - class CopyReturnMagicMock(mock.MagicMock): """ diff --git a/tests/unit/utils_config_test.py b/tests/unit/utils_config_test.py index 83e04a14..27d5a7cd 100644 --- a/tests/unit/utils_config_test.py +++ b/tests/unit/utils_config_test.py @@ -5,14 +5,10 @@ import tempfile import json from pytest import mark, fixture +from unittest import mock from docker.utils import config -try: - from unittest import mock -except ImportError: - from unittest import mock - class FindConfigFileTest(unittest.TestCase): From e0a3abfc3786800c8fce82e8efdd60c4383ebc80 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Mar 2022 21:55:39 +0000 Subject: [PATCH 174/211] Bump paramiko from 2.8.0 to 2.10.1 Bumps [paramiko](https://github.com/paramiko/paramiko) from 2.8.0 to 2.10.1. - [Release notes](https://github.com/paramiko/paramiko/releases) - [Changelog](https://github.com/paramiko/paramiko/blob/main/NEWS) - [Commits](https://github.com/paramiko/paramiko/compare/2.8.0...2.10.1) --- updated-dependencies: - dependency-name: paramiko dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index d7c11aaa..a0eb5319 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ enum34==1.1.6 idna==2.5 ipaddress==1.0.18 packaging==16.8 -paramiko==2.8.0 +paramiko==2.10.1 pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 From a6db044bd4e0e0dae1d7d87f0c0fc85619757535 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 08:43:45 -0400 Subject: [PATCH 175/211] deps: upgrade pywin32 & relax version constraint (#3004) Upgrade to latest pywin32, which has support for Python 3.10 and resolves a CVE (related to ACL APIs, outside the scope of what `docker-py` relies on, which is npipe support, but still gets flagged by scanners). The version constraint has also been relaxed in `setup.py` to allow newer versions of pywin32. This is similar to how we handle the other packages there, and should be safe from a compatibility perspective. Fixes #2902. Closes #2972 and closes #2980. Signed-off-by: Milas Bowman --- requirements.txt | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index a0eb5319..c74d8cea 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,7 +11,7 @@ paramiko==2.10.1 pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 -pywin32==301; sys_platform == 'win32' +pywin32==304; sys_platform == 'win32' requests==2.26.0 urllib3==1.26.5 websocket-client==0.56.0 diff --git a/setup.py b/setup.py index db2d6ebc..3be63ba6 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ requirements = [ extras_require = { # win32 APIs if on Windows (required for npipe support) - ':sys_platform == "win32"': 'pywin32==227', + ':sys_platform == "win32"': 'pywin32>=304', # If using docker-py over TLS, highly recommend this option is # pip-installed or pinned. 
From 2933af2ca760cda128f1a48145170a56ba732abd Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 08:51:52 -0400 Subject: [PATCH 176/211] ci: remove Python 3.6 and add 3.11 pre-releases (#3005) * Python 3.6 went EOL Dec 2021 * Python 3.11 is in beta and due for GA release in October 2022 Signed-off-by: Milas Bowman --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a73bcbad..29e022a9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,12 +8,12 @@ jobs: strategy: max-parallel: 1 matrix: - python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11.0-alpha - 3.11.0"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies From f16c4e1147c81afd822fe72191f0f720cb0ba637 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 11:35:44 -0400 Subject: [PATCH 177/211] utils: fix IPv6 address w/ port parsing (#3006) This was using a deprecated function (`urllib.splitnport`), ostensibly to work around issues with brackets on IPv6 addresses. Ironically, its usage was broken, and would result in mangled IPv6 addresses if they had a port specified in some instances. Usage of the deprecated function has been eliminated and extra test cases added where missing. All existing cases pass as-is. (The only other change to the test was to improve assertion messages.) 
Signed-off-by: Milas Bowman --- docker/utils/utils.py | 38 ++++++++++++++++++++++++-------------- tests/unit/utils_test.py | 11 +++++++++-- 2 files changed, 33 insertions(+), 16 deletions(-) diff --git a/docker/utils/utils.py b/docker/utils/utils.py index f7c3dd7d..7b229099 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -1,4 +1,5 @@ import base64 +import collections import json import os import os.path @@ -8,15 +9,20 @@ from datetime import datetime from distutils.version import StrictVersion from .. import errors -from .. import tls from ..constants import DEFAULT_HTTP_HOST from ..constants import DEFAULT_UNIX_SOCKET from ..constants import DEFAULT_NPIPE from ..constants import BYTE_UNITS +from ..tls import TLSConfig -from urllib.parse import splitnport, urlparse +from urllib.parse import urlparse, urlunparse +URLComponents = collections.namedtuple( + 'URLComponents', + 'scheme netloc url params query fragment', +) + def create_ipam_pool(*args, **kwargs): raise errors.DeprecatedMethod( 'utils.create_ipam_pool has been removed. Please use a ' @@ -201,10 +207,6 @@ def parse_repository_tag(repo_name): def parse_host(addr, is_win32=False, tls=False): - path = '' - port = None - host = None - # Sensible defaults if not addr and is_win32: return DEFAULT_NPIPE @@ -263,20 +265,20 @@ def parse_host(addr, is_win32=False, tls=False): # to be valid and equivalent to unix:///path path = '/'.join((parsed_url.hostname, path)) + netloc = parsed_url.netloc if proto in ('tcp', 'ssh'): - # parsed_url.hostname strips brackets from IPv6 addresses, - # which can be problematic hence our use of splitnport() instead. 
- host, port = splitnport(parsed_url.netloc) - if port is None or port < 0: + port = parsed_url.port or 0 + if port <= 0: if proto != 'ssh': raise errors.DockerException( 'Invalid bind address format: port is required:' ' {}'.format(addr) ) port = 22 + netloc = f'{parsed_url.netloc}:{port}' - if not host: - host = DEFAULT_HTTP_HOST + if not parsed_url.hostname: + netloc = f'{DEFAULT_HTTP_HOST}:{port}' # Rewrite schemes to fit library internals (requests adapters) if proto == 'tcp': @@ -286,7 +288,15 @@ def parse_host(addr, is_win32=False, tls=False): if proto in ('http+unix', 'npipe'): return f"{proto}://{path}".rstrip('/') - return f'{proto}://{host}:{port}{path}'.rstrip('/') + + return urlunparse(URLComponents( + scheme=proto, + netloc=netloc, + url=path, + params='', + query='', + fragment='', + )).rstrip('/') def parse_devices(devices): @@ -351,7 +361,7 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None): # so if it's not set already then set it to false. 
assert_hostname = False - params['tls'] = tls.TLSConfig( + params['tls'] = TLSConfig( client_cert=(os.path.join(cert_path, 'cert.pem'), os.path.join(cert_path, 'key.pem')), ca_cert=os.path.join(cert_path, 'ca.pem'), diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 802d9196..12cb7bd6 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -296,17 +296,24 @@ class ParseHostTest(unittest.TestCase): '[fd12::82d1]:2375/docker/engine': ( 'http://[fd12::82d1]:2375/docker/engine' ), + 'ssh://[fd12::82d1]': 'ssh://[fd12::82d1]:22', + 'ssh://user@[fd12::82d1]:8765': 'ssh://user@[fd12::82d1]:8765', 'ssh://': 'ssh://127.0.0.1:22', 'ssh://user@localhost:22': 'ssh://user@localhost:22', 'ssh://user@remote': 'ssh://user@remote:22', } for host in invalid_hosts: - with pytest.raises(DockerException): + msg = f'Should have failed to parse invalid host: {host}' + with self.assertRaises(DockerException, msg=msg): parse_host(host, None) for host, expected in valid_hosts.items(): - assert parse_host(host, None) == expected + self.assertEqual( + parse_host(host, None), + expected, + msg=f'Failed to parse valid host: {host}', + ) def test_parse_host_empty_value(self): unix_socket = 'http+unix:///var/run/docker.sock' From 7168e09b1628b85a09e95cf8bae6bfd94b61a6c4 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Tue, 26 Jul 2022 18:06:51 +0200 Subject: [PATCH 178/211] test: fix for cgroupv2 (#2940) This test was verifying that the container has the right options set (through `docker inspect`), but also checks if the cgroup-rules are set within the container by reading `/sys/fs/cgroup/devices/devices.list` Unlike cgroups v1, on cgroups v2, there is no file interface, and rules are handled through ebpf, which means that the test will fail because this file is not present. From the Linux documentation for cgroups v2: https://github.com/torvalds/linux/blob/v5.16/Documentation/admin-guide/cgroup-v2.rst#device-controller > (...) 
> Device controller manages access to device files. It includes both creation of > new device files (using mknod), and access to the existing device files. > > Cgroup v2 device controller has no interface files and is implemented on top > of cgroup BPF. To control access to device files, a user may create bpf programs > of type BPF_PROG_TYPE_CGROUP_DEVICE and attach them to cgroups with > BPF_CGROUP_DEVICE flag. (...) Given that setting the right cgroups is not really a responsibility of this SDK, it should be sufficient to verify that the right options were set in the container configuration, so this patch is removing the part that checks the cgroup, to allow this test to be run on a host with cgroups v2 enabled. Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_container_test.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 9da2cfbf..062693ef 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -460,16 +460,13 @@ class CreateContainerTest(BaseAPIIntegrationTest): def test_create_with_device_cgroup_rules(self): rule = 'c 7:128 rwm' ctnr = self.client.create_container( - TEST_IMG, 'cat /sys/fs/cgroup/devices/devices.list', - host_config=self.client.create_host_config( + TEST_IMG, 'true', host_config=self.client.create_host_config( device_cgroup_rules=[rule] ) ) self.tmp_containers.append(ctnr) config = self.client.inspect_container(ctnr) assert config['HostConfig']['DeviceCgroupRules'] == [rule] - self.client.start(ctnr) - assert rule in self.client.logs(ctnr).decode('utf-8') def test_create_with_uts_mode(self): container = self.client.create_container( From 74e0c5eb8c38f0a219cc0120bc51de99c1c8159e Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 12:55:14 -0400 Subject: [PATCH 179/211] test: fix flaky container log test Ensure the container has exited before attempting to grab 
the logs. Since we are not streaming them, it's possible to attach + grab logs before the output is processed, resulting in a test failure. If the container has exited, it's guaranteed to have logged :) Signed-off-by: Milas Bowman --- tests/integration/api_container_test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 062693ef..0d6d9f96 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1217,12 +1217,14 @@ class AttachContainerTest(BaseAPIIntegrationTest): data = read_exactly(pty_stdout, next_size) assert data.decode('utf-8') == line + @pytest.mark.timeout(10) def test_attach_no_stream(self): container = self.client.create_container( TEST_IMG, 'echo hello' ) self.tmp_containers.append(container) self.client.start(container) + self.client.wait(container, condition='not-running') output = self.client.attach(container, stream=False, logs=True) assert output == 'hello\n'.encode(encoding='ascii') From 4765f624419c503012508f0fecbe4f63e492cde1 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 12:56:07 -0400 Subject: [PATCH 180/211] test: mark invalid test as xfail This test looks for some behavior on non-chunked HTTP requests. It now fails because it looks like recent versions of Docker Engine ALWAYS return chunked responses (or perhaps this specific response changed somehow to now trigger chunking whereas it did not previously). The actual logic it's trying to test is also unusual because it's trying to hackily propagate errors under the assumption that it'd get a non-chunked response on failure, which is...not reliable. Arguably, the chunked reader should be refactored somehow but that's a refactor we can't really commit to (and it's evidently been ok enough as is up until now). 
Signed-off-by: Milas Bowman --- tests/integration/regression_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/integration/regression_test.py b/tests/integration/regression_test.py index deb9aff1..10313a63 100644 --- a/tests/integration/regression_test.py +++ b/tests/integration/regression_test.py @@ -8,6 +8,7 @@ import pytest class TestRegressions(BaseAPIIntegrationTest): + @pytest.mark.xfail(True, reason='Docker API always returns chunked resp') def test_443_handle_nonchunked_response_in_stream(self): dfile = io.BytesIO() with pytest.raises(docker.errors.APIError) as exc: From ce40d4bb34e9324e3ee640f0acc23604498db21d Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 13:48:10 -0400 Subject: [PATCH 181/211] ci: add flake8 job Project is already configured for flake8 but it never gets run in CI. Signed-off-by: Milas Bowman --- .github/workflows/ci.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 29e022a9..0096ddd2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,6 +3,17 @@ name: Python package on: [push, pull_request] jobs: + flake8: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.x' + - run: pip install -U flake8 + - name: Run flake8 + run: flake8 docker/ tests/ + build: runs-on: ubuntu-latest strategy: From 3ffdd8a1c52cb7677d926feaf1a44d585a066dac Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 13:48:47 -0400 Subject: [PATCH 182/211] lint: fix outstanding flake8 violations Since flake8 wasn't actually being run in CI, we'd accumulated some violations. 
Signed-off-by: Milas Bowman --- docker/api/build.py | 2 +- docker/api/container.py | 13 +++++++++---- docker/api/image.py | 10 ++++++++-- docker/api/volume.py | 19 +++++++++++-------- docker/models/containers.py | 3 ++- docker/models/images.py | 5 ++++- docker/models/plugins.py | 6 +++++- docker/utils/utils.py | 1 + tests/integration/api_config_test.py | 2 +- tests/unit/auth_test.py | 1 + tests/unit/utils_build_test.py | 24 ++++++++++++------------ 11 files changed, 55 insertions(+), 31 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index aac43c46..a48204a9 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -153,7 +153,7 @@ class BuildApiMixin: with open(dockerignore) as f: exclude = list(filter( lambda x: x != '' and x[0] != '#', - [l.strip() for l in f.read().splitlines()] + [line.strip() for line in f.read().splitlines()] )) dockerfile = process_dockerfile(dockerfile, path) context = utils.tar( diff --git a/docker/api/container.py b/docker/api/container.py index 83fcd4f6..17c09726 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -256,7 +256,9 @@ class ContainerApiMixin: .. code-block:: python - client.api.create_host_config(port_bindings={1111: ('127.0.0.1', 4567)}) + client.api.create_host_config( + port_bindings={1111: ('127.0.0.1', 4567)} + ) Or without host port assignment: @@ -579,10 +581,13 @@ class ContainerApiMixin: Example: - >>> client.api.create_host_config(privileged=True, cap_drop=['MKNOD'], - volumes_from=['nostalgic_newton']) + >>> client.api.create_host_config( + ... privileged=True, + ... cap_drop=['MKNOD'], + ... volumes_from=['nostalgic_newton'], + ... 
) {'CapDrop': ['MKNOD'], 'LxcConf': None, 'Privileged': True, - 'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False} + 'VolumesFrom': ['nostalgic_newton'], 'PublishAllPorts': False} """ if not kwargs: diff --git a/docker/api/image.py b/docker/api/image.py index 772d8895..5e1466ec 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -377,7 +377,8 @@ class ImageApiMixin: Example: - >>> for line in client.api.pull('busybox', stream=True, decode=True): + >>> resp = client.api.pull('busybox', stream=True, decode=True) + ... for line in resp: ... print(json.dumps(line, indent=4)) { "status": "Pulling image (latest) from busybox", @@ -456,7 +457,12 @@ class ImageApiMixin: If the server returns an error. Example: - >>> for line in client.api.push('yourname/app', stream=True, decode=True): + >>> resp = client.api.push( + ... 'yourname/app', + ... stream=True, + ... decode=True, + ... ) + ... for line in resp: ... print(line) {'status': 'Pushing repository yourname/app (1 tags)'} {'status': 'Pushing','progressDetail': {}, 'id': '511136ea3c5a'} diff --git a/docker/api/volume.py b/docker/api/volume.py index 86b00187..98b42a12 100644 --- a/docker/api/volume.py +++ b/docker/api/volume.py @@ -56,15 +56,18 @@ class VolumeApiMixin: Example: - >>> volume = client.api.create_volume(name='foobar', driver='local', - driver_opts={'foo': 'bar', 'baz': 'false'}, - labels={"key": "value"}) - >>> print(volume) + >>> volume = client.api.create_volume( + ... name='foobar', + ... driver='local', + ... driver_opts={'foo': 'bar', 'baz': 'false'}, + ... labels={"key": "value"}, + ... ) + ... 
print(volume) {u'Driver': u'local', - u'Labels': {u'key': u'value'}, - u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', - u'Name': u'foobar', - u'Scope': u'local'} + u'Labels': {u'key': u'value'}, + u'Mountpoint': u'/var/lib/docker/volumes/foobar/_data', + u'Name': u'foobar', + u'Scope': u'local'} """ url = self._url('/volumes/create') diff --git a/docker/models/containers.py b/docker/models/containers.py index 957deed4..e34659cb 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -761,7 +761,8 @@ class ContainerCollection(Collection): {'/home/user1/': {'bind': '/mnt/vol2', 'mode': 'rw'}, '/var/www': {'bind': '/mnt/vol1', 'mode': 'ro'}} - Or a list of strings which each one of its elements specifies a mount volume. + Or a list of strings which each one of its elements specifies a + mount volume. For example: diff --git a/docker/models/images.py b/docker/models/images.py index 46f8efee..ef668c7d 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -15,7 +15,10 @@ class Image(Model): An image on the server. 
""" def __repr__(self): - return "<{}: '{}'>".format(self.__class__.__name__, "', '".join(self.tags)) + return "<{}: '{}'>".format( + self.__class__.__name__, + "', '".join(self.tags), + ) @property def labels(self): diff --git a/docker/models/plugins.py b/docker/models/plugins.py index 37ecefbe..69b94f35 100644 --- a/docker/models/plugins.py +++ b/docker/models/plugins.py @@ -117,7 +117,11 @@ class Plugin(Model): if remote is None: remote = self.name privileges = self.client.api.plugin_privileges(remote) - yield from self.client.api.upgrade_plugin(self.name, remote, privileges) + yield from self.client.api.upgrade_plugin( + self.name, + remote, + privileges, + ) self.reload() diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 7b229099..71e4014d 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -23,6 +23,7 @@ URLComponents = collections.namedtuple( 'scheme netloc url params query fragment', ) + def create_ipam_pool(*args, **kwargs): raise errors.DeprecatedMethod( 'utils.create_ipam_pool has been removed. 
Please use a ' diff --git a/tests/integration/api_config_test.py b/tests/integration/api_config_test.py index 82cb5161..982ec468 100644 --- a/tests/integration/api_config_test.py +++ b/tests/integration/api_config_test.py @@ -73,7 +73,7 @@ class ConfigAPITest(BaseAPIIntegrationTest): def test_create_config_with_templating(self): config_id = self.client.create_config( 'favorite_character', 'sakuya izayoi', - templating={ 'name': 'golang'} + templating={'name': 'golang'} ) self.tmp_configs.append(config_id) assert 'ID' in config_id diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index ea953af0..dd5b5f8b 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -11,6 +11,7 @@ from docker import auth, credentials, errors from unittest import mock import pytest + class RegressionTest(unittest.TestCase): def test_803_urlsafe_encode(self): auth_data = { diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py index 9f183886..fa7d833d 100644 --- a/tests/unit/utils_build_test.py +++ b/tests/unit/utils_build_test.py @@ -272,8 +272,8 @@ class ExcludePathsTest(unittest.TestCase): assert self.exclude(['**/target/*/*']) == convert_paths( self.all_paths - { 'target/subdir/file.txt', - 'subdir/target/subdir/file.txt', - 'subdir/subdir2/target/subdir/file.txt' + 'subdir/target/subdir/file.txt', + 'subdir/subdir2/target/subdir/file.txt' } ) @@ -281,16 +281,16 @@ class ExcludePathsTest(unittest.TestCase): assert self.exclude(['subdir/**']) == convert_paths( self.all_paths - { 'subdir/file.txt', - 'subdir/target/file.txt', - 'subdir/target/subdir/file.txt', - 'subdir/subdir2/file.txt', - 'subdir/subdir2/target/file.txt', - 'subdir/subdir2/target/subdir/file.txt', - 'subdir/target', - 'subdir/target/subdir', - 'subdir/subdir2', - 'subdir/subdir2/target', - 'subdir/subdir2/target/subdir' + 'subdir/target/file.txt', + 'subdir/target/subdir/file.txt', + 'subdir/subdir2/file.txt', + 'subdir/subdir2/target/file.txt', + 
'subdir/subdir2/target/subdir/file.txt', + 'subdir/target', + 'subdir/target/subdir', + 'subdir/subdir2', + 'subdir/subdir2/target', + 'subdir/subdir2/target/subdir' } ) From bb11197ee3407798a53c50e43aa994fe8cd9c8e7 Mon Sep 17 00:00:00 2001 From: Maor Kleinberger Date: Tue, 26 Jul 2022 22:07:23 +0300 Subject: [PATCH 183/211] client: fix exception semantics in _raise_for_status (#2954) We want "The above exception was the direct cause of the following exception:" instead of "During handling of the above exception, another exception occurred:" Signed-off-by: Maor Kleinberger --- docker/api/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/api/client.py b/docker/api/client.py index 2667922d..7733d334 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -267,7 +267,7 @@ class APIClient( try: response.raise_for_status() except requests.exceptions.HTTPError as e: - raise create_api_error_from_http_exception(e) + raise create_api_error_from_http_exception(e) from e def _result(self, response, json=False, binary=False): assert not (json and binary) From 56dd6de7dfad9bedc7c8af99308707ecc3fad78e Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Tue, 26 Jul 2022 15:12:03 -0400 Subject: [PATCH 184/211] tls: use auto-negotiated highest version (#3007) Specific TLS versions are deprecated in latest Python, which causes test failures due to treating deprecation errors as warnings. Luckily, the fix here is straightforward: we can eliminate some custom version selection logic by using `PROTOCOL_TLS_CLIENT`, which is the recommended method and will select the highest TLS version supported by both client and server. 
Signed-off-by: Milas Bowman --- docker/tls.py | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/docker/tls.py b/docker/tls.py index 067d5563..882a50ea 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -37,30 +37,11 @@ class TLSConfig: self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint - # TODO(dperny): according to the python docs, PROTOCOL_TLSvWhatever is - # depcreated, and it's recommended to use OPT_NO_TLSvWhatever instead - # to exclude versions. But I think that might require a bigger - # architectural change, so I've opted not to pursue it at this time - # If the user provides an SSL version, we should use their preference if ssl_version: self.ssl_version = ssl_version else: - # If the user provides no ssl version, we should default to - # TLSv1_2. This option is the most secure, and will work for the - # majority of users with reasonably up-to-date software. However, - # before doing so, detect openssl version to ensure we can support - # it. - if ssl.OPENSSL_VERSION_INFO[:3] >= (1, 0, 1) and hasattr( - ssl, 'PROTOCOL_TLSv1_2'): - # If the OpenSSL version is high enough to support TLSv1_2, - # then we should use it. - self.ssl_version = getattr(ssl, 'PROTOCOL_TLSv1_2') - else: - # Otherwise, TLS v1.0 seems to be the safest default; - # SSLv23 fails in mysterious ways: - # https://github.com/docker/docker-py/issues/963 - self.ssl_version = ssl.PROTOCOL_TLSv1 + self.ssl_version = ssl.PROTOCOL_TLS_CLIENT # "client_cert" must have both or neither cert/key files. 
In # either case, Alert the user when both are expected, but any are From 4e19cc48dfd88d0a9a8bdbbe4df4357322619d02 Mon Sep 17 00:00:00 2001 From: Guy Lichtman <1395797+glicht@users.noreply.github.com> Date: Tue, 26 Jul 2022 22:16:12 +0300 Subject: [PATCH 185/211] transport: fix ProxyCommand for SSH conn (#2993) Signed-off-by: Guy Lichtman --- docker/transport/sshconn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 8e6beb25..76d1fa44 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -204,7 +204,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): host_config = conf.lookup(base_url.hostname) if 'proxycommand' in host_config: self.ssh_params["sock"] = paramiko.ProxyCommand( - self.ssh_conf['proxycommand'] + host_config['proxycommand'] ) if 'hostname' in host_config: self.ssh_params['hostname'] = host_config['hostname'] From 2e6dad798324a1d993314f39e9a844b705b61e0d Mon Sep 17 00:00:00 2001 From: Francesco Casalegno Date: Tue, 26 Jul 2022 21:45:51 +0200 Subject: [PATCH 186/211] deps: use `packaging` instead of deprecated `distutils` (#2931) Replace `distutils.Version` (deprecated) with `packaging.Version` Signed-off-by: Francesco Casalegno --- docker/transport/ssladapter.py | 4 ++-- docker/utils/utils.py | 6 +++--- requirements.txt | 2 +- setup.py | 1 + 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/docker/transport/ssladapter.py b/docker/transport/ssladapter.py index 31e3014e..bdca1d04 100644 --- a/docker/transport/ssladapter.py +++ b/docker/transport/ssladapter.py @@ -4,7 +4,7 @@ """ import sys -from distutils.version import StrictVersion +from packaging.version import Version from requests.adapters import HTTPAdapter from docker.transport.basehttpadapter import BaseHTTPAdapter @@ -70,4 +70,4 @@ class SSLHTTPAdapter(BaseHTTPAdapter): return False if urllib_ver == 'dev': return True - return StrictVersion(urllib_ver) > StrictVersion('1.5') + return 
Version(urllib_ver) > Version('1.5') diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 7b229099..3683ac54 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -6,7 +6,7 @@ import os.path import shlex import string from datetime import datetime -from distutils.version import StrictVersion +from packaging.version import Version from .. import errors from ..constants import DEFAULT_HTTP_HOST @@ -55,8 +55,8 @@ def compare_version(v1, v2): >>> compare_version(v2, v2) 0 """ - s1 = StrictVersion(v1) - s2 = StrictVersion(v2) + s1 = Version(v1) + s2 = Version(v2) if s1 == s2: return 0 elif s1 > s2: diff --git a/requirements.txt b/requirements.txt index c74d8cea..7bcca763 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ cryptography==3.4.7 enum34==1.1.6 idna==2.5 ipaddress==1.0.18 -packaging==16.8 +packaging==21.3 paramiko==2.10.1 pycparser==2.17 pyOpenSSL==18.0.0 diff --git a/setup.py b/setup.py index 3be63ba6..833de3aa 100644 --- a/setup.py +++ b/setup.py @@ -10,6 +10,7 @@ ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) requirements = [ + 'packaging', 'websocket-client >= 0.32.0', 'requests >= 2.14.2, != 2.18.0', ] From 0ee9f260e48992d04d72c7bb8e4819f6b6a64717 Mon Sep 17 00:00:00 2001 From: Leonard Kinday Date: Tue, 26 Jul 2022 22:33:21 +0200 Subject: [PATCH 187/211] ci: run integration tests & fix race condition (#2947) * Fix integration tests race condition * Run integration tests on CI * Use existing DIND version Signed-off-by: Leonard Kinday Co-authored-by: Milas Bowman --- .github/workflows/ci.yml | 21 ++++++++-- Makefile | 84 +++++++++++++++++++++++++++++++++------- 2 files changed, 86 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0096ddd2..e2987b49 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,10 +14,9 @@ jobs: - name: Run flake8 run: flake8 docker/ tests/ - build: + unit-tests: runs-on: ubuntu-latest 
strategy: - max-parallel: 1 matrix: python-version: ["3.7", "3.8", "3.9", "3.10", "3.11.0-alpha - 3.11.0"] @@ -26,13 +25,27 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: - python-version: ${{ matrix.python-version }} + python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python3 -m pip install --upgrade pip pip3 install -r test-requirements.txt -r requirements.txt - - name: Test with pytest + - name: Run unit tests run: | docker logout rm -rf ~/.docker py.test -v --cov=docker tests/unit + + integration-tests: + runs-on: ubuntu-latest + strategy: + matrix: + variant: [ "integration-dind", "integration-dind-ssl" ] + + steps: + - uses: actions/checkout@v3 + - name: make ${{ matrix.variant }} + run: | + docker logout + rm -rf ~/.docker + make ${{ matrix.variant }} diff --git a/Makefile b/Makefile index 78a0d334..b71479ee 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,5 @@ TEST_API_VERSION ?= 1.41 -TEST_ENGINE_VERSION ?= 20.10.05 +TEST_ENGINE_VERSION ?= 20.10 .PHONY: all all: test @@ -46,10 +46,32 @@ integration-dind: integration-dind-py3 .PHONY: integration-dind-py3 integration-dind-py3: build-py3 setup-network docker rm -vf dpy-dind-py3 || : - docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\ - docker:${TEST_ENGINE_VERSION}-dind dockerd -H tcp://0.0.0.0:2375 --experimental - docker run -t --rm --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python3 py.test tests/integration/${file} + + docker run \ + --detach \ + --name dpy-dind-py3 \ + --network dpy-tests \ + --privileged \ + docker:${TEST_ENGINE_VERSION}-dind \ + dockerd -H tcp://0.0.0.0:2375 --experimental + + # Wait for Docker-in-Docker to come to life + docker run \ + --network dpy-tests \ + --rm \ + --tty \ + busybox \ + sh -c 'while ! 
nc -z dpy-dind-py3 2375; do sleep 1; done' + + docker run \ + --env="DOCKER_HOST=tcp://dpy-dind-py3:2375" \ + --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \ + --network dpy-tests \ + --rm \ + --tty \ + docker-sdk-python3 \ + py.test tests/integration/${file} + docker rm -vf dpy-dind-py3 @@ -66,18 +88,50 @@ integration-ssh-py3: build-dind-ssh build-py3 setup-network .PHONY: integration-dind-ssl -integration-dind-ssl: build-dind-certs build-py3 +integration-dind-ssl: build-dind-certs build-py3 setup-network docker rm -vf dpy-dind-certs dpy-dind-ssl || : docker run -d --name dpy-dind-certs dpy-dind-certs - docker run -d --env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1"\ - --env="DOCKER_CERT_PATH=/certs" --volumes-from dpy-dind-certs --name dpy-dind-ssl\ - --network dpy-tests --network-alias docker -v /tmp --privileged\ - docker:${TEST_ENGINE_VERSION}-dind\ - dockerd --tlsverify --tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\ - --tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental - docker run -t --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\ - --env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python3 py.test tests/integration/${file} + + docker run \ + --detach \ + --env="DOCKER_CERT_PATH=/certs" \ + --env="DOCKER_HOST=tcp://localhost:2375" \ + --env="DOCKER_TLS_VERIFY=1" \ + --name dpy-dind-ssl \ + --network dpy-tests \ + --network-alias docker \ + --privileged \ + --volume /tmp \ + --volumes-from dpy-dind-certs \ + docker:${TEST_ENGINE_VERSION}-dind \ + dockerd \ + --tlsverify \ + --tlscacert=/certs/ca.pem \ + --tlscert=/certs/server-cert.pem \ + --tlskey=/certs/server-key.pem \ + -H tcp://0.0.0.0:2375 \ + --experimental + + # Wait for Docker-in-Docker to come to life + docker run \ + --network dpy-tests \ + --rm \ + --tty \ + busybox \ + sh -c 'while ! 
nc -z dpy-dind-ssl 2375; do sleep 1; done' + + docker run \ + --env="DOCKER_CERT_PATH=/certs" \ + --env="DOCKER_HOST=tcp://docker:2375" \ + --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \ + --env="DOCKER_TLS_VERIFY=1" \ + --network dpy-tests \ + --rm \ + --volumes-from dpy-dind-ssl \ + --tty \ + docker-sdk-python3 \ + py.test tests/integration/${file} + docker rm -vf dpy-dind-ssl dpy-dind-certs .PHONY: flake8 From da62a2883715e15f8b83ab0e9a073b3655a2d456 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Wed, 27 Jul 2022 14:44:36 -0400 Subject: [PATCH 188/211] deps: test on Python 3.10 by default (#3010) * Upgrade to latest Sphinx / recommonmark * Small CSS fix for issue in new version of Alabaster theme * Fix `Makefile` target for macOS Signed-off-by: Milas Bowman --- .readthedocs.yml | 6 +++++- Dockerfile | 2 +- Dockerfile-docs | 2 +- Jenkinsfile | 4 ++-- Makefile | 12 +++++++++++- docs-requirements.txt | 4 ++-- docs/_static/custom.css | 5 +++++ setup.py | 3 +-- tests/Dockerfile | 2 +- tests/Dockerfile-dind-certs | 2 +- tests/Dockerfile-ssh-dind | 4 ++-- 11 files changed, 32 insertions(+), 14 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 32113fed..464c7826 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -3,8 +3,12 @@ version: 2 sphinx: configuration: docs/conf.py +build: + os: ubuntu-20.04 + tools: + python: '3.10' + python: - version: 3.6 install: - requirements: docs-requirements.txt - requirements: requirements.txt diff --git a/Dockerfile b/Dockerfile index 22732dec..8a0d32e4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION=3.7 +ARG PYTHON_VERSION=3.10 FROM python:${PYTHON_VERSION} diff --git a/Dockerfile-docs b/Dockerfile-docs index 9d11312f..98901dfe 100644 --- a/Dockerfile-docs +++ b/Dockerfile-docs @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION=3.7 +ARG PYTHON_VERSION=3.10 FROM python:${PYTHON_VERSION} diff --git a/Jenkinsfile b/Jenkinsfile index f524ae7a..f9431eac 100644 --- a/Jenkinsfile +++ 
b/Jenkinsfile @@ -25,7 +25,7 @@ def buildImages = { -> imageDindSSH = "${imageNameBase}:sshdind-${gitCommit()}" withDockerRegistry(credentialsId:'dockerbuildbot-index.docker.io') { buildImage(imageDindSSH, "-f tests/Dockerfile-ssh-dind .", "") - buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.7 .", "py3.7") + buildImage(imageNamePy3, "-f tests/Dockerfile --build-arg PYTHON_VERSION=3.10 .", "py3.10") } } } @@ -70,7 +70,7 @@ def runTests = { Map settings -> throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '19.03.12')`") } if (!pythonVersion) { - throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py3.7')`") + throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py3.x')`") } { -> diff --git a/Makefile b/Makefile index b71479ee..27144d4d 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,16 @@ TEST_API_VERSION ?= 1.41 TEST_ENGINE_VERSION ?= 20.10 +ifeq ($(OS),Windows_NT) + PLATFORM := Windows +else + PLATFORM := $(shell sh -c 'uname -s 2>/dev/null || echo Unknown') +endif + +ifeq ($(PLATFORM),Linux) + uid_args := "--build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g)" +endif + .PHONY: all all: test @@ -19,7 +29,7 @@ build-py3: .PHONY: build-docs build-docs: - docker build -t docker-sdk-python-docs -f Dockerfile-docs --build-arg uid=$(shell id -u) --build-arg gid=$(shell id -g) . + docker build -t docker-sdk-python-docs -f Dockerfile-docs $(uid_args) . 
.PHONY: build-dind-certs build-dind-certs: diff --git a/docs-requirements.txt b/docs-requirements.txt index d69373d7..1f342fa2 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,2 +1,2 @@ -recommonmark==0.4.0 -Sphinx==1.4.6 +recommonmark==0.7.1 +Sphinx==5.1.1 diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 5d711eef..b0b2e5d0 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,3 +1,8 @@ dl.hide-signature > dt { display: none; } + +dl.field-list > dt { + /* prevent code blocks from forcing wrapping on the "Parameters" header */ + word-break: initial; +} diff --git a/setup.py b/setup.py index 833de3aa..0b113688 100644 --- a/setup.py +++ b/setup.py @@ -62,7 +62,7 @@ setup( install_requires=requirements, tests_require=test_requirements, extras_require=extras_require, - python_requires='>=3.6', + python_requires='>=3.7', zip_safe=False, test_suite='tests', classifiers=[ @@ -72,7 +72,6 @@ setup( 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', diff --git a/tests/Dockerfile b/tests/Dockerfile index 3236f387..1d60cfe4 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION=3.7 +ARG PYTHON_VERSION=3.10 FROM python:${PYTHON_VERSION} diff --git a/tests/Dockerfile-dind-certs b/tests/Dockerfile-dind-certs index 8829ff79..6e711892 100644 --- a/tests/Dockerfile-dind-certs +++ b/tests/Dockerfile-dind-certs @@ -1,4 +1,4 @@ -ARG PYTHON_VERSION=3.6 +ARG PYTHON_VERSION=3.10 FROM python:${PYTHON_VERSION} RUN mkdir /tmp/certs diff --git a/tests/Dockerfile-ssh-dind b/tests/Dockerfile-ssh-dind index aba9bb34..6f080182 100644 --- a/tests/Dockerfile-ssh-dind +++ b/tests/Dockerfile-ssh-dind @@ -1,5 +1,5 @@ -ARG API_VERSION=1.39 -ARG ENGINE_VERSION=19.03.12 +ARG API_VERSION=1.41 
+ARG ENGINE_VERSION=20.10.17 FROM docker:${ENGINE_VERSION}-dind From 52e29bd4463964a090e3425cf027a3a4a8c4473b Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Wed, 27 Jul 2022 14:44:50 -0400 Subject: [PATCH 189/211] deps: remove backports.ssl_match_hostname (#3011) This is no longer needed as it exists in every supported (non-EOL) version of Python that we target. Signed-off-by: Milas Bowman --- docker/transport/ssladapter.py | 8 -------- requirements.txt | 1 - tests/unit/ssladapter_test.py | 13 +++---------- 3 files changed, 3 insertions(+), 19 deletions(-) diff --git a/docker/transport/ssladapter.py b/docker/transport/ssladapter.py index bdca1d04..6aa80037 100644 --- a/docker/transport/ssladapter.py +++ b/docker/transport/ssladapter.py @@ -2,8 +2,6 @@ https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/ https://github.com/kennethreitz/requests/pull/799 """ -import sys - from packaging.version import Version from requests.adapters import HTTPAdapter @@ -17,12 +15,6 @@ except ImportError: PoolManager = urllib3.poolmanager.PoolManager -# Monkey-patching match_hostname with a version that supports -# IP-address checking. 
Not necessary for Python 3.5 and above -if sys.version_info[0] < 3 or sys.version_info[1] < 5: - from backports.ssl_match_hostname import match_hostname - urllib3.connection.match_hostname = match_hostname - class SSLHTTPAdapter(BaseHTTPAdapter): '''An HTTPS Transport Adapter that uses an arbitrary SSL version.''' diff --git a/requirements.txt b/requirements.txt index 7bcca763..a74e69ea 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,5 @@ appdirs==1.4.3 asn1crypto==0.22.0 -backports.ssl-match-hostname==3.5.0.1 cffi==1.14.4 cryptography==3.4.7 enum34==1.1.6 diff --git a/tests/unit/ssladapter_test.py b/tests/unit/ssladapter_test.py index 41a87f20..d3f2407c 100644 --- a/tests/unit/ssladapter_test.py +++ b/tests/unit/ssladapter_test.py @@ -1,15 +1,8 @@ import unittest -from docker.transport import ssladapter -import pytest +from ssl import match_hostname, CertificateError -try: - from backports.ssl_match_hostname import ( - match_hostname, CertificateError - ) -except ImportError: - from ssl import ( - match_hostname, CertificateError - ) +import pytest +from docker.transport import ssladapter try: from ssl import OP_NO_SSLv3, OP_NO_SSLv2, OP_NO_TLSv1 From bb40ba051fc67605d5c9e7fd1eb5f9aa3e0fb501 Mon Sep 17 00:00:00 2001 From: errorcode Date: Thu, 28 Jul 2022 02:57:26 +0800 Subject: [PATCH 190/211] ssh: do not create unnecessary subshell on exec (#2910) Signed-off-by: liubo --- docker/transport/sshconn.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 76d1fa44..ba8c11d1 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -58,9 +58,8 @@ class SSHSocket(socket.socket): env.pop('SSL_CERT_FILE', None) self.proc = subprocess.Popen( - ' '.join(args), + args, env=env, - shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, preexec_fn=None if constants.IS_WINDOWS_PLATFORM else preexec_func) From d9298647d91c52e1ee9ac448e43a7fea1c69bdbe Mon Sep 17 
00:00:00 2001 From: "Audun V. Nes" Date: Wed, 27 Jul 2022 21:01:41 +0200 Subject: [PATCH 191/211] ssh: reject unknown host keys when using Python SSH impl (#2932) In the Secure Shell (SSH) protocol, host keys are used to verify the identity of remote hosts. Accepting unknown host keys may leave the connection open to man-in-the-middle attacks. Do not accept unknown host keys. In particular, do not set the default missing host key policy for the Paramiko library to either AutoAddPolicy or WarningPolicy. Both of these policies continue even when the host key is unknown. The default setting of RejectPolicy is secure because it throws an exception when it encounters an unknown host key. Reference: https://cwe.mitre.org/data/definitions/295.html NOTE: This only affects SSH connections using the native Python SSH implementation (Paramiko), when `use_ssh_client=False` (default). If using the system SSH client (`use_ssh_client=True`), the host configuration (e.g. `~/.ssh/config`) will apply. Signed-off-by: Audun Nes --- docker/transport/sshconn.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index ba8c11d1..4f748f75 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -215,7 +215,7 @@ class SSHHTTPAdapter(BaseHTTPAdapter): self.ssh_params['key_filename'] = host_config['identityfile'] self.ssh_client.load_system_host_keys() - self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy()) + self.ssh_client.set_missing_host_key_policy(paramiko.RejectPolicy()) def _connect(self): if self.ssh_client: From adf5a97b1203623ae47bf7aa1367b6bb7c261980 Mon Sep 17 00:00:00 2001 From: Karthikeyan Singaravelan Date: Thu, 28 Jul 2022 00:55:11 +0530 Subject: [PATCH 192/211] lint: fix deprecation warnings from threading package (#2823) Set `daemon` attribute instead of using `setDaemon` method that was deprecated in Python 3.10. 
Signed-off-by: Karthikeyan Singaravelan --- tests/integration/api_image_test.py | 2 +- tests/unit/api_test.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/api_image_test.py b/tests/integration/api_image_test.py index e30de46c..6a6686e3 100644 --- a/tests/integration/api_image_test.py +++ b/tests/integration/api_image_test.py @@ -281,7 +281,7 @@ class ImportImageTest(BaseAPIIntegrationTest): server = socketserver.TCPServer(('', 0), Handler) thread = threading.Thread(target=server.serve_forever) - thread.setDaemon(True) + thread.daemon = True thread.start() yield f'http://{socket.gethostname()}:{server.server_address[1]}' diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index 3234e55b..45d2e4c0 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -378,7 +378,7 @@ class UnixSocketStreamTest(unittest.TestCase): self.server_socket = self._setup_socket() self.stop_server = False server_thread = threading.Thread(target=self.run_server) - server_thread.setDaemon(True) + server_thread.daemon = True server_thread.start() self.response = None self.request_handler = None @@ -488,7 +488,7 @@ class TCPSocketStreamTest(unittest.TestCase): cls.server = socketserver.ThreadingTCPServer( ('', 0), cls.get_handler_class()) cls.thread = threading.Thread(target=cls.server.serve_forever) - cls.thread.setDaemon(True) + cls.thread.daemon = True cls.thread.start() cls.address = 'http://{}:{}'.format( socket.gethostname(), cls.server.server_address[1]) From ea4cefe4fd1e85ef94f477b8e969994117fcb076 Mon Sep 17 00:00:00 2001 From: Vilhelm Prytz Date: Wed, 27 Jul 2022 21:31:04 +0200 Subject: [PATCH 193/211] lint: remove unnecessary pass statements (#2541) Signed-off-by: Vilhelm Prytz --- docker/auth.py | 1 - docker/transport/npipeconn.py | 2 +- docker/transport/sshconn.py | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/docker/auth.py b/docker/auth.py index 4fa798fc..cb388554 100644 --- a/docker/auth.py +++ 
b/docker/auth.py @@ -383,7 +383,6 @@ def _load_legacy_config(config_file): }} except Exception as e: log.debug(e) - pass log.debug("All parsing attempts failed - returning empty config") return {} diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py index df67f212..87033cf2 100644 --- a/docker/transport/npipeconn.py +++ b/docker/transport/npipeconn.py @@ -61,7 +61,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): "Pool reached maximum size and no more " "connections are allowed." ) - pass # Oh well, we'll create a new connection then + # Oh well, we'll create a new connection then return conn or self._new_conn() diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 4f748f75..27764069 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -155,7 +155,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): "Pool reached maximum size and no more " "connections are allowed." ) - pass # Oh well, we'll create a new connection then + # Oh well, we'll create a new connection then return conn or self._new_conn() From acdafbc116ac2348dcf41055402dbb5ecfad8be2 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Wed, 27 Jul 2022 16:25:27 -0400 Subject: [PATCH 194/211] ci: run SSH integration tests (#3012) Fix & enable SSH integration test suite. This also adds a new test for connecting to unknown hosts when using the Python SSH implementation (Paramiko). See #2932 for more info. Because of the above, some of the config/static key files have been moved around and adjusted. 
Signed-off-by: Milas Bowman --- .github/workflows/ci.yml | 2 +- Makefile | 41 ++++++++++++++----- tests/Dockerfile | 4 +- tests/Dockerfile-ssh-dind | 19 ++++----- tests/ssh-keys/authorized_keys | 1 - tests/ssh-keys/config | 3 -- tests/ssh/base.py | 4 ++ tests/{ssh-keys => ssh/config/client}/id_rsa | 0 .../config/client}/id_rsa.pub | 0 tests/ssh/config/server/known_ed25519 | 7 ++++ tests/ssh/config/server/known_ed25519.pub | 1 + tests/ssh/config/server/sshd_config | 3 ++ tests/ssh/config/server/unknown_ed25519 | 7 ++++ tests/ssh/config/server/unknown_ed25519.pub | 1 + tests/ssh/connect_test.py | 22 ++++++++++ 15 files changed, 86 insertions(+), 29 deletions(-) delete mode 100755 tests/ssh-keys/authorized_keys delete mode 100644 tests/ssh-keys/config rename tests/{ssh-keys => ssh/config/client}/id_rsa (100%) rename tests/{ssh-keys => ssh/config/client}/id_rsa.pub (100%) create mode 100644 tests/ssh/config/server/known_ed25519 create mode 100644 tests/ssh/config/server/known_ed25519.pub create mode 100644 tests/ssh/config/server/sshd_config create mode 100644 tests/ssh/config/server/unknown_ed25519 create mode 100644 tests/ssh/config/server/unknown_ed25519.pub create mode 100644 tests/ssh/connect_test.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e2987b49..296bf0dd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -40,7 +40,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - variant: [ "integration-dind", "integration-dind-ssl" ] + variant: [ "integration-dind", "integration-dind-ssl", "integration-dind-ssh" ] steps: - uses: actions/checkout@v3 diff --git a/Makefile b/Makefile index 27144d4d..ae6ae34e 100644 --- a/Makefile +++ b/Makefile @@ -21,11 +21,21 @@ clean: .PHONY: build-dind-ssh build-dind-ssh: - docker build -t docker-dind-ssh -f tests/Dockerfile-ssh-dind --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} --build-arg API_VERSION=${TEST_API_VERSION} --build-arg APT_MIRROR . 
+ docker build \ + --pull \ + -t docker-dind-ssh \ + -f tests/Dockerfile-ssh-dind \ + --build-arg ENGINE_VERSION=${TEST_ENGINE_VERSION} \ + --build-arg API_VERSION=${TEST_API_VERSION} \ + --build-arg APT_MIRROR . .PHONY: build-py3 build-py3: - docker build -t docker-sdk-python3 -f tests/Dockerfile --build-arg APT_MIRROR . + docker build \ + --pull \ + -t docker-sdk-python3 \ + -f tests/Dockerfile \ + --build-arg APT_MIRROR . .PHONY: build-docs build-docs: @@ -61,6 +71,7 @@ integration-dind-py3: build-py3 setup-network --detach \ --name dpy-dind-py3 \ --network dpy-tests \ + --pull=always \ --privileged \ docker:${TEST_ENGINE_VERSION}-dind \ dockerd -H tcp://0.0.0.0:2375 --experimental @@ -85,16 +96,23 @@ integration-dind-py3: build-py3 setup-network docker rm -vf dpy-dind-py3 -.PHONY: integration-ssh-py3 -integration-ssh-py3: build-dind-ssh build-py3 setup-network - docker rm -vf dpy-dind-py3 || : - docker run -d --network dpy-tests --name dpy-dind-py3 --privileged\ +.PHONY: integration-dind-ssh +integration-dind-ssh: build-dind-ssh build-py3 setup-network + docker rm -vf dpy-dind-ssh || : + docker run -d --network dpy-tests --name dpy-dind-ssh --privileged \ docker-dind-ssh dockerd --experimental - # start SSH daemon - docker exec dpy-dind-py3 sh -c "/usr/sbin/sshd" - docker run -t --rm --env="DOCKER_HOST=ssh://dpy-dind-py3" --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}"\ - --network dpy-tests docker-sdk-python3 py.test tests/ssh/${file} - docker rm -vf dpy-dind-py3 + # start SSH daemon for known key + docker exec dpy-dind-ssh sh -c "/usr/sbin/sshd -h /etc/ssh/known_ed25519 -p 22" + docker exec dpy-dind-ssh sh -c "/usr/sbin/sshd -h /etc/ssh/unknown_ed25519 -p 2222" + docker run \ + --tty \ + --rm \ + --env="DOCKER_HOST=ssh://dpy-dind-ssh" \ + --env="DOCKER_TEST_API_VERSION=${TEST_API_VERSION}" \ + --env="UNKNOWN_DOCKER_SSH_HOST=ssh://dpy-dind-ssh:2222" \ + --network dpy-tests \ + docker-sdk-python3 py.test tests/ssh/${file} + docker rm -vf dpy-dind-ssh 
.PHONY: integration-dind-ssl @@ -110,6 +128,7 @@ integration-dind-ssl: build-dind-certs build-py3 setup-network --name dpy-dind-ssl \ --network dpy-tests \ --network-alias docker \ + --pull=always \ --privileged \ --volume /tmp \ --volumes-from dpy-dind-certs \ diff --git a/tests/Dockerfile b/tests/Dockerfile index 1d60cfe4..e24da47d 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -11,7 +11,9 @@ RUN apt-get update && apt-get -y install --no-install-recommends \ pass # Add SSH keys and set permissions -COPY tests/ssh-keys /root/.ssh +COPY tests/ssh/config/client /root/.ssh +COPY tests/ssh/config/server/known_ed25519.pub /root/.ssh/known_hosts +RUN sed -i '1s;^;dpy-dind-ssh ;' /root/.ssh/known_hosts RUN chmod -R 600 /root/.ssh COPY ./tests/gpg-keys /gpg-keys diff --git a/tests/Dockerfile-ssh-dind b/tests/Dockerfile-ssh-dind index 6f080182..22c707a0 100644 --- a/tests/Dockerfile-ssh-dind +++ b/tests/Dockerfile-ssh-dind @@ -1,23 +1,18 @@ ARG API_VERSION=1.41 -ARG ENGINE_VERSION=20.10.17 +ARG ENGINE_VERSION=20.10 FROM docker:${ENGINE_VERSION}-dind -RUN apk add --no-cache \ +RUN apk add --no-cache --upgrade \ openssh -# Add the keys and set permissions -RUN ssh-keygen -A - -# copy the test SSH config -RUN echo "IgnoreUserKnownHosts yes" > /etc/ssh/sshd_config && \ - echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config && \ - echo "PermitRootLogin yes" >> /etc/ssh/sshd_config +COPY tests/ssh/config/server /etc/ssh/ +RUN chmod -R 600 /etc/ssh # set authorized keys for client paswordless connection -COPY tests/ssh-keys/authorized_keys /root/.ssh/authorized_keys -RUN chmod 600 /root/.ssh/authorized_keys +COPY tests/ssh/config/client/id_rsa.pub /root/.ssh/authorized_keys +RUN chmod -R 600 /root/.ssh -RUN echo "root:root" | chpasswd +# RUN echo "root:root" | chpasswd RUN ln -s /usr/local/bin/docker /usr/bin/docker EXPOSE 22 diff --git a/tests/ssh-keys/authorized_keys b/tests/ssh-keys/authorized_keys deleted file mode 100755 index 33252fe5..00000000 --- 
a/tests/ssh-keys/authorized_keys +++ /dev/null @@ -1 +0,0 @@ -ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC/BiXkbL9oEbE3PJv1S2p12XK5BHW3qQT5Rf+CYG0ATYyMPIVM6+IXVyf3QNxpnvPXvbPBQJCs0qHeuPwZy2Gsbt35QnmlgrczFPiXXosCD2N+wrcOQPZGuLjQyUUP2yJRVSTLpp8zk2F8w3laGIB3Jk1hUcMUExemKxQYk/L40b5rXKkarLk5awBuicjRStMrchPRHZ2n715TG+zSvf8tB/UHRXKYPqai/Je5eiH3yGUzCY4zn+uEoqAFb4V8lpIj8Rw3EXmCYVwG0vg+44QIQ2gJnIhTlcmxwkynvZn97nug4NLlGJQ+sDCnIvMapycHfGkNlBz3fFtu/ORsxPpZbTNg/9noa3Zf8OpIwvE/FHNPqDctGltwxEgQxj5fE34x0fYnF08tejAUJJCZE3YsGgNabsS4pD+kRhI83eFZvgj3Q1AeTK0V9bRM7jujcc9Rz+V9Gb5zYEHN/l8PxEVlj0OlURf9ZlknNQK8xRh597jDXTfVQKCMO/nRaWH2bq0= diff --git a/tests/ssh-keys/config b/tests/ssh-keys/config deleted file mode 100644 index 8dd13540..00000000 --- a/tests/ssh-keys/config +++ /dev/null @@ -1,3 +0,0 @@ -Host * - StrictHostKeyChecking no - UserKnownHostsFile=/dev/null diff --git a/tests/ssh/base.py b/tests/ssh/base.py index 4825227f..4b91add4 100644 --- a/tests/ssh/base.py +++ b/tests/ssh/base.py @@ -2,6 +2,8 @@ import os import shutil import unittest +import pytest + import docker from .. import helpers from docker.utils import kwargs_from_env @@ -68,6 +70,8 @@ class BaseIntegrationTest(unittest.TestCase): client.close() +@pytest.mark.skipif(not os.environ.get('DOCKER_HOST', '').startswith('ssh://'), + reason='DOCKER_HOST is not an SSH target') class BaseAPIIntegrationTest(BaseIntegrationTest): """ A test case for `APIClient` integration tests. 
It sets up an `APIClient` diff --git a/tests/ssh-keys/id_rsa b/tests/ssh/config/client/id_rsa similarity index 100% rename from tests/ssh-keys/id_rsa rename to tests/ssh/config/client/id_rsa diff --git a/tests/ssh-keys/id_rsa.pub b/tests/ssh/config/client/id_rsa.pub similarity index 100% rename from tests/ssh-keys/id_rsa.pub rename to tests/ssh/config/client/id_rsa.pub diff --git a/tests/ssh/config/server/known_ed25519 b/tests/ssh/config/server/known_ed25519 new file mode 100644 index 00000000..b79f217b --- /dev/null +++ b/tests/ssh/config/server/known_ed25519 @@ -0,0 +1,7 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW +QyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4QAAAJgIMffcCDH3 +3AAAAAtzc2gtZWQyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4Q +AAAEDeXnt5AuNk4oTHjMU1vUsEwh64fuEPu4hXsG6wCVt/6Iax81dU/Xw3tcLohAa67FdB +FtPGU8YuP7n8IHKP16DhAAAAEXJvb3RAMGRkZmQyMWRkYjM3AQIDBA== +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/ssh/config/server/known_ed25519.pub b/tests/ssh/config/server/known_ed25519.pub new file mode 100644 index 00000000..ec0296e9 --- /dev/null +++ b/tests/ssh/config/server/known_ed25519.pub @@ -0,0 +1 @@ +ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIax81dU/Xw3tcLohAa67FdBFtPGU8YuP7n8IHKP16Dh docker-py integration tests known diff --git a/tests/ssh/config/server/sshd_config b/tests/ssh/config/server/sshd_config new file mode 100644 index 00000000..970dca33 --- /dev/null +++ b/tests/ssh/config/server/sshd_config @@ -0,0 +1,3 @@ +IgnoreUserKnownHosts yes +PubkeyAuthentication yes +PermitRootLogin yes diff --git a/tests/ssh/config/server/unknown_ed25519 b/tests/ssh/config/server/unknown_ed25519 new file mode 100644 index 00000000..b79f217b --- /dev/null +++ b/tests/ssh/config/server/unknown_ed25519 @@ -0,0 +1,7 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW 
+QyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4QAAAJgIMffcCDH3 +3AAAAAtzc2gtZWQyNTUxOQAAACCGsfNXVP18N7XC6IQGuuxXQRbTxlPGLj+5/CByj9eg4Q +AAAEDeXnt5AuNk4oTHjMU1vUsEwh64fuEPu4hXsG6wCVt/6Iax81dU/Xw3tcLohAa67FdB +FtPGU8YuP7n8IHKP16DhAAAAEXJvb3RAMGRkZmQyMWRkYjM3AQIDBA== +-----END OPENSSH PRIVATE KEY----- diff --git a/tests/ssh/config/server/unknown_ed25519.pub b/tests/ssh/config/server/unknown_ed25519.pub new file mode 100644 index 00000000..a24403ed --- /dev/null +++ b/tests/ssh/config/server/unknown_ed25519.pub @@ -0,0 +1 @@ +ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIax81dU/Xw3tcLohAa67FdBFtPGU8YuP7n8IHKP16Dh docker-py integration tests unknown diff --git a/tests/ssh/connect_test.py b/tests/ssh/connect_test.py new file mode 100644 index 00000000..3d33a96d --- /dev/null +++ b/tests/ssh/connect_test.py @@ -0,0 +1,22 @@ +import os +import unittest + +import docker +import paramiko.ssh_exception +import pytest +from .base import TEST_API_VERSION + + +class SSHConnectionTest(unittest.TestCase): + @pytest.mark.skipif('UNKNOWN_DOCKER_SSH_HOST' not in os.environ, + reason='Unknown Docker SSH host not configured') + def test_ssh_unknown_host(self): + with self.assertRaises(paramiko.ssh_exception.SSHException) as cm: + docker.APIClient( + version=TEST_API_VERSION, + timeout=60, + # test only valid with Paramiko + use_ssh_client=False, + base_url=os.environ['UNKNOWN_DOCKER_SSH_HOST'], + ) + self.assertIn('not found in known_hosts', str(cm.exception)) From d2d097efbb1675393a1ac5b17754ba9090d2c52e Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Thu, 28 Jul 2022 22:30:40 +1000 Subject: [PATCH 195/211] docs: fix simple typo, containe -> container (#3015) There is a small typo in docker/types/services.py. Should read `container` rather than `containe`. 
Signed-off-by: Tim Gates --- docker/types/services.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/types/services.py b/docker/types/services.py index fe7cc264..15cf511e 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -436,7 +436,7 @@ class UpdateConfig(dict): class RollbackConfig(UpdateConfig): """ - Used to specify the way containe rollbacks should be performed by a service + Used to specify the way container rollbacks should be performed by a service Args: parallelism (int): Maximum number of tasks to be rolled back in one From bf026265e0adfd862373a601ed99e4f3ac8b3bd0 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Thu, 28 Jul 2022 08:31:45 -0400 Subject: [PATCH 196/211] ci: bump version to 6.0.0-dev (#3013) It's been a long time without a release, and we've included a number of fixes as well as raised the minimum Python version, so a major release seems in order. Signed-off-by: Milas Bowman --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 5687086f..88ee8b0f 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "5.1.0-dev" +version = "6.0.0-dev" version_info = tuple(int(d) for d in version.split("-")[0].split(".")) From be942f83902fbd02e05270c39b6917880939c165 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Thu, 28 Jul 2022 08:32:00 -0400 Subject: [PATCH 197/211] deps: upgrade & remove unnecessary dependencies (#3014) The `requirements.txt` and `setup.py` had a lot of extra transitive dependencies to try and address various SSL shortcomings from the Python ecosystem. Thankfully, between modern Python versions (3.6+) and corresponding `requests` versions (2.26+), this is all unnecessary now! As a result, a bunch of transitive dependencies have been removed from `requirements.txt`, the minimum version of `requests` increased, and the `tls` extra made into a no-op. 
Signed-off-by: Milas Bowman --- README.md | 5 ++--- appveyor.yml | 13 ------------- requirements.txt | 16 +++------------- setup.py | 19 +++++++------------ test-requirements.txt | 8 ++++---- 5 files changed, 16 insertions(+), 45 deletions(-) delete mode 100644 appveyor.yml diff --git a/README.md b/README.md index 4fc31f7d..2db678dc 100644 --- a/README.md +++ b/README.md @@ -10,9 +10,8 @@ The latest stable version [is available on PyPI](https://pypi.python.org/pypi/do pip install docker -If you are intending to connect to a docker host via TLS, add `docker[tls]` to your requirements instead, or install with pip: - - pip install docker[tls] +> Older versions (< 6.0) required installing `docker[tls]` for SSL/TLS support. +> This is no longer necessary and is a no-op, but is supported for backwards compatibility. ## Usage diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 144ab352..00000000 --- a/appveyor.yml +++ /dev/null @@ -1,13 +0,0 @@ -version: '{branch}-{build}' - -install: - - "SET PATH=C:\\Python37-x64;C:\\Python37-x64\\Scripts;%PATH%" - - "python --version" - - "python -m pip install --upgrade pip" - - "pip install tox==2.9.1" - -# Build the binary after tests -build: false - -test_script: - - "tox" diff --git a/requirements.txt b/requirements.txt index a74e69ea..52b5461e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,6 @@ -appdirs==1.4.3 -asn1crypto==0.22.0 -cffi==1.14.4 -cryptography==3.4.7 -enum34==1.1.6 -idna==2.5 -ipaddress==1.0.18 packaging==21.3 -paramiko==2.10.1 -pycparser==2.17 -pyOpenSSL==18.0.0 -pyparsing==2.2.0 +paramiko==2.11.0 pywin32==304; sys_platform == 'win32' -requests==2.26.0 -urllib3==1.26.5 +requests==2.28.1 +urllib3==1.26.11 websocket-client==0.56.0 diff --git a/setup.py b/setup.py index 0b113688..c6346b07 100644 --- a/setup.py +++ b/setup.py @@ -10,28 +10,23 @@ ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) requirements = [ - 'packaging', + 'packaging >= 14.0', + 
'requests >= 2.26.0', + 'urllib3 >= 1.26.0', 'websocket-client >= 0.32.0', - 'requests >= 2.14.2, != 2.18.0', ] extras_require = { # win32 APIs if on Windows (required for npipe support) ':sys_platform == "win32"': 'pywin32>=304', - # If using docker-py over TLS, highly recommend this option is - # pip-installed or pinned. - - # TODO: if pip installing both "requests" and "requests[security]", the - # extra package from the "security" option are not installed (see - # https://github.com/pypa/pip/issues/4391). Once that's fixed, instead of - # installing the extra dependencies, install the following instead: - # 'requests[security] >= 2.5.2, != 2.11.0, != 2.12.2' - 'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=3.4.7', 'idna>=2.0.0'], + # This is now a no-op, as similarly the requests[security] extra is + # a no-op as of requests 2.26.0, this is always available/by default now + # see https://github.com/psf/requests/pull/5867 + 'tls': [], # Only required when connecting using the ssh:// protocol 'ssh': ['paramiko>=2.4.3'], - } version = None diff --git a/test-requirements.txt b/test-requirements.txt index ccc97be4..979b291c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,6 @@ -setuptools==58.2.0 -coverage==6.0.1 +setuptools==63.2.0 +coverage==6.4.2 flake8==4.0.1 -pytest==6.2.5 +pytest==7.1.2 pytest-cov==3.0.0 -pytest-timeout==2.0.1 +pytest-timeout==2.1.0 From 9bdb5ba2bab682a02bc3348e359822218dad7e96 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Thu, 28 Jul 2022 11:25:17 -0400 Subject: [PATCH 198/211] lint: fix line length violation (#3017) Signed-off-by: Milas Bowman --- docker/types/services.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/types/services.py b/docker/types/services.py index 15cf511e..c2fce9f4 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -436,7 +436,8 @@ class UpdateConfig(dict): class RollbackConfig(UpdateConfig): """ - Used to specify the way container rollbacks 
should be performed by a service + Used to specify the way container rollbacks should be performed by a + service Args: parallelism (int): Maximum number of tasks to be rolled back in one From ab43018b027e48c53f3cf6d71ce988358e3c204e Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Thu, 28 Jul 2022 16:38:57 -0400 Subject: [PATCH 199/211] docs: fix markdown rendering (#3020) Follow instructions at https://www.sphinx-doc.org/en/master/usage/markdown.html. This switches from `recommonmark` (deprecated) to `myst-parser` (recommended). Only impacts the changelog page, which was broken after recent upgrades to Sphinx for Python 3.10 compatibility. Signed-off-by: Milas Bowman --- docs-requirements.txt | 2 +- docs/conf.py | 17 ++++++----------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 1f342fa2..04d1aff2 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,2 +1,2 @@ -recommonmark==0.7.1 +myst-parser==0.18.0 Sphinx==5.1.1 diff --git a/docs/conf.py b/docs/conf.py index 2b0a7195..1258a423 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -33,24 +33,19 @@ sys.path.insert(0, os.path.abspath('..')) extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', + 'myst_parser' ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] -from recommonmark.parser import CommonMarkParser - -source_parsers = { - '.md': CommonMarkParser, +source_suffix = { + '.rst': 'restructuredtext', + '.txt': 'markdown', + '.md': 'markdown', } -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -source_suffix = ['.rst', '.md'] -# source_suffix = '.md' - # The encoding of source files. # # source_encoding = 'utf-8-sig' @@ -80,7 +75,7 @@ version = f'{version_info[0]}.{version_info[1]}' # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: From 23cf16f03a38e553d1e15493719cdb57e928fd95 Mon Sep 17 00:00:00 2001 From: Ben Fasoli Date: Fri, 29 Jul 2022 06:06:22 -0700 Subject: [PATCH 200/211] client: use 12 character short IDs (#2862) Use 12 characters for Docker resource IDs for consistency with the Docker CLI. Signed-off-by: Ben Fasoli --- docker/models/images.py | 10 ++-- docker/models/resource.py | 4 +- tests/unit/api_container_test.py | 87 ++++++++++++++++------------ tests/unit/api_image_test.py | 12 ++-- tests/unit/api_test.py | 2 +- tests/unit/fake_api.py | 56 +++++++++--------- tests/unit/models_containers_test.py | 5 ++ tests/unit/models_images_test.py | 4 +- 8 files changed, 99 insertions(+), 81 deletions(-) diff --git a/docker/models/images.py b/docker/models/images.py index ef668c7d..e247d351 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -31,12 +31,12 @@ class Image(Model): @property def short_id(self): """ - The ID of the image truncated to 10 characters, plus the ``sha256:`` + The ID of the image truncated to 12 characters, plus the ``sha256:`` prefix. """ if self.id.startswith('sha256:'): - return self.id[:17] - return self.id[:10] + return self.id[:19] + return self.id[:12] @property def tags(self): @@ -141,10 +141,10 @@ class RegistryData(Model): @property def short_id(self): """ - The ID of the image truncated to 10 characters, plus the ``sha256:`` + The ID of the image truncated to 12 characters, plus the ``sha256:`` prefix. """ - return self.id[:17] + return self.id[:19] def pull(self, platform=None): """ diff --git a/docker/models/resource.py b/docker/models/resource.py index dec2349f..89030e59 100644 --- a/docker/models/resource.py +++ b/docker/models/resource.py @@ -35,9 +35,9 @@ class Model: @property def short_id(self): """ - The ID of the object, truncated to 10 characters. 
+ The ID of the object, truncated to 12 characters. """ - return self.id[:10] + return self.id[:12] def reload(self): """ diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index a66aea04..70308416 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -24,7 +24,8 @@ class StartContainerTest(BaseAPIClientTest): self.client.start(fake_api.FAKE_CONTAINER_ID) args = fake_request.call_args - assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start' + assert args[0][1] == (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/start') assert 'data' not in args[1] assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS @@ -117,7 +118,8 @@ class StartContainerTest(BaseAPIClientTest): self.client.start({'Id': fake_api.FAKE_CONTAINER_ID}) args = fake_request.call_args - assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/start' + assert args[0][1] == (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/start') assert 'data' not in args[1] assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS @@ -1079,7 +1081,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/resize', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/resize'), params={'h': 15, 'w': 120}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1092,7 +1095,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/rename', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/rename'), params={'name': 'foobar'}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1102,7 +1106,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/wait', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/wait', timeout=None, params={} ) @@ -1112,7 +1116,7 @@ class ContainerTest(BaseAPIClientTest): 
fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/wait', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/wait', timeout=None, params={} ) @@ -1124,7 +1128,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1140,7 +1144,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1157,7 +1161,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1172,7 +1176,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1186,7 +1190,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1201,7 +1205,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix 
+ 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1217,7 +1221,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 10}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1233,7 +1237,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1250,7 +1254,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 0, 'stderr': 1, 'stdout': 1, 'tail': 'all', 'since': ts}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1276,7 +1280,7 @@ class ContainerTest(BaseAPIClientTest): assert m.called fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/logs', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/logs', params={'timestamps': 0, 'follow': 1, 'stderr': 1, 'stdout': 1, 'tail': 'all'}, timeout=DEFAULT_TIMEOUT_SECONDS, @@ -1288,7 +1292,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/changes', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/changes'), timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1297,7 +1302,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/changes', + (url_prefix + 'containers/' + + 
fake_api.FAKE_CONTAINER_ID + '/changes'), timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1306,7 +1312,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/json', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/json', timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1317,7 +1323,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/stop', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) @@ -1330,7 +1336,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/stop', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stop', params={'t': timeout}, timeout=(DEFAULT_TIMEOUT_SECONDS + timeout) ) @@ -1340,7 +1346,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/pause', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/pause'), timeout=(DEFAULT_TIMEOUT_SECONDS) ) @@ -1349,7 +1356,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/unpause', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/unpause'), timeout=(DEFAULT_TIMEOUT_SECONDS) ) @@ -1358,7 +1366,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/kill', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1368,7 +1376,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/kill', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1378,7 +1386,7 @@ class 
ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/kill', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/kill', params={'signal': signal.SIGTERM}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1388,7 +1396,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/restart', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/restart'), params={'t': 2}, timeout=(DEFAULT_TIMEOUT_SECONDS + 2) ) @@ -1398,7 +1407,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'containers/3cc2351ab11b/restart', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/restart'), params={'t': 2}, timeout=(DEFAULT_TIMEOUT_SECONDS + 2) ) @@ -1408,7 +1418,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'DELETE', - url_prefix + 'containers/3cc2351ab11b', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID, params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1418,7 +1428,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'DELETE', - url_prefix + 'containers/3cc2351ab11b', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID, params={'v': False, 'link': False, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1428,7 +1438,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/export', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/export'), stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1438,7 +1449,8 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/export', + (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/export'), stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1448,7 +1460,7 @@ class 
ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/json', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/json', timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1464,7 +1476,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/stats', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/stats', timeout=60, stream=True ) @@ -1474,7 +1486,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/top', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/top', params={}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1484,7 +1496,7 @@ class ContainerTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'containers/3cc2351ab11b/top', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID + '/top', params={'ps_args': 'waux'}, timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -1496,7 +1508,8 @@ class ContainerTest(BaseAPIClientTest): blkio_weight=345 ) args = fake_request.call_args - assert args[0][1] == url_prefix + 'containers/3cc2351ab11b/update' + assert args[0][1] == (url_prefix + 'containers/' + + fake_api.FAKE_CONTAINER_ID + '/update') assert json.loads(args[1]['data']) == { 'Memory': 2 * 1024, 'CpuShares': 124, 'BlkioWeight': 345 } diff --git a/tests/unit/api_image_test.py b/tests/unit/api_image_test.py index 8fb3e9d9..e2859329 100644 --- a/tests/unit/api_image_test.py +++ b/tests/unit/api_image_test.py @@ -100,7 +100,7 @@ class ImageTest(BaseAPIClientTest): 'repo': None, 'comment': None, 'tag': None, - 'container': '3cc2351ab11b', + 'container': fake_api.FAKE_CONTAINER_ID, 'author': None, 'changes': None }, @@ -112,7 +112,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'DELETE', - url_prefix + 'images/e9aa60c60128', + url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID, params={'force': False, 'noprune': False}, 
timeout=DEFAULT_TIMEOUT_SECONDS ) @@ -287,7 +287,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/e9aa60c60128/tag', + url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', params={ 'tag': None, 'repo': 'repo', @@ -305,7 +305,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/e9aa60c60128/tag', + url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', params={ 'tag': 'tag', 'repo': 'repo', @@ -320,7 +320,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'POST', - url_prefix + 'images/e9aa60c60128/tag', + url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/tag', params={ 'tag': None, 'repo': 'repo', @@ -334,7 +334,7 @@ class ImageTest(BaseAPIClientTest): fake_request.assert_called_with( 'GET', - url_prefix + 'images/e9aa60c60128/get', + url_prefix + 'images/' + fake_api.FAKE_IMAGE_ID + '/get', stream=True, timeout=DEFAULT_TIMEOUT_SECONDS ) diff --git a/tests/unit/api_test.py b/tests/unit/api_test.py index 45d2e4c0..a2348f08 100644 --- a/tests/unit/api_test.py +++ b/tests/unit/api_test.py @@ -318,7 +318,7 @@ class DockerApiTest(BaseAPIClientTest): fake_request.assert_called_with( 'DELETE', - url_prefix + 'containers/3cc2351ab11b', + url_prefix + 'containers/' + fake_api.FAKE_CONTAINER_ID, params={'v': False, 'link': True, 'force': False}, timeout=DEFAULT_TIMEOUT_SECONDS ) diff --git a/tests/unit/fake_api.py b/tests/unit/fake_api.py index 4c933295..6acfb64b 100644 --- a/tests/unit/fake_api.py +++ b/tests/unit/fake_api.py @@ -4,10 +4,10 @@ from . 
import fake_stat CURRENT_VERSION = f'v{constants.DEFAULT_DOCKER_API_VERSION}' -FAKE_CONTAINER_ID = '3cc2351ab11b' -FAKE_IMAGE_ID = 'e9aa60c60128' -FAKE_EXEC_ID = 'd5d177f121dc' -FAKE_NETWORK_ID = '33fb6a3462b8' +FAKE_CONTAINER_ID = '81cf499cc928ce3fedc250a080d2b9b978df20e4517304c45211e8a68b33e254' # noqa: E501 +FAKE_IMAGE_ID = 'sha256:fe7a8fc91d3f17835cbb3b86a1c60287500ab01a53bc79c4497d09f07a3f0688' # noqa: E501 +FAKE_EXEC_ID = 'b098ec855f10434b5c7c973c78484208223a83f663ddaefb0f02a242840cb1c7' # noqa: E501 +FAKE_NETWORK_ID = '1999cfb42e414483841a125ade3c276c3cb80cb3269b14e339354ac63a31b02c' # noqa: E501 FAKE_IMAGE_NAME = 'test_image' FAKE_TARBALL_PATH = '/path/to/tarball' FAKE_REPO_NAME = 'repo' @@ -546,56 +546,56 @@ fake_responses = { post_fake_import_image, f'{prefix}/{CURRENT_VERSION}/containers/json': get_fake_containers, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/start': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/start': post_fake_start_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/resize': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/resize': post_fake_resize_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/json': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/json': get_fake_inspect_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/rename': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/rename': post_fake_rename_container, - f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/tag': + f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}/tag': post_fake_tag_image, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/wait': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/wait': get_fake_wait, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/logs': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/logs': get_fake_logs, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/changes': + 
f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/changes': get_fake_diff, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/export': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/export': get_fake_export, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/update': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/update': post_fake_update_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/exec': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/exec': post_fake_exec_create, - f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/start': + f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/start': post_fake_exec_start, - f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/json': + f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/json': get_fake_exec_inspect, - f'{prefix}/{CURRENT_VERSION}/exec/d5d177f121dc/resize': + f'{prefix}/{CURRENT_VERSION}/exec/{FAKE_EXEC_ID}/resize': post_fake_exec_resize, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stats': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/stats': get_fake_stats, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/top': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/top': get_fake_top, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/stop': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/stop': post_fake_stop_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/kill': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/kill': post_fake_kill_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/pause': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/pause': post_fake_pause_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/unpause': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/unpause': post_fake_unpause_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b/restart': + 
f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}/restart': post_fake_restart_container, - f'{prefix}/{CURRENT_VERSION}/containers/3cc2351ab11b': + f'{prefix}/{CURRENT_VERSION}/containers/{FAKE_CONTAINER_ID}': delete_fake_remove_container, f'{prefix}/{CURRENT_VERSION}/images/create': post_fake_image_create, - f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128': + f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}': delete_fake_remove_image, - f'{prefix}/{CURRENT_VERSION}/images/e9aa60c60128/get': + f'{prefix}/{CURRENT_VERSION}/images/{FAKE_IMAGE_ID}/get': get_fake_get_image, f'{prefix}/{CURRENT_VERSION}/images/load': post_fake_load_image, diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index c7aa46b2..785a8490 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -377,6 +377,11 @@ class ContainerCollectionTest(unittest.TestCase): class ContainerTest(unittest.TestCase): + def test_short_id(self): + container = Container(attrs={'Id': '8497fe9244dd45cac543eb3c37d8605077' + '6800eebef1f3ec2ee111e8ccf12db6'}) + assert container.short_id == '8497fe9244dd' + def test_name(self): client = make_fake_client() container = client.containers.get(FAKE_CONTAINER_ID) diff --git a/tests/unit/models_images_test.py b/tests/unit/models_images_test.py index f3ca0be4..436fd61f 100644 --- a/tests/unit/models_images_test.py +++ b/tests/unit/models_images_test.py @@ -122,11 +122,11 @@ class ImageTest(unittest.TestCase): def test_short_id(self): image = Image(attrs={'Id': 'sha256:b6846070672ce4e8f1f91564ea6782bd675' 'f69d65a6f73ef6262057ad0a15dcd'}) - assert image.short_id == 'sha256:b684607067' + assert image.short_id == 'sha256:b6846070672c' image = Image(attrs={'Id': 'b6846070672ce4e8f1f91564ea6782bd675' 'f69d65a6f73ef6262057ad0a15dcd'}) - assert image.short_id == 'b684607067' + assert image.short_id == 'b6846070672c' def test_tags(self): image = Image(attrs={ From 
05e143429e892fb838bbff058391456ba3d0a19c Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Fri, 29 Jul 2022 11:08:00 -0400 Subject: [PATCH 201/211] api: preserve cause when re-raising error (#3023) Use `from e` to ensure that the error context is propagated correctly. Fixes #2702. Signed-off-by: Milas Bowman --- docker/errors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/errors.py b/docker/errors.py index ba952562..7725295f 100644 --- a/docker/errors.py +++ b/docker/errors.py @@ -28,7 +28,7 @@ def create_api_error_from_http_exception(e): cls = ImageNotFound else: cls = NotFound - raise cls(e, response=response, explanation=explanation) + raise cls(e, response=response, explanation=explanation) from e class APIError(requests.exceptions.HTTPError, DockerException): From 26064dd6b584ee14878157b4c8b001eefed70caf Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Fri, 29 Jul 2022 11:09:47 -0400 Subject: [PATCH 202/211] deps: upgrade websocket-client to latest (#3022) * Upgrade websocket-client to latest * Add basic integration test for streaming logs via websocket Signed-off-by: Milas Bowman --- requirements.txt | 2 +- tests/integration/api_container_test.py | 21 ++++++++++++++++++++- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 52b5461e..36660b66 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,4 +3,4 @@ paramiko==2.11.0 pywin32==304; sys_platform == 'win32' requests==2.28.1 urllib3==1.26.11 -websocket-client==0.56.0 +websocket-client==1.3.3 diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 0d6d9f96..8f69e41f 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1197,7 +1197,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): sock = self.client.attach_socket(container, ws=False) assert sock.fileno() > -1 - def test_run_container_reading_socket(self): + def 
test_run_container_reading_socket_http(self): line = 'hi there and stuff and things, words!' # `echo` appends CRLF, `printf` doesn't command = f"printf '{line}'" @@ -1217,6 +1217,25 @@ class AttachContainerTest(BaseAPIIntegrationTest): data = read_exactly(pty_stdout, next_size) assert data.decode('utf-8') == line + @pytest.mark.xfail(condition=bool(os.environ.get('DOCKER_CERT_PATH', '')), + reason='DOCKER_CERT_PATH not respected for websockets') + def test_run_container_reading_socket_ws(self): + line = 'hi there and stuff and things, words!' + # `echo` appends CRLF, `printf` doesn't + command = f"printf '{line}'" + container = self.client.create_container(TEST_IMG, command, + detach=True, tty=False) + self.tmp_containers.append(container) + + opts = {"stdout": 1, "stream": 1, "logs": 1} + pty_stdout = self.client.attach_socket(container, opts, ws=True) + self.addCleanup(pty_stdout.close) + + self.client.start(container) + + data = pty_stdout.recv() + assert data.decode('utf-8') == line + @pytest.mark.timeout(10) def test_attach_no_stream(self): container = self.client.create_container( From 1a4cacdfb63f0fbf2299962732c75484c24ad8b0 Mon Sep 17 00:00:00 2001 From: Felix Fontein Date: Fri, 29 Jul 2022 19:57:30 +0200 Subject: [PATCH 203/211] api: add platform to container create (#2927) Add platform parameter for container creation/run Signed-off-by: Felix Fontein Signed-off-by: Milas Bowman Co-authored-by: Milas Bowman --- docker/api/container.py | 13 ++++++++++--- docker/errors.py | 16 +++++++++++---- docker/models/containers.py | 3 ++- tests/unit/api_container_test.py | 16 +++++++++++++++ tests/unit/models_containers_test.py | 29 ++++++++++++++++++++++++++++ 5 files changed, 69 insertions(+), 8 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 17c09726..f600be18 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -223,7 +223,7 @@ class ContainerApiMixin: mac_address=None, labels=None, stop_signal=None, 
networking_config=None, healthcheck=None, stop_timeout=None, runtime=None, - use_config_proxy=True): + use_config_proxy=True, platform=None): """ Creates a container. Parameters are similar to those for the ``docker run`` command except it doesn't support the attach options (``-a``). @@ -398,6 +398,7 @@ class ContainerApiMixin: configuration file (``~/.docker/config.json`` by default) contains a proxy configuration, the corresponding environment variables will be set in the container being created. + platform (str): Platform in the format ``os[/arch[/variant]]``. Returns: A dictionary with an image 'Id' key and a 'Warnings' key. @@ -427,16 +428,22 @@ class ContainerApiMixin: stop_signal, networking_config, healthcheck, stop_timeout, runtime ) - return self.create_container_from_config(config, name) + return self.create_container_from_config(config, name, platform) def create_container_config(self, *args, **kwargs): return ContainerConfig(self._version, *args, **kwargs) - def create_container_from_config(self, config, name=None): + def create_container_from_config(self, config, name=None, platform=None): u = self._url("/containers/create") params = { 'name': name } + if platform: + if utils.version_lt(self._version, '1.41'): + raise errors.InvalidVersion( + 'platform is not supported for API version < 1.41' + ) + params['platform'] = platform res = self._post_json(u, data=config, params=params) return self._result(res, True) diff --git a/docker/errors.py b/docker/errors.py index 7725295f..8cf8670b 100644 --- a/docker/errors.py +++ b/docker/errors.py @@ -1,5 +1,14 @@ import requests +_image_not_found_explanation_fragments = frozenset( + fragment.lower() for fragment in [ + 'no such image', + 'not found: does not exist or no pull access', + 'repository does not exist', + 'was found but does not match the specified platform', + ] +) + class DockerException(Exception): """ @@ -21,10 +30,9 @@ def create_api_error_from_http_exception(e): explanation = (response.content or 
'').strip() cls = APIError if response.status_code == 404: - if explanation and ('No such image' in str(explanation) or - 'not found: does not exist or no pull access' - in str(explanation) or - 'repository does not exist' in str(explanation)): + explanation_msg = (explanation or '').lower() + if any(fragment in explanation_msg + for fragment in _image_not_found_explanation_fragments): cls = ImageNotFound else: cls = NotFound diff --git a/docker/models/containers.py b/docker/models/containers.py index e34659cb..7769ed09 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -801,7 +801,7 @@ class ContainerCollection(Collection): image = image.id stream = kwargs.pop('stream', False) detach = kwargs.pop('detach', False) - platform = kwargs.pop('platform', None) + platform = kwargs.get('platform', None) if detach and remove: if version_gte(self.client.api._version, '1.25'): @@ -985,6 +985,7 @@ RUN_CREATE_KWARGS = [ 'mac_address', 'name', 'network_disabled', + 'platform', 'stdin_open', 'stop_signal', 'tty', diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index 70308416..3a2fbde8 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -348,6 +348,22 @@ class CreateContainerTest(BaseAPIClientTest): assert args[1]['headers'] == {'Content-Type': 'application/json'} assert args[1]['params'] == {'name': 'marisa-kirisame'} + def test_create_container_with_platform(self): + self.client.create_container('busybox', 'true', + platform='linux') + + args = fake_request.call_args + assert args[0][1] == url_prefix + 'containers/create' + assert json.loads(args[1]['data']) == json.loads(''' + {"Tty": false, "Image": "busybox", "Cmd": ["true"], + "AttachStdin": false, + "AttachStderr": true, "AttachStdout": true, + "StdinOnce": false, + "OpenStdin": false, "NetworkDisabled": false} + ''') + assert args[1]['headers'] == {'Content-Type': 'application/json'} + assert args[1]['params'] == {'name': None, 
'platform': 'linux'} + def test_create_container_with_mem_limit_as_int(self): self.client.create_container( 'busybox', 'true', host_config=self.client.create_host_config( diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index 785a8490..e4ee074d 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -77,6 +77,7 @@ class ContainerCollectionTest(unittest.TestCase): oom_score_adj=5, pid_mode='host', pids_limit=500, + platform='linux', ports={ 1111: 4567, 2222: None @@ -186,6 +187,7 @@ class ContainerCollectionTest(unittest.TestCase): name='somename', network_disabled=False, networking_config={'foo': None}, + platform='linux', ports=[('1111', 'tcp'), ('2222', 'tcp')], stdin_open=True, stop_signal=9, @@ -314,6 +316,33 @@ class ContainerCollectionTest(unittest.TestCase): 'NetworkMode': 'default'} ) + def test_run_platform(self): + client = make_fake_client() + + # raise exception on first call, then return normal value + client.api.create_container.side_effect = [ + docker.errors.ImageNotFound(""), + client.api.create_container.return_value + ] + + client.containers.run(image='alpine', platform='linux/arm64') + + client.api.pull.assert_called_with( + 'alpine', + tag='latest', + all_tags=False, + stream=True, + platform='linux/arm64', + ) + + client.api.create_container.assert_called_with( + detach=False, + platform='linux/arm64', + image='alpine', + command=None, + host_config={'NetworkMode': 'default'}, + ) + def test_create(self): client = make_fake_client() container = client.containers.create( From d69de54d7ce967ecd48db50ceecf1a700e84d7eb Mon Sep 17 00:00:00 2001 From: David Date: Fri, 29 Jul 2022 20:04:47 +0200 Subject: [PATCH 204/211] api: add cgroupns option to container create (#2930) Signed-off-by: David Otto --- docker/models/containers.py | 6 ++++++ docker/types/containers.py | 6 +++++- tests/unit/models_containers_test.py | 2 ++ 3 files changed, 13 insertions(+), 1 deletion(-) 
diff --git a/docker/models/containers.py b/docker/models/containers.py index 7769ed09..313d47d6 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -553,6 +553,11 @@ class ContainerCollection(Collection): ``["SYS_ADMIN", "MKNOD"]``. cap_drop (list of str): Drop kernel capabilities. cgroup_parent (str): Override the default parent cgroup. + cgroupns (str): Override the default cgroup namespace mode for the + container. One of: + - ``private`` the container runs in its own private cgroup + namespace. + - ``host`` use the host system's cgroup namespace. cpu_count (int): Number of usable CPUs (Windows only). cpu_percent (int): Usable percentage of the available CPUs (Windows only). @@ -1002,6 +1007,7 @@ RUN_HOST_CONFIG_KWARGS = [ 'cap_add', 'cap_drop', 'cgroup_parent', + 'cgroupns', 'cpu_count', 'cpu_percent', 'cpu_period', diff --git a/docker/types/containers.py b/docker/types/containers.py index f1b60b2d..84df0f7e 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -272,7 +272,8 @@ class HostConfig(dict): volume_driver=None, cpu_count=None, cpu_percent=None, nano_cpus=None, cpuset_mems=None, runtime=None, mounts=None, cpu_rt_period=None, cpu_rt_runtime=None, - device_cgroup_rules=None, device_requests=None): + device_cgroup_rules=None, device_requests=None, + cgroupns=None): if mem_limit is not None: self['Memory'] = parse_bytes(mem_limit) @@ -646,6 +647,9 @@ class HostConfig(dict): req = DeviceRequest(**req) self['DeviceRequests'].append(req) + if cgroupns: + self['CgroupnsMode'] = cgroupns + def host_config_type_error(param, param_value, expected): error_msg = 'Invalid type for {0} param: expected {1} but found {2}' diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index e4ee074d..101708eb 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -39,6 +39,7 @@ class ContainerCollectionTest(unittest.TestCase): cap_add=['foo'], 
cap_drop=['bar'], cgroup_parent='foobar', + cgroupns='host', cpu_period=1, cpu_quota=2, cpu_shares=5, @@ -135,6 +136,7 @@ class ContainerCollectionTest(unittest.TestCase): 'BlkioWeight': 2, 'CapAdd': ['foo'], 'CapDrop': ['bar'], + 'CgroupnsMode': 'host', 'CgroupParent': 'foobar', 'CpuPeriod': 1, 'CpuQuota': 2, From b2a18d7209f827d83cc33acb80aa31bf404ffd4b Mon Sep 17 00:00:00 2001 From: Peter Dave Hello Date: Sat, 30 Jul 2022 02:09:06 +0800 Subject: [PATCH 205/211] build: disable pip cache in Dockerfile (#2828) Signed-off-by: Peter Dave Hello --- Dockerfile | 6 +++--- Dockerfile-docs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8a0d32e4..c158a9d6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,10 +6,10 @@ RUN mkdir /src WORKDIR /src COPY requirements.txt /src/requirements.txt -RUN pip install -r requirements.txt +RUN pip install --no-cache-dir -r requirements.txt COPY test-requirements.txt /src/test-requirements.txt -RUN pip install -r test-requirements.txt +RUN pip install --no-cache-dir -r test-requirements.txt COPY . /src -RUN pip install . +RUN pip install --no-cache-dir . diff --git a/Dockerfile-docs b/Dockerfile-docs index 98901dfe..e993822b 100644 --- a/Dockerfile-docs +++ b/Dockerfile-docs @@ -10,6 +10,6 @@ RUN addgroup --gid $gid sphinx \ WORKDIR /src COPY requirements.txt docs-requirements.txt ./ -RUN pip install -r requirements.txt -r docs-requirements.txt +RUN pip install --no-cache-dir -r requirements.txt -r docs-requirements.txt USER sphinx From 0031ac2186406c9b48c6fc5253affd4b62fef0f5 Mon Sep 17 00:00:00 2001 From: Till! 
Date: Fri, 29 Jul 2022 20:51:43 +0200 Subject: [PATCH 206/211] api: add force to plugin disable (#2843) Signed-off-by: till --- docker/api/plugin.py | 5 +++-- docker/models/plugins.py | 7 +++++-- tests/integration/api_plugin_test.py | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/docker/api/plugin.py b/docker/api/plugin.py index 57110f11..10210c1a 100644 --- a/docker/api/plugin.py +++ b/docker/api/plugin.py @@ -51,19 +51,20 @@ class PluginApiMixin: return True @utils.minimum_version('1.25') - def disable_plugin(self, name): + def disable_plugin(self, name, force=False): """ Disable an installed plugin. Args: name (string): The name of the plugin. The ``:latest`` tag is optional, and is the default if omitted. + force (bool): To enable the force query parameter. Returns: ``True`` if successful """ url = self._url('/plugins/{0}/disable', name) - res = self._post(url) + res = self._post(url, params={'force': force}) self._raise_for_status(res) return True diff --git a/docker/models/plugins.py b/docker/models/plugins.py index 69b94f35..16f5245e 100644 --- a/docker/models/plugins.py +++ b/docker/models/plugins.py @@ -44,16 +44,19 @@ class Plugin(Model): self.client.api.configure_plugin(self.name, options) self.reload() - def disable(self): + def disable(self, force=False): """ Disable the plugin. + Args: + force (bool): Force disable. Default: False + Raises: :py:class:`docker.errors.APIError` If the server returns an error. 
""" - self.client.api.disable_plugin(self.name) + self.client.api.disable_plugin(self.name, force) self.reload() def enable(self, timeout=0): diff --git a/tests/integration/api_plugin_test.py b/tests/integration/api_plugin_test.py index 38f9d12d..3ecb0283 100644 --- a/tests/integration/api_plugin_test.py +++ b/tests/integration/api_plugin_test.py @@ -22,13 +22,13 @@ class PluginTest(BaseAPIIntegrationTest): def teardown_method(self, method): client = self.get_client_instance() try: - client.disable_plugin(SSHFS) + client.disable_plugin(SSHFS, True) except docker.errors.APIError: pass for p in self.tmp_plugins: try: - client.remove_plugin(p, force=True) + client.remove_plugin(p) except docker.errors.APIError: pass From 26753c81defff28a1a38a34788e9653c8eb87c3d Mon Sep 17 00:00:00 2001 From: ercildoune <49232938+ercildoune@users.noreply.github.com> Date: Sat, 30 Jul 2022 02:54:55 +0800 Subject: [PATCH 207/211] api: add rollback_config to service create (#2917) `rollback_config` was not in the list of `CREATE_SERVICE_KWARGS` which prevented it from being an argument when creating services. It has now been added and the problem fixed, allowing services to have a rollback_config during creation and updating. Fixes #2832. 
Signed-off-by: Fraser Patten Signed-off-by: Milas Bowman Co-authored-by: Milas Bowman --- docker/models/services.py | 1 + tests/integration/models_services_test.py | 7 ++++++- tests/unit/models_services_test.py | 2 ++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/docker/models/services.py b/docker/models/services.py index 200dd333..92550681 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -320,6 +320,7 @@ CREATE_SERVICE_KWARGS = [ 'labels', 'mode', 'update_config', + 'rollback_config', 'endpoint_spec', ] diff --git a/tests/integration/models_services_test.py b/tests/integration/models_services_test.py index 982842b3..f1439a41 100644 --- a/tests/integration/models_services_test.py +++ b/tests/integration/models_services_test.py @@ -30,13 +30,18 @@ class ServiceTest(unittest.TestCase): # ContainerSpec arguments image="alpine", command="sleep 300", - container_labels={'container': 'label'} + container_labels={'container': 'label'}, + rollback_config={'order': 'start-first'} ) assert service.name == name assert service.attrs['Spec']['Labels']['foo'] == 'bar' container_spec = service.attrs['Spec']['TaskTemplate']['ContainerSpec'] assert "alpine" in container_spec['Image'] assert container_spec['Labels'] == {'container': 'label'} + spec_rollback = service.attrs['Spec'].get('RollbackConfig', None) + assert spec_rollback is not None + assert ('Order' in spec_rollback and + spec_rollback['Order'] == 'start-first') def test_create_with_network(self): client = docker.from_env(version=TEST_API_VERSION) diff --git a/tests/unit/models_services_test.py b/tests/unit/models_services_test.py index b9192e42..94a27f0e 100644 --- a/tests/unit/models_services_test.py +++ b/tests/unit/models_services_test.py @@ -11,6 +11,7 @@ class CreateServiceKwargsTest(unittest.TestCase): 'labels': {'key': 'value'}, 'hostname': 'test_host', 'mode': 'global', + 'rollback_config': {'rollback': 'config'}, 'update_config': {'update': 'config'}, 'networks': 
['somenet'], 'endpoint_spec': {'blah': 'blah'}, @@ -37,6 +38,7 @@ class CreateServiceKwargsTest(unittest.TestCase): 'name': 'somename', 'labels': {'key': 'value'}, 'mode': 'global', + 'rollback_config': {'rollback': 'config'}, 'update_config': {'update': 'config'}, 'endpoint_spec': {'blah': 'blah'}, } From 868e996269b6934420f0cd2104621b6f45f668e5 Mon Sep 17 00:00:00 2001 From: Milas Bowman Date: Fri, 29 Jul 2022 15:28:16 -0400 Subject: [PATCH 208/211] model: add remove() to Image (#3026) Allow an Image to be deleted by calling the remove() method on it, just like a Volume. Signed-off-by: Ahmon Dancy Signed-off-by: Milas Bowman Co-authored-by: Ahmon Dancy --- docker/models/images.py | 18 ++++++++++++++++++ tests/unit/models_images_test.py | 10 ++++++++++ 2 files changed, 28 insertions(+) diff --git a/docker/models/images.py b/docker/models/images.py index e247d351..79ccbe40 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -61,6 +61,24 @@ class Image(Model): """ return self.client.api.history(self.id) + def remove(self, force=False, noprune=False): + """ + Remove this image. + + Args: + force (bool): Force removal of the image + noprune (bool): Do not delete untagged parents + + Raises: + :py:class:`docker.errors.APIError` + If the server returns an error. + """ + return self.client.api.remove_image( + self.id, + force=force, + noprune=noprune, + ) + def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE, named=False): """ Get a tarball of an image. Similar to the ``docker save`` command. 
diff --git a/tests/unit/models_images_test.py b/tests/unit/models_images_test.py index 436fd61f..3478c3fe 100644 --- a/tests/unit/models_images_test.py +++ b/tests/unit/models_images_test.py @@ -150,6 +150,16 @@ class ImageTest(unittest.TestCase): image.history() client.api.history.assert_called_with(FAKE_IMAGE_ID) + def test_remove(self): + client = make_fake_client() + image = client.images.get(FAKE_IMAGE_ID) + image.remove() + client.api.remove_image.assert_called_with( + FAKE_IMAGE_ID, + force=False, + noprune=False, + ) + def test_save(self): client = make_fake_client() image = client.images.get(FAKE_IMAGE_ID) From 3ee3a2486fe75ed858f8a3defe0fc79b2743d5df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Loiselet?= Date: Fri, 29 Jul 2022 21:33:23 +0200 Subject: [PATCH 209/211] build: trim trailing whitespace from dockerignore entries (#2733) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fix(dockerignore): trim trailing whitespace Signed-off-by: Clément Loiselet --- docker/utils/build.py | 3 +++ tests/integration/api_build_test.py | 9 +++++++++ 2 files changed, 12 insertions(+) diff --git a/docker/utils/build.py b/docker/utils/build.py index ac060434..59564c4c 100644 --- a/docker/utils/build.py +++ b/docker/utils/build.py @@ -224,6 +224,9 @@ class Pattern: @classmethod def normalize(cls, p): + # Remove trailing spaces + p = p.strip() + # Leading and trailing slashes are not relevant. Yes, # "foo.py/" must exclude the "foo.py" regular file. "." # components are not relevant either, even if the whole diff --git a/tests/integration/api_build_test.py b/tests/integration/api_build_test.py index ef48e12e..606c3b7e 100644 --- a/tests/integration/api_build_test.py +++ b/tests/integration/api_build_test.py @@ -100,7 +100,9 @@ class BuildTest(BaseAPIIntegrationTest): 'ignored', 'Dockerfile', '.dockerignore', + ' ignored-with-spaces ', # check that spaces are trimmed '!ignored/subdir/excepted-file', + '! 
ignored/subdir/excepted-with-spaces ' '', # empty line, '#*', # comment line ])) @@ -111,6 +113,9 @@ class BuildTest(BaseAPIIntegrationTest): with open(os.path.join(base_dir, '#file.txt'), 'w') as f: f.write('this file should not be ignored') + with open(os.path.join(base_dir, 'ignored-with-spaces'), 'w') as f: + f.write("this file should be ignored") + subdir = os.path.join(base_dir, 'ignored', 'subdir') os.makedirs(subdir) with open(os.path.join(subdir, 'file'), 'w') as f: @@ -119,6 +124,9 @@ class BuildTest(BaseAPIIntegrationTest): with open(os.path.join(subdir, 'excepted-file'), 'w') as f: f.write("this file should not be ignored") + with open(os.path.join(subdir, 'excepted-with-spaces'), 'w') as f: + f.write("this file should not be ignored") + tag = 'docker-py-test-build-with-dockerignore' stream = self.client.build( path=base_dir, @@ -136,6 +144,7 @@ class BuildTest(BaseAPIIntegrationTest): assert sorted(list(filter(None, logs.split('\n')))) == sorted([ '/test/#file.txt', + '/test/ignored/subdir/excepted-with-spaces', '/test/ignored/subdir/excepted-file', '/test/not-ignored' ]) From 55f47299c45b0c12531a68e233ea98617b1f7928 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Fri, 29 Jul 2022 22:54:27 +0300 Subject: [PATCH 210/211] docs: fix TLS server verify example (#2574) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Leaving out the verify parameter means verification will not be done. Signed-off-by: Ville Skyttä --- docs/tls.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/tls.rst b/docs/tls.rst index 2e2f1ea9..b95b468c 100644 --- a/docs/tls.rst +++ b/docs/tls.rst @@ -15,7 +15,7 @@ For example, to check the server against a specific CA certificate: .. 
code-block:: python - tls_config = docker.tls.TLSConfig(ca_cert='/path/to/ca.pem') + tls_config = docker.tls.TLSConfig(ca_cert='/path/to/ca.pem', verify=True) client = docker.DockerClient(base_url='', tls=tls_config) This is the equivalent of ``docker --tlsverify --tlscacert /path/to/ca.pem ...``. From 73421027be04c97fc6f50da0647ba47388ed60e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Fri, 29 Jul 2022 22:55:14 +0300 Subject: [PATCH 211/211] docs: clarify TLSConfig verify parameter (#2573) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Ville Skyttä --- docker/tls.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docker/tls.py b/docker/tls.py index 882a50ea..f4dffb2e 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -12,8 +12,9 @@ class TLSConfig: Args: client_cert (tuple of str): Path to client cert, path to client key. ca_cert (str): Path to CA cert file. - verify (bool or str): This can be ``False`` or a path to a CA cert - file. + verify (bool or str): This can be a bool or a path to a CA cert + file to verify against. If ``True``, verify using ca_cert; + if ``False`` or not specified, do not verify. ssl_version (int): A valid `SSL version`_. assert_hostname (bool): Verify the hostname of the server.