From 341ae50b86848e7c8bc127182e6cb81c9f90cbac Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 12 Jul 2019 01:28:41 +0200 Subject: [PATCH 01/20] Update credentials-helpers to v0.6.2 Signed-off-by: Sebastiaan van Stijn --- tests/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/Dockerfile b/tests/Dockerfile index df8468ab..27a12673 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -4,7 +4,7 @@ FROM python:${PYTHON_VERSION} ARG APT_MIRROR RUN sed -ri "s/(httpredir|deb).debian.org/${APT_MIRROR:-deb.debian.org}/g" /etc/apt/sources.list \ - && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list + && sed -ri "s/(security).debian.org/${APT_MIRROR:-security.debian.org}/g" /etc/apt/sources.list RUN apt-get update && apt-get -y install \ gnupg2 \ From 25a831165ce4d07bb74eab296dc23c5e16fd28c2 Mon Sep 17 00:00:00 2001 From: Till Riedel Date: Sun, 14 Apr 2019 10:38:07 +0200 Subject: [PATCH 02/20] set logging level of paramiko to warn Signed-off-by: Till Riedel --- docker/transport/sshconn.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 7de0e590..57b55c9e 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -2,7 +2,10 @@ import paramiko import requests.adapters import six import logging +<<<<<<< HEAD import os +======= +>>>>>>> 2dc569a... set logging level of paramiko to warn from docker.transport.basehttpadapter import BaseHTTPAdapter from .. import constants From 908b4b3addb0a2e648599c95674979d41818bf52 Mon Sep 17 00:00:00 2001 From: Till Riedel Date: Sun, 14 Apr 2019 13:52:12 +0200 Subject: [PATCH 03/20] obey Hostname Username Port and ProxyCommand settings from .ssh/config Signed-off-by: Till Riedel --- docker/transport/sshconn.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 57b55c9e..7de0e590 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -2,10 +2,7 @@ import paramiko import requests.adapters import six import logging -<<<<<<< HEAD import os -======= ->>>>>>> 2dc569a... set logging level of paramiko to warn from docker.transport.basehttpadapter import BaseHTTPAdapter from .. import constants From 424421a2b381194328ada6fdb9b4f6b05d2fd3e3 Mon Sep 17 00:00:00 2001 From: Anca Iordache Date: Fri, 17 Jan 2020 19:25:55 +0100 Subject: [PATCH 04/20] Implement context management, lifecycle and unittests. Signed-off-by: Anca Iordache --- docker/context/api.py | 16 ++++++++++++ docker/context/context.py | 53 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+) diff --git a/docker/context/api.py b/docker/context/api.py index c45115bc..d903d9c6 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -38,7 +38,13 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", +<<<<<<< HEAD "Metadata": {}, +======= + "Metadata": { + "StackOrchestrator": "swarm" + }, +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", @@ -55,9 +61,13 @@ class ContextAPI(object): ctx = Context.load_context(name) if ctx: raise errors.ContextAlreadyExists(name) +<<<<<<< HEAD endpoint = "docker" if orchestrator and orchestrator != "swarm": endpoint = orchestrator +======= + endpoint = "docker" if orchestrator == "swarm" else orchestrator +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
ctx = Context(name, orchestrator) ctx.set_endpoint( endpoint, host, tls_cfg, @@ -79,7 +89,13 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", +<<<<<<< HEAD "Metadata": {}, +======= + "Metadata": { + "StackOrchestrator": "swarm" + }, +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", diff --git a/docker/context/context.py b/docker/context/context.py index 2413b2ec..3859db2b 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -57,7 +57,11 @@ class Context: self, name="docker", host=None, tls_cfg=None, skip_tls_verify=False, def_namespace=None): self.endpoints[name] = { +<<<<<<< HEAD "Host": get_context_host(host, not skip_tls_verify), +======= + "Host": get_context_host(host), +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "SkipTLSVerify": skip_tls_verify } if def_namespace: @@ -71,6 +75,7 @@ class Context: @classmethod def load_context(cls, name): +<<<<<<< HEAD meta = Context._load_meta(name) if meta: instance = cls( @@ -78,6 +83,11 @@ class Context: orchestrator=meta["Metadata"].get("StackOrchestrator", None), endpoints=meta.get("Endpoints", None)) instance.context_type = meta["Metadata"].get("Type", None) +======= + name, orchestrator, endpoints = Context._load_meta(name) + if name: + instance = cls(name, orchestrator, endpoints=endpoints) +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. instance._load_certs() instance.meta_path = get_meta_dir(name) return instance @@ -85,6 +95,7 @@ class Context: @classmethod def _load_meta(cls, name): +<<<<<<< HEAD meta_file = get_meta_file(name) if not os.path.isfile(meta_file): return None @@ -109,6 +120,27 @@ class Context: v.get("SkipTLSVerify", True)) return metadata +======= + metadata = {} + meta_file = get_meta_file(name) + if os.path.isfile(meta_file): + with open(meta_file) as f: + try: + with open(meta_file) as f: + metadata = json.load(f) + for k, v in metadata["Endpoints"].items(): + metadata["Endpoints"][k]["SkipTLSVerify"] = bool( + v["SkipTLSVerify"]) + except (IOError, KeyError, ValueError) as e: + # unknown format + raise Exception("""Detected corrupted meta file for + context {} : {}""".format(name, e)) + + return ( + metadata["Name"], metadata["Metadata"]["StackOrchestrator"], + metadata["Endpoints"]) + return None, None, None +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. def _load_certs(self): certs = {} @@ -177,15 +209,19 @@ class Context: result.update(self.Storage) return result +<<<<<<< HEAD def is_docker_host(self): return self.context_type is None +======= +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. @property def Name(self): return self.name @property def Host(self): +<<<<<<< HEAD if not self.orchestrator or self.orchestrator == "swarm": endpoint = self.endpoints.get("docker", None) if endpoint: @@ -193,6 +229,11 @@ class Context: return None return self.endpoints[self.orchestrator].get("Host", None) +======= + if self.orchestrator == "swarm": + return self.endpoints["docker"]["Host"] + return self.endpoints[self.orchestrator]["Host"] +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
@property def Orchestrator(self): @@ -200,19 +241,31 @@ class Context: @property def Metadata(self): +<<<<<<< HEAD meta = {} if self.orchestrator: meta = {"StackOrchestrator": self.orchestrator} return { "Name": self.name, "Metadata": meta, +======= + return { + "Name": self.name, + "Metadata": { + "StackOrchestrator": self.orchestrator + }, +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": self.endpoints } @property def TLSConfig(self): key = self.orchestrator +<<<<<<< HEAD if not key or key == "swarm": +======= + if key == "swarm": +>>>>>>> 64fdb32... Implement context management, lifecycle and unittests. key = "docker" if key in self.tls_cfg.keys(): return self.tls_cfg[key] From a3b42309e9d216f362133c5050a3e775afb2de60 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Fri, 7 Feb 2020 01:00:18 +0100 Subject: [PATCH 05/20] Post release 4.2.0 update: - Changelog - Next Version Signed-off-by: Ulysses Souza --- docs/change-log.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index ec73b8d8..e1b9e5da 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -36,7 +36,10 @@ Change log - Add support for docker contexts through `docker.ContextAPI` +<<<<<<< HEAD +======= +>>>>>>> 7c4194c... Post release 4.2.0 update: 4.1.0 ----- From 0268b0235153c3162f7075bfb0c0c267399d883d Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 28 May 2020 20:53:45 +0200 Subject: [PATCH 06/20] Specify when to use `tls` on Context constructor Signed-off-by: Ulysses Souza --- docker/context/context.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docker/context/context.py b/docker/context/context.py index 3859db2b..026a6941 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -57,11 +57,15 @@ class Context: self, name="docker", host=None, tls_cfg=None, skip_tls_verify=False, def_namespace=None): self.endpoints[name] = { +<<<<<<< HEAD <<<<<<< HEAD "Host": get_context_host(host, not skip_tls_verify), ======= "Host": get_context_host(host), >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. +======= + "Host": get_context_host(host, not skip_tls_verify), +>>>>>>> 3ce2d89... Specify when to use `tls` on Context constructor "SkipTLSVerify": skip_tls_verify } if def_namespace: From 9bb7c20938c22ce86c12bb1f6637ca6dfae3cc65 Mon Sep 17 00:00:00 2001 From: aiordache Date: Sat, 30 May 2020 11:01:22 +0200 Subject: [PATCH 07/20] Make orchestrator field optional Signed-off-by: aiordache --- docker/context/api.py | 16 ----------- docker/context/context.py | 56 ++++++++++++--------------------------- 2 files changed, 17 insertions(+), 55 deletions(-) diff --git a/docker/context/api.py b/docker/context/api.py index d903d9c6..c45115bc 100644 --- a/docker/context/api.py +++ b/docker/context/api.py @@ -38,13 +38,7 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", -<<<<<<< HEAD "Metadata": {}, -======= - "Metadata": { - "StackOrchestrator": "swarm" - }, ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", @@ -61,13 +55,9 @@ class ContextAPI(object): ctx = Context.load_context(name) if ctx: raise errors.ContextAlreadyExists(name) -<<<<<<< HEAD endpoint = "docker" if orchestrator and orchestrator != "swarm": endpoint = orchestrator -======= - endpoint = "docker" if orchestrator == "swarm" else orchestrator ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
ctx = Context(name, orchestrator) ctx.set_endpoint( endpoint, host, tls_cfg, @@ -89,13 +79,7 @@ class ContextAPI(object): >>> print(ctx.Metadata) { "Name": "test", -<<<<<<< HEAD "Metadata": {}, -======= - "Metadata": { - "StackOrchestrator": "swarm" - }, ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. "Endpoints": { "docker": { "Host": "unix:///var/run/docker.sock", diff --git a/docker/context/context.py b/docker/context/context.py index 026a6941..8158803d 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -57,15 +57,7 @@ class Context: self, name="docker", host=None, tls_cfg=None, skip_tls_verify=False, def_namespace=None): self.endpoints[name] = { -<<<<<<< HEAD -<<<<<<< HEAD "Host": get_context_host(host, not skip_tls_verify), -======= - "Host": get_context_host(host), ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. -======= - "Host": get_context_host(host, not skip_tls_verify), ->>>>>>> 3ce2d89... Specify when to use `tls` on Context constructor "SkipTLSVerify": skip_tls_verify } if def_namespace: @@ -79,7 +71,6 @@ class Context: @classmethod def load_context(cls, name): -<<<<<<< HEAD meta = Context._load_meta(name) if meta: instance = cls( @@ -87,11 +78,6 @@ class Context: orchestrator=meta["Metadata"].get("StackOrchestrator", None), endpoints=meta.get("Endpoints", None)) instance.context_type = meta["Metadata"].get("Type", None) -======= - name, orchestrator, endpoints = Context._load_meta(name) - if name: - instance = cls(name, orchestrator, endpoints=endpoints) ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. instance._load_certs() instance.meta_path = get_meta_dir(name) return instance @@ -99,7 +85,6 @@ class Context: @classmethod def _load_meta(cls, name): -<<<<<<< HEAD meta_file = get_meta_file(name) if not os.path.isfile(meta_file): return None @@ -124,27 +109,6 @@ class Context: v.get("SkipTLSVerify", True)) return metadata -======= - metadata = {} - meta_file = get_meta_file(name) - if os.path.isfile(meta_file): - with open(meta_file) as f: - try: - with open(meta_file) as f: - metadata = json.load(f) - for k, v in metadata["Endpoints"].items(): - metadata["Endpoints"][k]["SkipTLSVerify"] = bool( - v["SkipTLSVerify"]) - except (IOError, KeyError, ValueError) as e: - # unknown format - raise Exception("""Detected corrupted meta file for - context {} : {}""".format(name, e)) - - return ( - metadata["Name"], metadata["Metadata"]["StackOrchestrator"], - metadata["Endpoints"]) - return None, None, None ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. def _load_certs(self): certs = {} @@ -213,18 +177,16 @@ class Context: result.update(self.Storage) return result -<<<<<<< HEAD def is_docker_host(self): return self.context_type is None -======= ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. @property def Name(self): return self.name @property def Host(self): +<<<<<<< HEAD <<<<<<< HEAD if not self.orchestrator or self.orchestrator == "swarm": endpoint = self.endpoints.get("docker", None) @@ -235,6 +197,9 @@ class Context: return self.endpoints[self.orchestrator].get("Host", None) ======= if self.orchestrator == "swarm": +======= + if not self.orchestrator or self.orchestrator == "swarm": +>>>>>>> 1e11ece... Make orchestrator field optional return self.endpoints["docker"]["Host"] return self.endpoints[self.orchestrator]["Host"] >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
@@ -245,6 +210,7 @@ class Context: @property def Metadata(self): +<<<<<<< HEAD <<<<<<< HEAD meta = {} if self.orchestrator: @@ -259,17 +225,29 @@ class Context: "StackOrchestrator": self.orchestrator }, >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. +======= + meta = {} + if self.orchestrator: + meta = {"StackOrchestrator": self.orchestrator} + return { + "Name": self.name, + "Metadata": meta, +>>>>>>> 1e11ece... Make orchestrator field optional "Endpoints": self.endpoints } @property def TLSConfig(self): key = self.orchestrator +<<<<<<< HEAD <<<<<<< HEAD if not key or key == "swarm": ======= if key == "swarm": >>>>>>> 64fdb32... Implement context management, lifecycle and unittests. +======= + if not key or key == "swarm": +>>>>>>> 1e11ece... Make orchestrator field optional key = "docker" if key in self.tls_cfg.keys(): return self.tls_cfg[key] From 10cea0d2a091090faf5d999b2de2bda6cf6c9e6b Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 30 Jun 2020 18:30:49 +0200 Subject: [PATCH 08/20] Update version to 4.3.0-dev Signed-off-by: Ulysses Souza --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 06d6cc73..a7546092 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.2.2" +version = "4.3.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 4b4379a4c7cda78bb24103579325d38cdbdb0443 Mon Sep 17 00:00:00 2001 From: Mike Haboustak Date: Fri, 24 Apr 2020 06:42:59 -0400 Subject: [PATCH 09/20] Add support for DriverOpts in EndpointConfig Docker API 1.32 added support for providing options to a network driver via EndpointConfig when connecting a container to a network. Signed-off-by: Mike Haboustak --- docker/api/container.py | 2 ++ docker/api/network.py | 5 +++-- docker/models/networks.py | 2 ++ docker/types/networks.py | 11 ++++++++++- tests/integration/api_network_test.py | 21 +++++++++++++++++++++ tests/unit/api_network_test.py | 4 +++- 6 files changed, 41 insertions(+), 4 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 45bd3528..9df22a52 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -636,6 +636,8 @@ class ContainerApiMixin(object): network, using the IPv6 protocol. Defaults to ``None``. link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6) addresses. + driver_opt (dict): A dictionary of options to provide to the + network driver. Defaults to ``None``. Returns: (dict) An endpoint config. diff --git a/docker/api/network.py b/docker/api/network.py index 750b91b2..139c2d1a 100644 --- a/docker/api/network.py +++ b/docker/api/network.py @@ -216,7 +216,7 @@ class NetworkApiMixin(object): def connect_container_to_network(self, container, net_id, ipv4_address=None, ipv6_address=None, aliases=None, links=None, - link_local_ips=None): + link_local_ips=None, driver_opt=None): """ Connect a container to a network. 
@@ -240,7 +240,8 @@ class NetworkApiMixin(object): "Container": container, "EndpointConfig": self.create_endpoint_config( aliases=aliases, links=links, ipv4_address=ipv4_address, - ipv6_address=ipv6_address, link_local_ips=link_local_ips + ipv6_address=ipv6_address, link_local_ips=link_local_ips, + driver_opt=driver_opt ), } diff --git a/docker/models/networks.py b/docker/models/networks.py index f944c8e2..093deb7f 100644 --- a/docker/models/networks.py +++ b/docker/models/networks.py @@ -46,6 +46,8 @@ class Network(Model): network, using the IPv6 protocol. Defaults to ``None``. link_local_ips (:py:class:`list`): A list of link-local (IPv4/IPv6) addresses. + driver_opt (dict): A dictionary of options to provide to the + network driver. Defaults to ``None``. Raises: :py:class:`docker.errors.APIError` diff --git a/docker/types/networks.py b/docker/types/networks.py index 1c7b2c9e..1370dc19 100644 --- a/docker/types/networks.py +++ b/docker/types/networks.py @@ -4,7 +4,7 @@ from ..utils import normalize_links, version_lt class EndpointConfig(dict): def __init__(self, version, aliases=None, links=None, ipv4_address=None, - ipv6_address=None, link_local_ips=None): + ipv6_address=None, link_local_ips=None, driver_opt=None): if version_lt(version, '1.22'): raise errors.InvalidVersion( 'Endpoint config is not supported for API version < 1.22' @@ -33,6 +33,15 @@ class EndpointConfig(dict): if ipam_config: self['IPAMConfig'] = ipam_config + if driver_opt: + if version_lt(version, '1.32'): + raise errors.InvalidVersion( + 'DriverOpts is not supported for API version < 1.32' + ) + if not isinstance(driver_opt, dict): + raise TypeError('driver_opt must be a dictionary') + self['DriverOpts'] = driver_opt + class NetworkingConfig(dict): def __init__(self, endpoints_config=None): diff --git a/tests/integration/api_network_test.py b/tests/integration/api_network_test.py index 0f26827b..af22da8d 100644 --- a/tests/integration/api_network_test.py +++ b/tests/integration/api_network_test.py @@ -275,6 +275,27 @@ class TestNetworks(BaseAPIIntegrationTest): assert 'LinkLocalIPs' in net_cfg['IPAMConfig'] assert net_cfg['IPAMConfig']['LinkLocalIPs'] == ['169.254.8.8'] + @requires_api_version('1.32') + def test_create_with_driveropt(self): + container = self.client.create_container( + TEST_IMG, 'top', + networking_config=self.client.create_networking_config( + { + 'bridge': self.client.create_endpoint_config( + driver_opt={'com.docker-py.setting': 'on'} + ) + } + ), + host_config=self.client.create_host_config(network_mode='bridge') + ) + self.tmp_containers.append(container) + self.client.start(container) + container_data = self.client.inspect_container(container) + net_cfg = container_data['NetworkSettings']['Networks']['bridge'] + assert 'DriverOpts' in net_cfg + assert 'com.docker-py.setting' in net_cfg['DriverOpts'] + assert net_cfg['DriverOpts']['com.docker-py.setting'] == 'on' + @requires_api_version('1.22') def test_create_with_links(self): net_name, net_id = self.create_network() diff --git a/tests/unit/api_network_test.py b/tests/unit/api_network_test.py index c78554da..758f0132 100644 --- a/tests/unit/api_network_test.py +++ b/tests/unit/api_network_test.py @@ -136,7 +136,8 @@ class NetworkTest(BaseAPIClientTest): container={'Id': container_id}, net_id=network_id, aliases=['foo', 'bar'], - links=[('baz', 'quux')] + links=[('baz', 'quux')], + driver_opt={'com.docker-py.setting': 'yes'}, ) assert post.call_args[0][0] == ( @@ -148,6 +149,7 @@ class NetworkTest(BaseAPIClientTest): 'EndpointConfig': { 
'Aliases': ['foo', 'bar'], 'Links': ['baz:quux'], + 'DriverOpts': {'com.docker-py.setting': 'yes'}, }, } From d8961ebf12c83c331c601482db8f571893d2d435 Mon Sep 17 00:00:00 2001 From: Ofek Lev Date: Wed, 10 Jun 2020 15:31:19 -0400 Subject: [PATCH 10/20] Upgrade Windows dependency Signed-off-by: Ofek Lev --- requirements.txt | 3 +-- setup.py | 5 +---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/requirements.txt b/requirements.txt index 804a78a0..340e4312 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,8 +11,7 @@ paramiko==2.4.2 pycparser==2.17 pyOpenSSL==18.0.0 pyparsing==2.2.0 -pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' -pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' +pywin32==227; sys_platform == 'win32' requests==2.20.0 six==1.10.0 urllib3==1.24.3 diff --git a/setup.py b/setup.py index c29787b6..c7022950 100644 --- a/setup.py +++ b/setup.py @@ -24,10 +24,7 @@ extras_require = { ':python_version < "3.3"': 'ipaddress >= 1.0.16', # win32 APIs if on Windows (required for npipe support) - # Python 3.6 is only compatible with v220 ; Python < 3.5 is not supported - # on v220 ; ALL versions are broken for v222 (as of 2018-01-26) - ':sys_platform == "win32" and python_version < "3.6"': 'pypiwin32==219', - ':sys_platform == "win32" and python_version >= "3.6"': 'pypiwin32==223', + ':sys_platform == "win32"': 'pywin32==227', # If using docker-py over TLS, highly recommend this option is # pip-installed or pinned. From 00213a274b15b571c7f4f2496480676f7eea2dae Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 17 Jul 2020 14:25:27 +0200 Subject: [PATCH 11/20] Fix CreateContainerTest.test_invalid_log_driver_raises_exception This test was updated in 7d92fbdee1b8621f54faa595ba53d7ef78ef1acc, but omitted the "error" prefix in the message, causing the test to fail; _________ CreateContainerTest.test_invalid_log_driver_raises_exception _________ tests/integration/api_container_test.py:293: in test_invalid_log_driver_raises_exception assert excinfo.value.explanation in expected_msgs E AssertionError: assert 'error looking up logging plugin asdf: plugin "asdf" not found' in ["logger: no log driver named 'asdf' is registered", 'looking up logging plugin asdf: plugin "asdf" not found'] E + where 'error looking up logging plugin asdf: plugin "asdf" not found' = APIError(HTTPError('400 Client Error: Bad Request for url: http+docker://localhost/v1.39/containers/create')).explanation E + where APIError(HTTPError('400 Client Error: Bad Request for url: http+docker://localhost/v1.39/containers/create')) = .value Signed-off-by: Sebastiaan van Stijn --- tests/integration/api_container_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 411d4c2e..65e611b2 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -279,7 +279,7 @@ class CreateContainerTest(BaseAPIIntegrationTest): expected_msgs = [ "logger: no log driver named 'asdf' is registered", - "looking up logging plugin asdf: plugin \"asdf\" not found", + "error looking up logging plugin asdf: plugin \"asdf\" not found", ] with pytest.raises(docker.errors.APIError) as excinfo: # raises an internal server error 500 From 75ed0ecf5a6cbfa519889e024bd3d35ab031ff5a Mon Sep 17 00:00:00 2001 From: Lucidiot Date: Fri, 7 Aug 2020 13:58:35 +0200 Subject: [PATCH 12/20] Add device requests (#2471) * Add DeviceRequest type Signed-off-by: 
Erwan Rouchet * Add device_requests kwarg in host config Signed-off-by: Erwan Rouchet * Add unit test for device requests Signed-off-by: Erwan Rouchet * Fix unit test Signed-off-by: Erwan Rouchet * Use parentheses for multiline import Signed-off-by: Erwan Rouchet * Create 1.40 client for device-requests test Signed-off-by: Laurie O Co-authored-by: Laurie O Co-authored-by: Bastien Abadie --- docker/api/container.py | 3 + docker/models/containers.py | 4 ++ docker/types/__init__.py | 4 +- docker/types/containers.py | 113 ++++++++++++++++++++++++++++++- tests/unit/api_container_test.py | 64 ++++++++++++++++- 5 files changed, 185 insertions(+), 3 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 9df22a52..2ba08e53 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -480,6 +480,9 @@ class ContainerApiMixin(object): For example, ``/dev/sda:/dev/xvda:rwm`` allows the container to have read-write access to the host's ``/dev/sda`` via a node named ``/dev/xvda`` inside the container. + device_requests (:py:class:`list`): Expose host resources such as + GPUs to the container, as a list of + :py:class:`docker.types.DeviceRequest` instances. dns (:py:class:`list`): Set custom DNS servers. dns_opt (:py:class:`list`): Additional options to be added to the container's ``resolv.conf`` file diff --git a/docker/models/containers.py b/docker/models/containers.py index d1f275f7..e8082ba4 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -579,6 +579,9 @@ class ContainerCollection(Collection): For example, ``/dev/sda:/dev/xvda:rwm`` allows the container to have read-write access to the host's ``/dev/sda`` via a node named ``/dev/xvda`` inside the container. + device_requests (:py:class:`list`): Expose host resources such as + GPUs to the container, as a list of + :py:class:`docker.types.DeviceRequest` instances. dns (:py:class:`list`): Set custom DNS servers. dns_opt (:py:class:`list`): Additional options to be added to the container's ``resolv.conf`` file. @@ -998,6 +1001,7 @@ RUN_HOST_CONFIG_KWARGS = [ 'device_write_bps', 'device_write_iops', 'devices', + 'device_requests', 'dns_opt', 'dns_search', 'dns', diff --git a/docker/types/__init__.py b/docker/types/__init__.py index 5db330e2..b425746e 100644 --- a/docker/types/__init__.py +++ b/docker/types/__init__.py @@ -1,5 +1,7 @@ # flake8: noqa -from .containers import ContainerConfig, HostConfig, LogConfig, Ulimit +from .containers import ( + ContainerConfig, HostConfig, LogConfig, Ulimit, DeviceRequest +) from .daemon import CancellableStream from .healthcheck import Healthcheck from .networks import EndpointConfig, IPAMConfig, IPAMPool, NetworkingConfig diff --git a/docker/types/containers.py b/docker/types/containers.py index fd8cab49..149b85df 100644 --- a/docker/types/containers.py +++ b/docker/types/containers.py @@ -154,6 +154,104 @@ class Ulimit(DictType): self['Hard'] = value +class DeviceRequest(DictType): + """ + Create a device request to be used with + :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`. + + Args: + + driver (str): Which driver to use for this device. Optional. + count (int): Number or devices to request. Optional. + Set to -1 to request all available devices. + device_ids (list): List of strings for device IDs. Optional. + Set either ``count`` or ``device_ids``. + capabilities (list): List of lists of strings to request + capabilities. Optional. The global list acts like an OR, + and the sub-lists are AND. 
The driver will try to satisfy + one of the sub-lists. + Available capabilities for the ``nvidia`` driver can be found + `here `_. + options (dict): Driver-specific options. Optional. + """ + + def __init__(self, **kwargs): + driver = kwargs.get('driver', kwargs.get('Driver')) + count = kwargs.get('count', kwargs.get('Count')) + device_ids = kwargs.get('device_ids', kwargs.get('DeviceIDs')) + capabilities = kwargs.get('capabilities', kwargs.get('Capabilities')) + options = kwargs.get('options', kwargs.get('Options')) + + if driver is None: + driver = '' + elif not isinstance(driver, six.string_types): + raise ValueError('DeviceRequest.driver must be a string') + if count is None: + count = 0 + elif not isinstance(count, int): + raise ValueError('DeviceRequest.count must be an integer') + if device_ids is None: + device_ids = [] + elif not isinstance(device_ids, list): + raise ValueError('DeviceRequest.device_ids must be a list') + if capabilities is None: + capabilities = [] + elif not isinstance(capabilities, list): + raise ValueError('DeviceRequest.capabilities must be a list') + if options is None: + options = {} + elif not isinstance(options, dict): + raise ValueError('DeviceRequest.options must be a dict') + + super(DeviceRequest, self).__init__({ + 'Driver': driver, + 'Count': count, + 'DeviceIDs': device_ids, + 'Capabilities': capabilities, + 'Options': options + }) + + @property + def driver(self): + return self['Driver'] + + @driver.setter + def driver(self, value): + self['Driver'] = value + + @property + def count(self): + return self['Count'] + + @count.setter + def count(self, value): + self['Count'] = value + + @property + def device_ids(self): + return self['DeviceIDs'] + + @device_ids.setter + def device_ids(self, value): + self['DeviceIDs'] = value + + @property + def capabilities(self): + return self['Capabilities'] + + @capabilities.setter + def capabilities(self, value): + self['Capabilities'] = value + + @property + def options(self): + return self['Options'] + + @options.setter + def options(self, value): + self['Options'] = value + + class HostConfig(dict): def __init__(self, version, binds=None, port_bindings=None, lxc_conf=None, publish_all_ports=False, links=None, @@ -176,7 +274,7 @@ class HostConfig(dict): volume_driver=None, cpu_count=None, cpu_percent=None, nano_cpus=None, cpuset_mems=None, runtime=None, mounts=None, cpu_rt_period=None, cpu_rt_runtime=None, - device_cgroup_rules=None): + device_cgroup_rules=None, device_requests=None): if mem_limit is not None: self['Memory'] = parse_bytes(mem_limit) @@ -536,6 +634,19 @@ class HostConfig(dict): ) self['DeviceCgroupRules'] = device_cgroup_rules + if device_requests is not None: + if version_lt(version, '1.40'): + raise host_config_version_error('device_requests', '1.40') + if not isinstance(device_requests, list): + raise host_config_type_error( + 'device_requests', device_requests, 'list' + ) + self['DeviceRequests'] = [] + for req in device_requests: + if not isinstance(req, DeviceRequest): + req = DeviceRequest(**req) + self['DeviceRequests'].append(req) + def host_config_type_error(param, param_value, expected): error_msg = 'Invalid type for {0} param: expected {1} but found {2}' diff --git a/tests/unit/api_container_test.py b/tests/unit/api_container_test.py index a7e183c8..8a0577e7 100644 --- a/tests/unit/api_container_test.py +++ b/tests/unit/api_container_test.py @@ -5,6 +5,7 @@ import json import signal import docker +from docker.api import APIClient import pytest import six @@ -12,7 +13,7 @@ from . 
import fake_api from ..helpers import requires_api_version from .api_test import ( BaseAPIClientTest, url_prefix, fake_request, DEFAULT_TIMEOUT_SECONDS, - fake_inspect_container + fake_inspect_container, url_base ) try: @@ -767,6 +768,67 @@ class CreateContainerTest(BaseAPIClientTest): assert args[1]['headers'] == {'Content-Type': 'application/json'} assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS + def test_create_container_with_device_requests(self): + client = APIClient(version='1.40') + fake_api.fake_responses.setdefault( + '{0}/v1.40/containers/create'.format(fake_api.prefix), + fake_api.post_fake_create_container, + ) + client.create_container( + 'busybox', 'true', host_config=client.create_host_config( + device_requests=[ + { + 'device_ids': [ + '0', + 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a' + ] + }, + { + 'driver': 'nvidia', + 'Count': -1, + 'capabilities': [ + ['gpu', 'utility'] + ], + 'options': { + 'key': 'value' + } + } + ] + ) + ) + + args = fake_request.call_args + assert args[0][1] == url_base + 'v1.40/' + 'containers/create' + expected_payload = self.base_create_payload() + expected_payload['HostConfig'] = client.create_host_config() + expected_payload['HostConfig']['DeviceRequests'] = [ + { + 'Driver': '', + 'Count': 0, + 'DeviceIDs': [ + '0', + 'GPU-3a23c669-1f69-c64e-cf85-44e9b07e7a2a' + ], + 'Capabilities': [], + 'Options': {} + }, + { + 'Driver': 'nvidia', + 'Count': -1, + 'DeviceIDs': [], + 'Capabilities': [ + ['gpu', 'utility'] + ], + 'Options': { + 'key': 'value' + } + } + ] + assert json.loads(args[1]['data']) == expected_payload + assert args[1]['headers']['Content-Type'] == 'application/json' + assert set(args[1]['headers']) <= {'Content-Type', 'User-Agent'} + assert args[1]['timeout'] == DEFAULT_TIMEOUT_SECONDS + def test_create_container_with_labels_dict(self): labels_dict = { six.text_type('foo'): six.text_type('1'), From 687f23afe4fcbff6f343d795e4d1272599e714c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Fri, 7 Aug 2020 15:33:19 +0300 Subject: [PATCH 13/20] Spelling fixes (#2571) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Ville Skyttä --- docker/api/container.py | 2 +- docs/change-log.md | 4 ++-- tests/unit/utils_build_test.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 2ba08e53..ee3b4c3f 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1125,7 +1125,7 @@ class ContainerApiMixin(object): else: if decode: raise errors.InvalidArgument( - "decode is only available in conjuction with stream=True" + "decode is only available in conjunction with stream=True" ) return self._result(self._get(url, params={'stream': False}), json=True) diff --git a/docs/change-log.md b/docs/change-log.md index e1b9e5da..3f616904 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -133,7 +133,7 @@ Change log ### Bugfixes -* Fix base_url to keep TCP protocol on utils.py by letting the responsability of changing the +* Fix base_url to keep TCP protocol on utils.py by letting the responsibility of changing the protocol to `parse_host` afterwards, letting `base_url` with the original value. * XFAIL test_attach_stream_and_cancel on TLS @@ -1237,7 +1237,7 @@ like the others (`Client.volumes`, `Client.create_volume`, `Client.inspect_volume`, `Client.remove_volume`). * Added support for the `group_add` parameter in `create_host_config`. 
-* Added support for the CPU CFS (`cpu_quota` and `cpu_period`) parameteres +* Added support for the CPU CFS (`cpu_quota` and `cpu_period`) parameters in `create_host_config`. * Added support for the archive API endpoint (`Client.get_archive`, `Client.put_archive`). diff --git a/tests/unit/utils_build_test.py b/tests/unit/utils_build_test.py index 012f15b4..bc6fb5f4 100644 --- a/tests/unit/utils_build_test.py +++ b/tests/unit/utils_build_test.py @@ -335,7 +335,7 @@ class ExcludePathsTest(unittest.TestCase): # Dockerignore reference stipulates that absolute paths are # equivalent to relative paths, hence /../foo should be # equivalent to ../foo. It also stipulates that paths are run - # through Go's filepath.Clean, which explicitely "replace + # through Go's filepath.Clean, which explicitly "replace # "/.." by "/" at the beginning of a path". assert exclude_paths( base, From 351b131fe9ab5768c3429890ae0e59cb22f1bd29 Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Wed, 3 Jun 2020 10:26:41 +0200 Subject: [PATCH 14/20] Update test engine version to 19.03.12 Signed-off-by: Sebastiaan van Stijn --- Jenkinsfile | 4 ++-- Makefile | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 8777214c..88c21592 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -31,7 +31,7 @@ def buildImages = { -> } def getDockerVersions = { -> - def dockerVersions = ["19.03.5"] + def dockerVersions = ["19.03.12"] wrappedNode(label: "amd64 && ubuntu-1804 && overlay2") { def result = sh(script: """docker run --rm \\ --entrypoint=python \\ @@ -66,7 +66,7 @@ def runTests = { Map settings -> throw new Exception("Need test image object, e.g.: `runTests(testImage: img)`") } if (!dockerVersion) { - throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '1.12.3')`") + throw new Exception("Need Docker version to test, e.g.: `runTests(dockerVersion: '19.03.12')`") } if (!pythonVersion) { throw new Exception("Need Python version being tested, e.g.: `runTests(pythonVersion: 'py2.7')`") diff --git a/Makefile b/Makefile index 551868ec..4795c63c 100644 --- a/Makefile +++ b/Makefile @@ -42,7 +42,7 @@ integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} TEST_API_VERSION ?= 1.35 -TEST_ENGINE_VERSION ?= 19.03.5 +TEST_ENGINE_VERSION ?= 19.03.12 .PHONY: setup-network setup-network: From 281bc31e2178fde3c58735cdfbeea16407e21616 Mon Sep 17 00:00:00 2001 From: Niklas Saari Date: Wed, 26 Feb 2020 22:34:40 +0200 Subject: [PATCH 15/20] Disable compression by default when using get_archive method Signed-off-by: Niklas Saari --- docker/api/container.py | 12 ++++++++++-- docker/models/containers.py | 8 ++++++-- tests/unit/models_containers_test.py | 2 +- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index ee3b4c3f..cf5caebb 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -699,7 +699,8 @@ class ContainerApiMixin(object): return self._stream_raw_result(res, chunk_size, False) @utils.check_resource('container') - def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE): + def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE, + encode_stream=False): """ Retrieve a file or folder from a container in the form of a tar archive. @@ -710,6 +711,8 @@ class ContainerApiMixin(object): chunk_size (int): The number of bytes returned by each iteration of the generator. 
If ``None``, data will be streamed as it is received. Default: 2 MB + encode_stream (bool): Determines if data should be encoded + (gzip-compressed) during transmission. Default: False Returns: (tuple): First element is a raw tar data stream. Second element is @@ -734,8 +737,13 @@ class ContainerApiMixin(object): params = { 'path': path } + headers = { + "Accept-Encoding": "gzip, deflate" + } if encode_stream else { + "Accept-Encoding": "identity" + } url = self._url('/containers/{0}/archive', container) - res = self._get(url, params=params, stream=True) + res = self._get(url, params=params, stream=True, headers=headers) self._raise_for_status(res) encoded_stat = res.headers.get('x-docker-container-path-stat') return ( diff --git a/docker/models/containers.py b/docker/models/containers.py index e8082ba4..0c2b855a 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -225,7 +225,8 @@ class Container(Model): """ return self.client.api.export(self.id, chunk_size) - def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE): + def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE, + encode_stream=False): """ Retrieve a file or folder from the container in the form of a tar archive. @@ -235,6 +236,8 @@ class Container(Model): chunk_size (int): The number of bytes returned by each iteration of the generator. If ``None``, data will be streamed as it is received. Default: 2 MB + encode_stream (bool): Determines if data should be encoded + (gzip-compressed) during transmission. Default: False Returns: (tuple): First element is a raw tar data stream. Second element is @@ -255,7 +258,8 @@ class Container(Model): ... f.write(chunk) >>> f.close() """ - return self.client.api.get_archive(self.id, path, chunk_size) + return self.client.api.get_archive(self.id, path, + chunk_size, encode_stream) def kill(self, signal=None): """ diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index da5f0ab9..c9f73f37 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -450,7 +450,7 @@ class ContainerTest(unittest.TestCase): container = client.containers.get(FAKE_CONTAINER_ID) container.get_archive('foo') client.api.get_archive.assert_called_with( - FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE + FAKE_CONTAINER_ID, 'foo', DEFAULT_DATA_CHUNK_SIZE, False ) def test_image(self): From 88234f39160ac214c352c094f220dea9062bb4f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ville=20Skytt=C3=A4?= Date: Mon, 25 May 2020 08:31:24 +0300 Subject: [PATCH 16/20] Fix parameter names in TLSConfig error messages and comments MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Ville Skyttä --- docker/tls.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/tls.py b/docker/tls.py index d4671d12..1b297ab6 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -32,7 +32,7 @@ class TLSConfig(object): # https://docs.docker.com/engine/articles/https/ # This diverges from the Docker CLI in that users can specify 'tls' # here, but also disable any public/default CA pool verification by - # leaving tls_verify=False + # leaving verify=False self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint @@ -62,7 +62,7 @@ class TLSConfig(object): # https://github.com/docker/docker-py/issues/963 self.ssl_version = ssl.PROTOCOL_TLSv1 - # "tls" and "tls_verify" must have both or neither cert/key files In + # "client_cert" must have both 
or neither cert/key files. In # either case, Alert the user when both are expected, but any are # missing. @@ -71,7 +71,7 @@ class TLSConfig(object): tls_cert, tls_key = client_cert except ValueError: raise errors.TLSParameterError( - 'client_config must be a tuple of' + 'client_cert must be a tuple of' ' (client certificate, key file)' ) @@ -79,7 +79,7 @@ class TLSConfig(object): not os.path.isfile(tls_key)): raise errors.TLSParameterError( 'Path to a certificate and key files must be provided' - ' through the client_config param' + ' through the client_cert param' ) self.cert = (tls_cert, tls_key) @@ -88,7 +88,7 @@ class TLSConfig(object): self.ca_cert = ca_cert if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert): raise errors.TLSParameterError( - 'Invalid CA certificate provided for `tls_ca_cert`.' + 'Invalid CA certificate provided for `ca_cert`.' ) def configure_client(self, client): From e6a64e36712f4505d77c6853d5b1606e598fcb2f Mon Sep 17 00:00:00 2001 From: Sebastiaan van Stijn Date: Fri, 7 Aug 2020 16:45:20 +0200 Subject: [PATCH 17/20] Update default API version to v1.39 (#2512) * Update default API version to v1.39 When running the docker-py integration tests in the Moby repository, some tests were skipped because the API version used was too low: SKIPPED [1] tests/integration/api_service_test.py:882: API version is too low (< 1.38) SKIPPED [1] tests/integration/api_swarm_test.py:59: API version is too low (< 1.39) SKIPPED [1] tests/integration/api_swarm_test.py:38: API version is too low (< 1.39) SKIPPED [1] tests/integration/api_swarm_test.py:45: API version is too low (< 1.39) SKIPPED [1] tests/integration/api_swarm_test.py:52: API version is too low (< 1.39) While it's possible to override the API version to use for testing using the `DOCKER_TEST_API_VERSION` environment variable, we may want to set the default to a version that supports all features that were added. This patch updates the default API version to v1.39, which is the minimum version required for those features, and corresponds with Docker 18.09. Note that the API version of the current (19.03) Docker release is v1.40, but using that version as default would exclude users that did not update their Docker version yet (and would not be needed yet for the features provided). 
Signed-off-by: Sebastiaan van Stijn * Makefile: set DOCKER_TEST_API_VERSION to v1.39 Signed-off-by: Sebastiaan van Stijn --- Makefile | 2 +- docker/constants.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 4795c63c..6765d4d7 100644 --- a/Makefile +++ b/Makefile @@ -41,7 +41,7 @@ integration-test: build integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} -TEST_API_VERSION ?= 1.35 +TEST_API_VERSION ?= 1.39 TEST_ENGINE_VERSION ?= 19.03.12 .PHONY: setup-network diff --git a/docker/constants.py b/docker/constants.py index e4daed5d..c09eedab 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -1,7 +1,7 @@ import sys from .version import version -DEFAULT_DOCKER_API_VERSION = '1.35' +DEFAULT_DOCKER_API_VERSION = '1.39' MINIMUM_DOCKER_API_VERSION = '1.21' DEFAULT_TIMEOUT_SECONDS = 60 STREAM_HEADER_SIZE_BYTES = 8 From 2d219ff739cbb743db5fd6d5e2988cede39a03e8 Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 10 Aug 2020 15:29:34 +0200 Subject: [PATCH 18/20] Prepare release 4.3.0 Signed-off-by: aiordache --- docker/version.py | 2 +- docs/change-log.md | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index a7546092..29c6b00e 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.3.0-dev" +version = "4.3.0" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) diff --git a/docs/change-log.md b/docs/change-log.md index 3f616904..183ea9e4 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,21 @@ Change log ========== +4.3.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/66?closed=1) + +### Features +- Add `DeviceRequest` type to expose host resources such as GPUs +- Add support for `DriverOpts` in EndpointConfig +- Disable compression by default when using container.get_archive method + +### Miscellaneous +- Update default API version to v1.39 +- Update test engine version to 19.03.12 + + 4.2.2 ----- From 7e967c9e80bf606ea15795ed190fb0778a53954f Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 10 Aug 2020 18:15:18 +0200 Subject: [PATCH 19/20] Fix merge Signed-off-by: aiordache --- docker/context/context.py | 35 ----------------------------------- 1 file changed, 35 deletions(-) diff --git a/docker/context/context.py b/docker/context/context.py index 8158803d..2413b2ec 100644 --- a/docker/context/context.py +++ b/docker/context/context.py @@ -186,8 +186,6 @@ class Context: @property def Host(self): -<<<<<<< HEAD -<<<<<<< HEAD if not self.orchestrator or self.orchestrator == "swarm": endpoint = self.endpoints.get("docker", None) if endpoint: @@ -195,14 +193,6 @@ class Context: return None return self.endpoints[self.orchestrator].get("Host", None) -======= - if self.orchestrator == "swarm": -======= - if not self.orchestrator or self.orchestrator == "swarm": ->>>>>>> 1e11ece... Make orchestrator field optional - return self.endpoints["docker"]["Host"] - return self.endpoints[self.orchestrator]["Host"] ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. 
@property def Orchestrator(self): @@ -210,44 +200,19 @@ class Context: @property def Metadata(self): -<<<<<<< HEAD -<<<<<<< HEAD meta = {} if self.orchestrator: meta = {"StackOrchestrator": self.orchestrator} return { "Name": self.name, "Metadata": meta, -======= - return { - "Name": self.name, - "Metadata": { - "StackOrchestrator": self.orchestrator - }, ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. -======= - meta = {} - if self.orchestrator: - meta = {"StackOrchestrator": self.orchestrator} - return { - "Name": self.name, - "Metadata": meta, ->>>>>>> 1e11ece... Make orchestrator field optional "Endpoints": self.endpoints } @property def TLSConfig(self): key = self.orchestrator -<<<<<<< HEAD -<<<<<<< HEAD if not key or key == "swarm": -======= - if key == "swarm": ->>>>>>> 64fdb32... Implement context management, lifecycle and unittests. -======= - if not key or key == "swarm": ->>>>>>> 1e11ece... Make orchestrator field optional key = "docker" if key in self.tls_cfg.keys(): return self.tls_cfg[key] From 7db995b1ce94d8ec012793037acb789c17f95779 Mon Sep 17 00:00:00 2001 From: aiordache Date: Mon, 10 Aug 2020 18:21:57 +0200 Subject: [PATCH 20/20] Fix changelog merge Signed-off-by: aiordache --- docs/change-log.md | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/docs/change-log.md b/docs/change-log.md index 183ea9e4..cecce9d8 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -4,7 +4,7 @@ Change log 4.3.0 ----- -[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/66?closed=1) +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/64?closed=1) ### Features - Add `DeviceRequest` type to expose host resources such as GPUs @@ -51,10 +51,6 @@ Change log - Add support for docker contexts through `docker.ContextAPI` -<<<<<<< HEAD - -======= ->>>>>>> 7c4194c... Post release 4.2.0 update: 4.1.0 -----
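
For reference, a minimal usage sketch of the two headline features this series adds (the DeviceRequest type and DriverOpts on EndpointConfig). This is illustrative only and not part of any patch above; it assumes docker-py 4.3.0 with these changes installed, a daemon speaking API 1.40 or later, and, for the GPU request, a host with the NVIDIA container runtime. The image name, network name, and driver option key are placeholders.

    import docker
    from docker.types import DeviceRequest

    # Negotiate the API version with the daemon; device_requests requires API >= 1.40,
    # while the client default in this series is 1.39.
    client = docker.from_env(version="auto")

    # Request all available GPUs through the new DeviceRequest type.
    container = client.containers.run(
        "nvidia/cuda:10.2-base", "nvidia-smi", detach=True,
        device_requests=[DeviceRequest(count=-1, capabilities=[["gpu"]])],
    )

    # Pass driver-specific options when connecting a container to a network
    # (DriverOpts, Docker API >= 1.32).
    network = client.networks.create("example-net", driver="bridge")
    network.connect(container, driver_opt={"com.example.setting": "on"})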