From 28c9100a7c57c856686e2db0a83853343f6d03bb Mon Sep 17 00:00:00 2001 From: Christopher Crone Date: Thu, 10 Jan 2019 18:26:02 +0100 Subject: [PATCH 01/56] Bump to next dev version Signed-off-by: Christopher Crone --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index c3edb8a3..b6302bad 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "3.7.0" +version = "3.8.0-dev" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From b6f6e7270ef1acfe7398b99b575d22d0d37ae8bf Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Fri, 11 Jan 2019 16:39:16 -0800 Subject: [PATCH 02/56] Add registry auth header to inspect_distribution requests Update docstring for auth_config parameter in pull, push, and inspect_distribution Signed-off-by: Joffrey F --- docker/api/image.py | 33 +++++++++++++++++++++++---------- docker/models/images.py | 15 +++++++++------ 2 files changed, 32 insertions(+), 16 deletions(-) diff --git a/docker/api/image.py b/docker/api/image.py index d3fed5c0..b370b7d8 100644 --- a/docker/api/image.py +++ b/docker/api/image.py @@ -247,12 +247,15 @@ class ImageApiMixin(object): @utils.minimum_version('1.30') @utils.check_resource('image') - def inspect_distribution(self, image): + def inspect_distribution(self, image, auth_config=None): """ Get image digest and platform information by contacting the registry. Args: image (str): The image name to inspect + auth_config (dict): Override the credentials that are found in the + config for this request. ``auth_config`` should contain the + ``username`` and ``password`` keys to be valid. Returns: (dict): A dict containing distribution data @@ -261,9 +264,21 @@ class ImageApiMixin(object): :py:class:`docker.errors.APIError` If the server returns an error. 
""" + registry, _ = auth.resolve_repository_name(image) + + headers = {} + if auth_config is None: + header = auth.get_config_header(self, registry) + if header: + headers['X-Registry-Auth'] = header + else: + log.debug('Sending supplied auth config') + headers['X-Registry-Auth'] = auth.encode_header(auth_config) + + url = self._url("/distribution/{0}/json", image) return self._result( - self._get(self._url("/distribution/{0}/json", image)), True + self._get(url, headers=headers), True ) def load_image(self, data, quiet=None): @@ -336,10 +351,9 @@ class ImageApiMixin(object): tag (str): The tag to pull stream (bool): Stream the output as a generator. Make sure to consume the generator, otherwise pull might get cancelled. - auth_config (dict): Override the credentials that - :py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for - this request. ``auth_config`` should contain the ``username`` - and ``password`` keys to be valid. + auth_config (dict): Override the credentials that are found in the + config for this request. ``auth_config`` should contain the + ``username`` and ``password`` keys to be valid. decode (bool): Decode the JSON data from the server into dicts. Only applies with ``stream=True`` platform (str): Platform in the format ``os[/arch[/variant]]`` @@ -414,10 +428,9 @@ class ImageApiMixin(object): repository (str): The repository to push to tag (str): An optional tag to push stream (bool): Stream the output as a blocking generator - auth_config (dict): Override the credentials that - :py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for - this request. ``auth_config`` should contain the ``username`` - and ``password`` keys to be valid. + auth_config (dict): Override the credentials that are found in the + config for this request. ``auth_config`` should contain the + ``username`` and ``password`` keys to be valid. decode (bool): Decode the JSON data from the server into dicts. 
Only applies with ``stream=True`` diff --git a/docker/models/images.py b/docker/models/images.py index af94520d..54196829 100644 --- a/docker/models/images.py +++ b/docker/models/images.py @@ -315,22 +315,26 @@ class ImageCollection(Collection): """ return self.prepare_model(self.client.api.inspect_image(name)) - def get_registry_data(self, name): + def get_registry_data(self, name, auth_config=None): """ Gets the registry data for an image. Args: name (str): The name of the image. + auth_config (dict): Override the credentials that are found in the + config for this request. ``auth_config`` should contain the + ``username`` and ``password`` keys to be valid. Returns: (:py:class:`RegistryData`): The data object. + Raises: :py:class:`docker.errors.APIError` If the server returns an error. """ return RegistryData( image_name=name, - attrs=self.client.api.inspect_distribution(name), + attrs=self.client.api.inspect_distribution(name, auth_config), client=self.client, collection=self, ) @@ -404,10 +408,9 @@ class ImageCollection(Collection): Args: repository (str): The repository to pull tag (str): The tag to pull - auth_config (dict): Override the credentials that - :py:meth:`~docker.client.DockerClient.login` has set for - this request. ``auth_config`` should contain the ``username`` - and ``password`` keys to be valid. + auth_config (dict): Override the credentials that are found in the + config for this request. ``auth_config`` should contain the + ``username`` and ``password`` keys to be valid. 
platform (str): Platform in the format ``os[/arch[/variant]]`` Returns: From 24f7c6db669de9bd01baf449d82d0d264316ada1 Mon Sep 17 00:00:00 2001 From: wvaske Date: Thu, 17 Jan 2019 10:40:06 -0600 Subject: [PATCH 03/56] Added missing options from RUN_HOST_CONFIG_KWARGS list in docker.models.containers to the docstring for client.containers.run() Signed-off-by: wvaske --- docker/models/containers.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docker/models/containers.py b/docker/models/containers.py index 10f667d7..86cb1535 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -540,12 +540,15 @@ class ContainerCollection(Collection): cap_add (list of str): Add kernel capabilities. For example, ``["SYS_ADMIN", "MKNOD"]``. cap_drop (list of str): Drop kernel capabilities. + cgroup_parent (str): Override the default parent cgroup. cpu_count (int): Number of usable CPUs (Windows only). cpu_percent (int): Usable percentage of the available CPUs (Windows only). cpu_period (int): The length of a CPU period in microseconds. cpu_quota (int): Microseconds of CPU time that the container can get in a CPU period. + cpu_rt_period (int): Limit CPU real-time period in microseconds. + cpu_rt_runtime (int): Limit CPU real-time runtime in microseconds. cpu_shares (int): CPU shares (relative weight). cpuset_cpus (str): CPUs in which to allow execution (``0-3``, ``0,1``). @@ -589,6 +592,7 @@ class ContainerCollection(Collection): init_path (str): Path to the docker-init binary ipc_mode (str): Set the IPC mode for the container. isolation (str): Isolation technology to use. Default: `None`. + kernel_memory (int or str): Kernel memory limit labels (dict or list): A dictionary of name-value labels (e.g. ``{"label1": "value1", "label2": "value2"}``) or a list of names of labels to set with empty values (e.g. 
@@ -598,6 +602,7 @@ class ContainerCollection(Collection): Containers declared in this dict will be linked to the new container using the provided alias. Default: ``None``. log_config (LogConfig): Logging configuration. + lxc_conf (dict): LXC config. mac_address (str): MAC address to assign to the container. mem_limit (int or str): Memory limit. Accepts float values (which represent the memory limit of the created container in @@ -605,6 +610,7 @@ class ContainerCollection(Collection): (``100000b``, ``1000k``, ``128m``, ``1g``). If a string is specified without a units character, bytes are assumed as an intended unit. + mem_reservation (int or str): Memory soft limit mem_swappiness (int): Tune a container's memory swappiness behavior. Accepts number between 0 and 100. memswap_limit (str or int): Maximum amount of memory + swap a @@ -718,6 +724,10 @@ class ContainerCollection(Collection): userns_mode (str): Sets the user namespace mode for the container when user namespace remapping option is enabled. Supported values are: ``host`` + uts_mode (str): Sets the UTS namespace mode for the container. + Supported values are: ``host`` + version (str): The version of the API to use. Set to ``auto`` to + automatically detect the server's version. Default: ``1.30`` volume_driver (str): The name of a volume driver/plugin. volumes (dict or list): A dictionary to configure volumes mounted inside the container. 
The key is either the host path or a From d429a823ed33032d1980903922251996c551ca5c Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Fri, 18 Jan 2019 21:50:31 +0100 Subject: [PATCH 04/56] Make PlacementPreference build correct context Signed-off-by: Hannes Ljungberg --- docker/types/services.py | 2 +- tests/integration/api_service_test.py | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/docker/types/services.py b/docker/types/services.py index ac1c181a..a0721f60 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -692,7 +692,7 @@ class PlacementPreference(dict): 'PlacementPreference strategy value is invalid ({}):' ' must be "spread".'.format(strategy) ) - self['SpreadOver'] = descriptor + self['Spread'] = {'SpreadDescriptor': descriptor} class DNSConfig(dict): diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index a53ca1c8..57a8d331 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -427,6 +427,21 @@ class ServiceTest(BaseAPIIntegrationTest): assert 'Placement' in svc_info['Spec']['TaskTemplate'] assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt + @requires_api_version('1.27') + def test_create_service_with_placement_preferences_tuple(self): + container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) + placemt = docker.types.Placement(preferences=( + ('spread', 'com.dockerpy.test'), + )) + task_tmpl = docker.types.TaskTemplate( + container_spec, placement=placemt + ) + name = self.get_service_name() + svc_id = self.client.create_service(task_tmpl, name=name) + svc_info = self.client.inspect_service(svc_id) + assert 'Placement' in svc_info['Spec']['TaskTemplate'] + assert svc_info['Spec']['TaskTemplate']['Placement'] == placemt + def test_create_service_with_endpoint_spec(self): container_spec = docker.types.ContainerSpec(BUSYBOX, ['true']) task_tmpl = docker.types.TaskTemplate(container_spec) From 
6935ce88192db5b0465d53e3005e15be47f4ed58 Mon Sep 17 00:00:00 2001 From: Tsuyoshi Hombashi Date: Sat, 26 Jan 2019 12:39:14 +0900 Subject: [PATCH 05/56] Fix descriptions of the default API version in docs 1.30 -> 1.35 Signed-off-by: Tsuyoshi Hombashi --- docker/api/client.py | 2 +- docker/client.py | 4 ++-- docker/models/containers.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/api/client.py b/docker/api/client.py index 668dfeef..9b705549 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -82,7 +82,7 @@ class APIClient( base_url (str): URL to the Docker server. For example, ``unix:///var/run/docker.sock`` or ``tcp://127.0.0.1:1234``. version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.30`` + automatically detect the server's version. Default: ``1.35`` timeout (int): Default timeout for API calls, in seconds. tls (bool or :py:class:`~docker.tls.TLSConfig`): Enable TLS. Pass ``True`` to enable it with default options, or pass a diff --git a/docker/client.py b/docker/client.py index 8d4a52b2..99ae1962 100644 --- a/docker/client.py +++ b/docker/client.py @@ -26,7 +26,7 @@ class DockerClient(object): base_url (str): URL to the Docker server. For example, ``unix:///var/run/docker.sock`` or ``tcp://127.0.0.1:1234``. version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.30`` + automatically detect the server's version. Default: ``1.35`` timeout (int): Default timeout for API calls, in seconds. tls (bool or :py:class:`~docker.tls.TLSConfig`): Enable TLS. Pass ``True`` to enable it with default options, or pass a @@ -62,7 +62,7 @@ class DockerClient(object): Args: version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.30`` + automatically detect the server's version. 
Default: ``1.35`` timeout (int): Default timeout for API calls, in seconds. ssl_version (int): A valid `SSL version`_. assert_hostname (bool): Verify the hostname of the server. diff --git a/docker/models/containers.py b/docker/models/containers.py index 86cb1535..089e78c7 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -727,7 +727,7 @@ class ContainerCollection(Collection): uts_mode (str): Sets the UTS namespace mode for the container. Supported values are: ``host`` version (str): The version of the API to use. Set to ``auto`` to - automatically detect the server's version. Default: ``1.30`` + automatically detect the server's version. Default: ``1.35`` volume_driver (str): The name of a volume driver/plugin. volumes (dict or list): A dictionary to configure volumes mounted inside the container. The key is either the host path or a From 189552eb57016c5a49a1ca6f0f48d616fe5c04d7 Mon Sep 17 00:00:00 2001 From: p1100i Date: Thu, 21 Feb 2019 07:55:38 +0100 Subject: [PATCH 06/56] Fix `network_mode` API documentation wording Signed-off-by: p1100i --- docker/api/container.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/api/container.py b/docker/api/container.py index 43ae5320..83e96577 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -512,7 +512,7 @@ class ContainerApiMixin(object): network_mode (str): One of: - ``bridge`` Create a new network stack for the container on - on the bridge network. + the bridge network. - ``none`` No networking for this container. - ``container:`` Reuse another container's network stack. 
From 37e096f6add7e26ada3d6840ce9a9ce341bbdf23 Mon Sep 17 00:00:00 2001 From: Leks Date: Fri, 1 Mar 2019 14:05:39 +0300 Subject: [PATCH 07/56] set buildargs default value if None Signed-off-by: Leks --- docker/api/build.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/api/build.py b/docker/api/build.py index 53c94b0d..5176afb3 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -121,6 +121,7 @@ class BuildApiMixin(object): remote = context = None headers = {} container_limits = container_limits or {} + buildargs = buildargs or {} if path is None and fileobj is None: raise TypeError("Either path or fileobj needs to be provided.") if gzip and encoding is not None: From 8d1e9670b1b4a5ee4ea3881236988bfbe40792be Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Wed, 13 Mar 2019 10:12:17 +0100 Subject: [PATCH 08/56] Return API response on service update Signed-off-by: Hannes Ljungberg --- docker/api/service.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docker/api/service.py b/docker/api/service.py index 08e25917..02f3380e 100644 --- a/docker/api/service.py +++ b/docker/api/service.py @@ -387,7 +387,7 @@ class ServiceApiMixin(object): current specification of the service. Default: ``False`` Returns: - ``True`` if successful. + A dictionary containing a ``Warnings`` key. 
Raises: :py:class:`docker.errors.APIError` @@ -471,5 +471,4 @@ class ServiceApiMixin(object): resp = self._post_json( url, data=data, params={'version': version}, headers=headers ) - self._raise_for_status(resp) - return True + return self._result(resp, json=True) From e48a1a94e6f76f9fd1b2882522eb714b1f70d5d6 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 12 Mar 2019 15:36:58 +0100 Subject: [PATCH 09/56] Sets a different default number of pools to SSH This is because default the number of connections in OpenSSH is 10 Signed-off-by: Ulysses Souza --- docker/api/client.py | 10 +++++++--- docker/constants.py | 6 ++++++ 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/docker/api/client.py b/docker/api/client.py index 9b705549..76096515 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -22,8 +22,8 @@ from .volume import VolumeApiMixin from .. import auth from ..constants import ( DEFAULT_TIMEOUT_SECONDS, DEFAULT_USER_AGENT, IS_WINDOWS_PLATFORM, - DEFAULT_DOCKER_API_VERSION, STREAM_HEADER_SIZE_BYTES, DEFAULT_NUM_POOLS, - MINIMUM_DOCKER_API_VERSION + DEFAULT_DOCKER_API_VERSION, MINIMUM_DOCKER_API_VERSION, + STREAM_HEADER_SIZE_BYTES, DEFAULT_NUM_POOLS_SSH, DEFAULT_NUM_POOLS ) from ..errors import ( DockerException, InvalidVersion, TLSParameterError, @@ -101,7 +101,7 @@ class APIClient( def __init__(self, base_url=None, version=None, timeout=DEFAULT_TIMEOUT_SECONDS, tls=False, - user_agent=DEFAULT_USER_AGENT, num_pools=DEFAULT_NUM_POOLS, + user_agent=DEFAULT_USER_AGENT, num_pools=None, credstore_env=None): super(APIClient, self).__init__() @@ -132,6 +132,10 @@ class APIClient( base_url = utils.parse_host( base_url, IS_WINDOWS_PLATFORM, tls=bool(tls) ) + # SSH has a different default for num_pools to all other adapters + num_pools = num_pools or DEFAULT_NUM_POOLS_SSH if \ + base_url.startswith('ssh://') else DEFAULT_NUM_POOLS + if base_url.startswith('http+unix://'): self._custom_adapter = UnixAdapter( base_url, timeout, 
pool_connections=num_pools diff --git a/docker/constants.py b/docker/constants.py index 1ab11ec0..dcba0de2 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -18,4 +18,10 @@ WINDOWS_LONGPATH_PREFIX = '\\\\?\\' DEFAULT_USER_AGENT = "docker-sdk-python/{0}".format(version) DEFAULT_NUM_POOLS = 25 + +# The OpenSSH server default value for MaxSessions is 10 which means we can +# use up to 9, leaving the final session for the underlying SSH connection. +# For more details see: https://github.com/docker/docker-py/issues/2246 +DEFAULT_NUM_POOLS_SSH = 9 + DEFAULT_DATA_CHUNK_SIZE = 1024 * 2048 From 4d7d4084138fa6161ef5f31d410b0d326d41f777 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Wed, 13 Mar 2019 19:07:50 +0100 Subject: [PATCH 10/56] Homogenize adapters close() behaviour. - Adds a BaseHTTPAdapter with a close method to ensure that the pools is clean on close() - Makes SSHHTTPAdapter reopen a closed connection when needed like the others Signed-off-by: Ulysses Souza --- docker/api/client.py | 15 ++++++++------- docker/tls.py | 4 ++-- docker/transport/__init__.py | 8 ++++---- docker/transport/basehttpadapter.py | 6 ++++++ docker/transport/npipeconn.py | 8 +++----- docker/transport/sshconn.py | 23 ++++++++++++++++------- docker/transport/ssladapter.py | 8 +++++--- docker/transport/unixconn.py | 8 +++----- 8 files changed, 47 insertions(+), 33 deletions(-) create mode 100644 docker/transport/basehttpadapter.py diff --git a/docker/api/client.py b/docker/api/client.py index 76096515..35dc84e7 100644 --- a/docker/api/client.py +++ b/docker/api/client.py @@ -30,18 +30,18 @@ from ..errors import ( create_api_error_from_http_exception ) from ..tls import TLSConfig -from ..transport import SSLAdapter, UnixAdapter +from ..transport import SSLHTTPAdapter, UnixHTTPAdapter from ..utils import utils, check_resource, update_headers, config from ..utils.socket import frames_iter, consume_socket_output, demux_adaptor from ..utils.json_stream import json_stream from 
..utils.proxy import ProxyConfig try: - from ..transport import NpipeAdapter + from ..transport import NpipeHTTPAdapter except ImportError: pass try: - from ..transport import SSHAdapter + from ..transport import SSHHTTPAdapter except ImportError: pass @@ -137,7 +137,7 @@ class APIClient( base_url.startswith('ssh://') else DEFAULT_NUM_POOLS if base_url.startswith('http+unix://'): - self._custom_adapter = UnixAdapter( + self._custom_adapter = UnixHTTPAdapter( base_url, timeout, pool_connections=num_pools ) self.mount('http+docker://', self._custom_adapter) @@ -151,7 +151,7 @@ class APIClient( 'The npipe:// protocol is only supported on Windows' ) try: - self._custom_adapter = NpipeAdapter( + self._custom_adapter = NpipeHTTPAdapter( base_url, timeout, pool_connections=num_pools ) except NameError: @@ -162,7 +162,7 @@ class APIClient( self.base_url = 'http+docker://localnpipe' elif base_url.startswith('ssh://'): try: - self._custom_adapter = SSHAdapter( + self._custom_adapter = SSHHTTPAdapter( base_url, timeout, pool_connections=num_pools ) except NameError: @@ -177,7 +177,8 @@ class APIClient( if isinstance(tls, TLSConfig): tls.configure_client(self) elif tls: - self._custom_adapter = SSLAdapter(pool_connections=num_pools) + self._custom_adapter = SSLHTTPAdapter( + pool_connections=num_pools) self.mount('https://', self._custom_adapter) self.base_url = base_url diff --git a/docker/tls.py b/docker/tls.py index 4900e9fd..d4671d12 100644 --- a/docker/tls.py +++ b/docker/tls.py @@ -2,7 +2,7 @@ import os import ssl from . 
import errors -from .transport import SSLAdapter +from .transport import SSLHTTPAdapter class TLSConfig(object): @@ -105,7 +105,7 @@ class TLSConfig(object): if self.cert: client.cert = self.cert - client.mount('https://', SSLAdapter( + client.mount('https://', SSLHTTPAdapter( ssl_version=self.ssl_version, assert_hostname=self.assert_hostname, assert_fingerprint=self.assert_fingerprint, diff --git a/docker/transport/__init__.py b/docker/transport/__init__.py index d2cf2a7a..e37fc3ba 100644 --- a/docker/transport/__init__.py +++ b/docker/transport/__init__.py @@ -1,13 +1,13 @@ # flake8: noqa -from .unixconn import UnixAdapter -from .ssladapter import SSLAdapter +from .unixconn import UnixHTTPAdapter +from .ssladapter import SSLHTTPAdapter try: - from .npipeconn import NpipeAdapter + from .npipeconn import NpipeHTTPAdapter from .npipesocket import NpipeSocket except ImportError: pass try: - from .sshconn import SSHAdapter + from .sshconn import SSHHTTPAdapter except ImportError: pass diff --git a/docker/transport/basehttpadapter.py b/docker/transport/basehttpadapter.py new file mode 100644 index 00000000..d10c115b --- /dev/null +++ b/docker/transport/basehttpadapter.py @@ -0,0 +1,6 @@ +import requests.adapters + + +class BaseHTTPAdapter(requests.adapters.HTTPAdapter): + def close(self): + self.pools.clear() diff --git a/docker/transport/npipeconn.py b/docker/transport/npipeconn.py index ab9b9048..aa05538d 100644 --- a/docker/transport/npipeconn.py +++ b/docker/transport/npipeconn.py @@ -1,6 +1,7 @@ import six import requests.adapters +from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants from .npipesocket import NpipeSocket @@ -68,7 +69,7 @@ class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): return conn or self._new_conn() -class NpipeAdapter(requests.adapters.HTTPAdapter): +class NpipeHTTPAdapter(BaseHTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['npipe_path', 'pools', @@ -81,7 +82,7 @@ class NpipeAdapter(requests.adapters.HTTPAdapter): self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(NpipeAdapter, self).__init__() + super(NpipeHTTPAdapter, self).__init__() def get_connection(self, url, proxies=None): with self.pools.lock: @@ -103,6 +104,3 @@ class NpipeAdapter(requests.adapters.HTTPAdapter): # anyway, we simply return the path URL directly. # See also: https://github.com/docker/docker-sdk-python/issues/811 return request.path_url - - def close(self): - self.pools.clear() diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py index 0f6bb51f..5a8ceb08 100644 --- a/docker/transport/sshconn.py +++ b/docker/transport/sshconn.py @@ -2,6 +2,7 @@ import paramiko import requests.adapters import six +from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants if six.PY3: @@ -68,7 +69,7 @@ class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool): return conn or self._new_conn() -class SSHAdapter(requests.adapters.HTTPAdapter): +class SSHHTTPAdapter(BaseHTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [ 'pools', 'timeout', 'ssh_client', @@ -79,15 +80,19 @@ class SSHAdapter(requests.adapters.HTTPAdapter): self.ssh_client = paramiko.SSHClient() self.ssh_client.load_system_host_keys() - parsed = six.moves.urllib_parse.urlparse(base_url) - self.ssh_client.connect( - parsed.hostname, parsed.port, parsed.username, - ) + self.base_url = base_url + self._connect() self.timeout = timeout self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(SSHAdapter, self).__init__() + super(SSHHTTPAdapter, self).__init__() + + def _connect(self): + parsed = six.moves.urllib_parse.urlparse(self.base_url) + self.ssh_client.connect( + parsed.hostname, parsed.port, parsed.username, + ) def get_connection(self, url, proxies=None): with self.pools.lock: @@ -95,6 +100,10 @@ class SSHAdapter(requests.adapters.HTTPAdapter): if pool: return pool + # Connection is closed try a reconnect + if not self.ssh_client.get_transport(): + self._connect() + pool = SSHConnectionPool( self.ssh_client, self.timeout ) @@ -103,5 +112,5 @@ class SSHAdapter(requests.adapters.HTTPAdapter): return pool def close(self): - self.pools.clear() + super(SSHHTTPAdapter, self).close() self.ssh_client.close() diff --git a/docker/transport/ssladapter.py b/docker/transport/ssladapter.py index 8fafec35..12de76cd 100644 --- a/docker/transport/ssladapter.py +++ b/docker/transport/ssladapter.py @@ -7,6 +7,8 @@ import sys from distutils.version import StrictVersion from requests.adapters import HTTPAdapter +from docker.transport.basehttpadapter import BaseHTTPAdapter + try: import requests.packages.urllib3 as urllib3 except ImportError: @@ -22,7 +24,7 @@ if sys.version_info[0] < 3 or 
sys.version_info[1] < 5: urllib3.connection.match_hostname = match_hostname -class SSLAdapter(HTTPAdapter): +class SSLHTTPAdapter(BaseHTTPAdapter): '''An HTTPS Transport Adapter that uses an arbitrary SSL version.''' __attrs__ = HTTPAdapter.__attrs__ + ['assert_fingerprint', @@ -34,7 +36,7 @@ class SSLAdapter(HTTPAdapter): self.ssl_version = ssl_version self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint - super(SSLAdapter, self).__init__(**kwargs) + super(SSLHTTPAdapter, self).__init__(**kwargs) def init_poolmanager(self, connections, maxsize, block=False): kwargs = { @@ -57,7 +59,7 @@ class SSLAdapter(HTTPAdapter): But we still need to take care of when there is a proxy poolmanager """ - conn = super(SSLAdapter, self).get_connection(*args, **kwargs) + conn = super(SSLHTTPAdapter, self).get_connection(*args, **kwargs) if conn.assert_hostname != self.assert_hostname: conn.assert_hostname = self.assert_hostname return conn diff --git a/docker/transport/unixconn.py b/docker/transport/unixconn.py index c59821a8..b6191032 100644 --- a/docker/transport/unixconn.py +++ b/docker/transport/unixconn.py @@ -3,6 +3,7 @@ import requests.adapters import socket from six.moves import http_client as httplib +from docker.transport.basehttpadapter import BaseHTTPAdapter from .. 
import constants try: @@ -69,7 +70,7 @@ class UnixHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool): ) -class UnixAdapter(requests.adapters.HTTPAdapter): +class UnixHTTPAdapter(BaseHTTPAdapter): __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + ['pools', 'socket_path', @@ -85,7 +86,7 @@ class UnixAdapter(requests.adapters.HTTPAdapter): self.pools = RecentlyUsedContainer( pool_connections, dispose_func=lambda p: p.close() ) - super(UnixAdapter, self).__init__() + super(UnixHTTPAdapter, self).__init__() def get_connection(self, url, proxies=None): with self.pools.lock: @@ -107,6 +108,3 @@ class UnixAdapter(requests.adapters.HTTPAdapter): # anyway, we simply return the path URL directly. # See also: https://github.com/docker/docker-py/issues/811 return request.path_url - - def close(self): - self.pools.clear() From 55ffb761bffd8b6382332faf3e3375817706c690 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 13 Dec 2018 14:53:09 -0800 Subject: [PATCH 11/56] Terminate support for Python 3.3 (EOL in 2018) Signed-off-by: Joffrey F --- requirements.txt | 5 ++--- setup.py | 6 +----- test-requirements.txt | 6 ++---- 3 files changed, 5 insertions(+), 12 deletions(-) diff --git a/requirements.txt b/requirements.txt index f1c9bdbc..461bf530 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,8 +2,7 @@ appdirs==1.4.3 asn1crypto==0.22.0 backports.ssl-match-hostname==3.5.0.1 cffi==1.10.0 -cryptography==1.9; python_version == '3.3' -cryptography==2.3; python_version > '3.3' +cryptography==2.3 docker-pycreds==0.4.0 enum34==1.1.6 idna==2.5 @@ -17,5 +16,5 @@ pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' requests==2.20.0 six==1.10.0 +urllib3==1.24.1 websocket-client==0.40.0 -urllib3==1.21.1; python_version == '3.3' \ No newline at end of file diff --git a/setup.py b/setup.py index 94fbdf44..677bc204 100644 --- a/setup.py +++ b/setup.py @@ -29,9 +29,6 @@ extras_require = { 
':sys_platform == "win32" and python_version < "3.6"': 'pypiwin32==219', ':sys_platform == "win32" and python_version >= "3.6"': 'pypiwin32==223', - # urllib3 drops support for Python 3.3 in 1.23 - ':python_version == "3.3"': 'urllib3 < 1.23', - # If using docker-py over TLS, highly recommend this option is # pip-installed or pinned. @@ -75,7 +72,7 @@ setup( install_requires=requirements, tests_require=test_requirements, extras_require=extras_require, - python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*', + python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', zip_safe=False, test_suite='tests', classifiers=[ @@ -87,7 +84,6 @@ setup( 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', diff --git a/test-requirements.txt b/test-requirements.txt index 510fa295..df369881 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -2,8 +2,6 @@ coverage==4.5.2 flake8==3.6.0; python_version != '3.3' flake8==3.4.1; python_version == '3.3' mock==1.0.1 -pytest==2.9.1; python_version == '3.3' -pytest==4.1.0; python_version != '3.3' -pytest-cov==2.6.1; python_version != '3.3' -pytest-cov==2.5.1; python_version == '3.3' +pytest==4.1.0 +pytest-cov==2.6.1 pytest-timeout==1.3.3 From 5d76e8e13ea85dc583c805120db349a96917f312 Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Mon, 18 Mar 2019 15:34:36 +0100 Subject: [PATCH 12/56] Support sctp as protocol Signed-off-by: Hannes Ljungberg --- docker/api/container.py | 7 ++++--- docker/models/containers.py | 4 ++-- docker/utils/ports.py | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 83e96577..60691817 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -915,9 +915,10 @@ class 
ContainerApiMixin(object): if '/' in private_port: return port_settings.get(private_port) - h_ports = port_settings.get(private_port + '/tcp') - if h_ports is None: - h_ports = port_settings.get(private_port + '/udp') + for protocol in ['tcp', 'udp', 'sctp']: + h_ports = port_settings.get(private_port + '/' + protocol) + if h_ports: + break return h_ports diff --git a/docker/models/containers.py b/docker/models/containers.py index 089e78c7..effa1073 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -649,8 +649,8 @@ class ContainerCollection(Collection): The keys of the dictionary are the ports to bind inside the container, either as an integer or a string in the form - ``port/protocol``, where the protocol is either ``tcp`` or - ``udp``. + ``port/protocol``, where the protocol is either ``tcp``, + ``udp``, or ``sctp``. The values of the dictionary are the corresponding ports to open on the host, which can be either: diff --git a/docker/utils/ports.py b/docker/utils/ports.py index cf5987c9..a50cc029 100644 --- a/docker/utils/ports.py +++ b/docker/utils/ports.py @@ -7,7 +7,7 @@ PORT_SPEC = re.compile( r"(?P[\d]*)(-(?P[\d]+))?:" # External range ")?" r"(?P[\d]+)(-(?P[\d]+))?" # Internal range - "(?P/(udp|tcp))?" # Protocol + "(?P/(udp|tcp|sctp))?" 
# Protocol "$" # Match full string ) From 35714c46b10e9900c127341e5a91cabddbb4271d Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Mon, 18 Mar 2019 15:35:22 +0100 Subject: [PATCH 13/56] Test all split_port with all valid protocols Signed-off-by: Hannes Ljungberg --- tests/unit/utils_test.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index a4e9c9c5..3cb3be91 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -491,9 +491,12 @@ class PortsTest(unittest.TestCase): assert external_port == [("127.0.0.1", "1000")] def test_split_port_with_protocol(self): - internal_port, external_port = split_port("127.0.0.1:1000:2000/udp") - assert internal_port == ["2000/udp"] - assert external_port == [("127.0.0.1", "1000")] + for protocol in ['tcp', 'udp', 'sctp']: + internal_port, external_port = split_port( + "127.0.0.1:1000:2000/" + protocol + ) + assert internal_port == ["2000/" + protocol] + assert external_port == [("127.0.0.1", "1000")] def test_split_port_with_host_ip_no_port(self): internal_port, external_port = split_port("127.0.0.1::2000") @@ -546,6 +549,10 @@ class PortsTest(unittest.TestCase): with pytest.raises(ValueError): split_port("0.0.0.0:1000:2000:tcp") + def test_split_port_invalid_protocol(self): + with pytest.raises(ValueError): + split_port("0.0.0.0:1000:2000/ftp") + def test_non_matching_length_port_ranges(self): with pytest.raises(ValueError): split_port("0.0.0.0:1000-1010:2000-2002/tcp") From 7143cf02abece116df63de8a63956cfff185177c Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Mon, 18 Mar 2019 15:36:02 +0100 Subject: [PATCH 14/56] Test port lookup with protocols Signed-off-by: Hannes Ljungberg --- tests/integration/api_container_test.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 83df3424..eb3fd661 100644 --- 
a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1083,11 +1083,17 @@ class PortTest(BaseAPIIntegrationTest): port_bindings = { '1111': ('127.0.0.1', '4567'), - '2222': ('127.0.0.1', '4568') + '2222': ('127.0.0.1', '4568'), + '3333/udp': ('127.0.0.1', '4569'), } + ports = [ + 1111, + 2222, + (3333, 'udp'), + ] container = self.client.create_container( - BUSYBOX, ['sleep', '60'], ports=list(port_bindings.keys()), + BUSYBOX, ['sleep', '60'], ports=ports, host_config=self.client.create_host_config( port_bindings=port_bindings, network_mode='bridge' ) @@ -1098,13 +1104,15 @@ class PortTest(BaseAPIIntegrationTest): # Call the port function on each biding and compare expected vs actual for port in port_bindings: + port, _, protocol = port.partition('/') actual_bindings = self.client.port(container, port) port_binding = actual_bindings.pop() ip, host_port = port_binding['HostIp'], port_binding['HostPort'] - assert ip == port_bindings[port][0] - assert host_port == port_bindings[port][1] + port_binding = port if not protocol else port + "/" + protocol + assert ip == port_bindings[port_binding][0] + assert host_port == port_bindings[port_binding][1] self.client.kill(id) From 0f7af860d8df01d1c614b20d687ff6d0393d6938 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Mon, 18 Mar 2019 15:42:54 +0100 Subject: [PATCH 15/56] Fix BaseHTTPAdapter for the SSL case Signed-off-by: Ulysses Souza --- docker/transport/basehttpadapter.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/transport/basehttpadapter.py b/docker/transport/basehttpadapter.py index d10c115b..4d819b66 100644 --- a/docker/transport/basehttpadapter.py +++ b/docker/transport/basehttpadapter.py @@ -3,4 +3,6 @@ import requests.adapters class BaseHTTPAdapter(requests.adapters.HTTPAdapter): def close(self): - self.pools.clear() + super(BaseHTTPAdapter, self).close() + if hasattr(self, 'pools'): + self.pools.clear() From 
729c2e783079a9c2948318c70fe7aa22681f1ebe Mon Sep 17 00:00:00 2001 From: Yincen Xia Date: Tue, 19 Mar 2019 21:27:13 +0800 Subject: [PATCH 16/56] Update doc for container.exec_run & exec_api about demux Signed-off-by: Yincen Xia --- docker/api/exec_api.py | 3 ++- docker/models/containers.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docker/api/exec_api.py b/docker/api/exec_api.py index d13b1289..4c49ac33 100644 --- a/docker/api/exec_api.py +++ b/docker/api/exec_api.py @@ -137,7 +137,8 @@ class ExecApiMixin(object): (generator or str or tuple): If ``stream=True``, a generator yielding response chunks. If ``socket=True``, a socket object for the connection. A string containing response data otherwise. If - ``demux=True``, stdout and stderr are separated. + ``demux=True``, a tuple with two elements of type byte: stdout and + stderr. Raises: :py:class:`docker.errors.APIError` diff --git a/docker/models/containers.py b/docker/models/containers.py index 089e78c7..502251d5 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -173,9 +173,10 @@ class Container(Model): exit_code: (int): Exit code for the executed command or ``None`` if either ``stream```or ``socket`` is ``True``. - output: (generator or bytes): + output: (generator, bytes, or tuple): If ``stream=True``, a generator yielding response chunks. If ``socket=True``, a socket object for the connection. + If ``demux=True``, a tuple of two bytes: stdout and stderr. A bytestring containing response data otherwise. Raises: From 5d69a0a62e9294bdcedfa9cc6d6d4d1f7c4fe961 Mon Sep 17 00:00:00 2001 From: Barry Shapira Date: Tue, 11 Dec 2018 22:06:59 -0800 Subject: [PATCH 17/56] Added arguments to create a swarm with a custom address pool and subnet size.
Signed-off-by: Barry Shapira --- docker/api/swarm.py | 9 +++++++++ docker/models/swarm.py | 10 ++++++++++ tests/integration/api_swarm_test.py | 23 +++++++++++++++++++++++ 3 files changed, 42 insertions(+) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index 04595da1..bec3efdf 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -82,6 +82,7 @@ class SwarmApiMixin(object): @utils.minimum_version('1.24') def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377', + default_addr_pool=[], subnet_size=24, force_new_cluster=False, swarm_spec=None): """ Initialize a new Swarm using the current connected engine as the first @@ -102,6 +103,12 @@ class SwarmApiMixin(object): or an interface followed by a port number, like ``eth0:4567``. If the port number is omitted, the default swarm listening port is used. Default: '0.0.0.0:2377' + default_addr_pool (list of strings): Default Address Pool specifies + default subnet pools for global scope networks. Each pool + should be specified as a CIDR block, like '10.0.0.0/16'. + Default: [] + subnet_size (int): SubnetSize specifies the subnet size of the + networks created from the default subnet pool. Default: 24 force_new_cluster (bool): Force creating a new Swarm, even if already part of one. Default: False swarm_spec (dict): Configuration settings of the new Swarm. 
Use @@ -122,6 +129,8 @@ class SwarmApiMixin(object): data = { 'AdvertiseAddr': advertise_addr, 'ListenAddr': listen_addr, + 'DefaultAddrPool': default_addr_pool, + 'SubnetSize': subnet_size, 'ForceNewCluster': force_new_cluster, 'Spec': swarm_spec, } diff --git a/docker/models/swarm.py b/docker/models/swarm.py index 3a02ae37..e39e6f35 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -34,6 +34,7 @@ class Swarm(Model): get_unlock_key.__doc__ = APIClient.get_unlock_key.__doc__ def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377', + default_addr_pool=[], subnet_size=24, force_new_cluster=False, **kwargs): """ Initialize a new swarm on this Engine. @@ -54,6 +55,12 @@ class Swarm(Model): or an interface followed by a port number, like ``eth0:4567``. If the port number is omitted, the default swarm listening port is used. Default: ``0.0.0.0:2377`` + default_addr_pool (list of str): Default Address Pool specifies + default subnet pools for global scope networks. Each pool + should be specified as a CIDR block, like '10.0.0.0/16'. + Default: [] + subnet_size (int): SubnetSize specifies the subnet size of the + networks created from the default subnet pool. Default: 24 force_new_cluster (bool): Force creating a new Swarm, even if already part of one. 
Default: False task_history_retention_limit (int): Maximum number of tasks @@ -99,6 +106,7 @@ class Swarm(Model): >>> client.swarm.init( advertise_addr='eth0', listen_addr='0.0.0.0:5000', + default_addr_pool=['10.20.0.0/16], subnet_size=24, force_new_cluster=False, snapshot_interval=5000, log_entries_for_slow_followers=1200 ) @@ -107,6 +115,8 @@ class Swarm(Model): init_kwargs = { 'advertise_addr': advertise_addr, 'listen_addr': listen_addr, + 'default_addr_pool': default_addr_pool, + 'subnet_size': subnet_size, 'force_new_cluster': force_new_cluster } init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs) diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index b58dabc6..5ef651d2 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -35,6 +35,29 @@ class SwarmTest(BaseAPIIntegrationTest): version_2 = self.client.inspect_swarm()['Version']['Index'] assert version_2 != version_1 + @requires_api_version('1.39') + def test_init_swarm_custom_addr_pool(self): + assert self.init_swarm() + results_1 = self.client.inspect_swarm() + assert results_1['DefaultAddrPool'] is None + assert results_1['SubnetSize'] == 24 + + assert self.init_swarm(default_addr_pool=['2.0.0.0/16'], + force_new_cluster=True) + results_2 = self.client.inspect_swarm() + assert set(results_2['DefaultAddrPool']) == ( + {'2.0.0.0/16'} + ) + assert results_2['SubnetSize'] == 24 + + assert self.init_swarm(default_addr_pool=['2.0.0.0/16', '3.0.0.0/16'], + subnet_size=28, force_new_cluster=True) + results_3 = self.client.inspect_swarm() + assert set(results_3['DefaultAddrPool']) == ( + {'2.0.0.0/16', '3.0.0.0/16'} + ) + assert results_3['SubnetSize'] == 28 + @requires_api_version('1.24') def test_init_already_in_cluster(self): assert self.init_swarm() From 781dc30ad425286ede981d639647cae6afd1a2e9 Mon Sep 17 00:00:00 2001 From: Barry Shapira Date: Fri, 14 Dec 2018 07:30:55 +0000 Subject: [PATCH 18/56] Check API 
version before setting swarm addr pool. Also corrected a documentation error: the default API version from constants is currently 1.35, not 1.30 as was sometimes listed. Signed-off-by: Barry Shapira Removed accidental whitespace. Signed-off-by: Barry Shapira --- docker/api/swarm.py | 27 ++++++++++++++++++++++++--- docker/constants.py | 3 +++ docker/models/swarm.py | 6 +++--- tests/integration/api_swarm_test.py | 23 ++++++++++++++--------- 4 files changed, 44 insertions(+), 15 deletions(-) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index bec3efdf..4a39782a 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -1,5 +1,6 @@ import logging from six.moves import http_client +from ..constants import DEFAULT_SWARM_ADDR_POOL, DEFAULT_SWARM_SUBNET_SIZE from .. import errors from .. import types from .. import utils @@ -82,7 +83,7 @@ class SwarmApiMixin(object): @utils.minimum_version('1.24') def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377', - default_addr_pool=[], subnet_size=24, + default_addr_pool=None, subnet_size=None, force_new_cluster=False, swarm_spec=None): """ Initialize a new Swarm using the current connected engine as the first @@ -106,9 +107,9 @@ class SwarmApiMixin(object): default_addr_pool (list of strings): Default Address Pool specifies default subnet pools for global scope networks. Each pool should be specified as a CIDR block, like '10.0.0.0/16'. - Default: [] + Default: None subnet_size (int): SubnetSize specifies the subnet size of the - networks created from the default subnet pool. Default: 24 + networks created from the default subnet pool. Default: None force_new_cluster (bool): Force creating a new Swarm, even if already part of one. Default: False swarm_spec (dict): Configuration settings of the new Swarm. 
Use @@ -124,8 +125,28 @@ class SwarmApiMixin(object): """ url = self._url('/swarm/init') + if swarm_spec is not None and not isinstance(swarm_spec, dict): raise TypeError('swarm_spec must be a dictionary') + + if default_addr_pool is not None: + if utils.version_lt(self._version, '1.39'): + raise errors.InvalidVersion( + 'Address pool is only available for API version >= 1.39' + ) + # subnet_size becomes 0 if not set with default_addr_pool + if subnet_size is None: + subnet_size = DEFAULT_SWARM_SUBNET_SIZE + + if subnet_size is not None: + if utils.version_lt(self._version, '1.39'): + raise errors.InvalidVersion( + 'Subnet size is only available for API version >= 1.39' + ) + # subnet_size is ignored if set without default_addr_pool + if default_addr_pool is None: + default_addr_pool = DEFAULT_SWARM_ADDR_POOL + data = { 'AdvertiseAddr': advertise_addr, 'ListenAddr': listen_addr, diff --git a/docker/constants.py b/docker/constants.py index dcba0de2..4b96e1ce 100644 --- a/docker/constants.py +++ b/docker/constants.py @@ -25,3 +25,6 @@ DEFAULT_NUM_POOLS = 25 DEFAULT_NUM_POOLS_SSH = 9 DEFAULT_DATA_CHUNK_SIZE = 1024 * 2048 + +DEFAULT_SWARM_ADDR_POOL = ['10.0.0.0/8'] +DEFAULT_SWARM_SUBNET_SIZE = 24 diff --git a/docker/models/swarm.py b/docker/models/swarm.py index e39e6f35..1106ce26 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -34,7 +34,7 @@ class Swarm(Model): get_unlock_key.__doc__ = APIClient.get_unlock_key.__doc__ def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377', - default_addr_pool=[], subnet_size=24, + default_addr_pool=None, subnet_size=None, force_new_cluster=False, **kwargs): """ Initialize a new swarm on this Engine. @@ -58,9 +58,9 @@ class Swarm(Model): default_addr_pool (list of str): Default Address Pool specifies default subnet pools for global scope networks. Each pool should be specified as a CIDR block, like '10.0.0.0/16'. 
- Default: [] + Default: None subnet_size (int): SubnetSize specifies the subnet size of the - networks created from the default subnet pool. Default: 24 + networks created from the default subnet pool. Default: None force_new_cluster (bool): Force creating a new Swarm, even if already part of one. Default: False task_history_retention_limit (int): Maximum number of tasks diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index 5ef651d2..41fae578 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -37,26 +37,31 @@ class SwarmTest(BaseAPIIntegrationTest): @requires_api_version('1.39') def test_init_swarm_custom_addr_pool(self): + # test defaults assert self.init_swarm() results_1 = self.client.inspect_swarm() - assert results_1['DefaultAddrPool'] is None + assert set(results_1['DefaultAddrPool']) == {'10.0.0.0/8'} assert results_1['SubnetSize'] == 24 - + # test addr pool alone assert self.init_swarm(default_addr_pool=['2.0.0.0/16'], force_new_cluster=True) results_2 = self.client.inspect_swarm() - assert set(results_2['DefaultAddrPool']) == ( - {'2.0.0.0/16'} - ) + assert set(results_2['DefaultAddrPool']) == {'2.0.0.0/16'} assert results_2['SubnetSize'] == 24 - + # test subnet size alone + assert self.init_swarm(subnet_size=26, + force_new_cluster=True) + results_3 = self.client.inspect_swarm() + assert set(results_3['DefaultAddrPool']) == {'10.0.0.0/8'} + assert results_3['SubnetSize'] == 26 + # test both arguments together assert self.init_swarm(default_addr_pool=['2.0.0.0/16', '3.0.0.0/16'], subnet_size=28, force_new_cluster=True) - results_3 = self.client.inspect_swarm() - assert set(results_3['DefaultAddrPool']) == ( + results_4 = self.client.inspect_swarm() + assert set(results_4['DefaultAddrPool']) == ( {'2.0.0.0/16', '3.0.0.0/16'} ) - assert results_3['SubnetSize'] == 28 + assert results_4['SubnetSize'] == 28 @requires_api_version('1.24') def test_init_already_in_cluster(self): 
From d6cc972cd9955b1aadd373391673314f79e82679 Mon Sep 17 00:00:00 2001 From: Barry Shapira Date: Thu, 3 Jan 2019 17:31:06 -0800 Subject: [PATCH 19/56] Split monolithic integration tests into individual tests. The integration tests require restarting the swarm once for each test. I had done so manually with self.init_swarm(force_new_cluster=True) but that wasn't resetting the swarm state correctly. The usual test teardown procedure cleans up correctly. Signed-off-by: Barry Shapira --- tests/integration/api_swarm_test.py | 49 +++++++++++++++-------------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index 41fae578..37f5fa79 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -36,32 +36,33 @@ class SwarmTest(BaseAPIIntegrationTest): assert version_2 != version_1 @requires_api_version('1.39') - def test_init_swarm_custom_addr_pool(self): - # test defaults + def test_init_swarm_custom_addr_pool_defaults(self): assert self.init_swarm() - results_1 = self.client.inspect_swarm() - assert set(results_1['DefaultAddrPool']) == {'10.0.0.0/8'} - assert results_1['SubnetSize'] == 24 - # test addr pool alone - assert self.init_swarm(default_addr_pool=['2.0.0.0/16'], - force_new_cluster=True) - results_2 = self.client.inspect_swarm() - assert set(results_2['DefaultAddrPool']) == {'2.0.0.0/16'} - assert results_2['SubnetSize'] == 24 - # test subnet size alone - assert self.init_swarm(subnet_size=26, - force_new_cluster=True) - results_3 = self.client.inspect_swarm() - assert set(results_3['DefaultAddrPool']) == {'10.0.0.0/8'} - assert results_3['SubnetSize'] == 26 - # test both arguments together + results = self.client.inspect_swarm() + assert set(results['DefaultAddrPool']) == {'10.0.0.0/8'} + assert results['SubnetSize'] == 24 + + @requires_api_version('1.39') + def test_init_swarm_custom_addr_pool_only_pool(self): + assert 
self.init_swarm(default_addr_pool=['2.0.0.0/16']) + results = self.client.inspect_swarm() + assert set(results['DefaultAddrPool']) == {'2.0.0.0/16'} + assert results['SubnetSize'] == 24 + + @requires_api_version('1.39') + def test_init_swarm_custom_addr_pool_only_subnet_size(self): + assert self.init_swarm(subnet_size=26) + results = self.client.inspect_swarm() + assert set(results['DefaultAddrPool']) == {'10.0.0.0/8'} + assert results['SubnetSize'] == 26 + + @requires_api_version('1.39') + def test_init_swarm_custom_addr_pool_both_args(self): assert self.init_swarm(default_addr_pool=['2.0.0.0/16', '3.0.0.0/16'], - subnet_size=28, force_new_cluster=True) - results_4 = self.client.inspect_swarm() - assert set(results_4['DefaultAddrPool']) == ( - {'2.0.0.0/16', '3.0.0.0/16'} - ) - assert results_4['SubnetSize'] == 28 + subnet_size=28) + results = self.client.inspect_swarm() + assert set(results['DefaultAddrPool']) == {'2.0.0.0/16', '3.0.0.0/16'} + assert results['SubnetSize'] == 28 @requires_api_version('1.24') def test_init_already_in_cluster(self): From 68a271cef4e2afe881d5c4dfe18a97496dc3adb0 Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Fri, 22 Mar 2019 16:55:10 +0100 Subject: [PATCH 20/56] Fix documentation and order of arguments Following https://github.com/docker/docker-py/pull/2201#pullrequestreview-192571911 Signed-off-by: Hannes Ljungberg Co-authored-by: Hannes Ljungberg Co-authored-by: bluikko <14869000+bluikko@users.noreply.github.com> --- docker/api/swarm.py | 17 ++++++++--------- docker/models/swarm.py | 18 +++++++++--------- 2 files changed, 17 insertions(+), 18 deletions(-) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index 4a39782a..e7db5e29 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -83,8 +83,8 @@ class SwarmApiMixin(object): @utils.minimum_version('1.24') def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377', - default_addr_pool=None, subnet_size=None, - force_new_cluster=False, swarm_spec=None): 
+ force_new_cluster=False, swarm_spec=None, + default_addr_pool=None, subnet_size=None): """ Initialize a new Swarm using the current connected engine as the first node. @@ -104,17 +104,17 @@ class SwarmApiMixin(object): or an interface followed by a port number, like ``eth0:4567``. If the port number is omitted, the default swarm listening port is used. Default: '0.0.0.0:2377' - default_addr_pool (list of strings): Default Address Pool specifies - default subnet pools for global scope networks. Each pool - should be specified as a CIDR block, like '10.0.0.0/16'. - Default: None - subnet_size (int): SubnetSize specifies the subnet size of the - networks created from the default subnet pool. Default: None force_new_cluster (bool): Force creating a new Swarm, even if already part of one. Default: False swarm_spec (dict): Configuration settings of the new Swarm. Use ``APIClient.create_swarm_spec`` to generate a valid configuration. Default: None + default_addr_pool (list of strings): Default Address Pool specifies + default subnet pools for global scope networks. Each pool + should be specified as a CIDR block, like '10.0.0.0/8'. + Default: None + subnet_size (int): SubnetSize specifies the subnet size of the + networks created from the default subnet pool. Default: None Returns: ``True`` if successful. 
@@ -125,7 +125,6 @@ class SwarmApiMixin(object): """ url = self._url('/swarm/init') - if swarm_spec is not None and not isinstance(swarm_spec, dict): raise TypeError('swarm_spec must be a dictionary') diff --git a/docker/models/swarm.py b/docker/models/swarm.py index 1106ce26..cb27467d 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -34,8 +34,8 @@ class Swarm(Model): get_unlock_key.__doc__ = APIClient.get_unlock_key.__doc__ def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377', - default_addr_pool=None, subnet_size=None, - force_new_cluster=False, **kwargs): + force_new_cluster=False, default_addr_pool=None, + subnet_size=None, **kwargs): """ Initialize a new swarm on this Engine. @@ -55,14 +55,14 @@ class Swarm(Model): or an interface followed by a port number, like ``eth0:4567``. If the port number is omitted, the default swarm listening port is used. Default: ``0.0.0.0:2377`` + force_new_cluster (bool): Force creating a new Swarm, even if + already part of one. Default: False default_addr_pool (list of str): Default Address Pool specifies default subnet pools for global scope networks. Each pool - should be specified as a CIDR block, like '10.0.0.0/16'. + should be specified as a CIDR block, like '10.0.0.0/8'. Default: None subnet_size (int): SubnetSize specifies the subnet size of the networks created from the default subnet pool. Default: None - force_new_cluster (bool): Force creating a new Swarm, even if - already part of one. Default: False task_history_retention_limit (int): Maximum number of tasks history stored. snapshot_interval (int): Number of logs entries between snapshot. 
@@ -106,8 +106,8 @@ class Swarm(Model): >>> client.swarm.init( advertise_addr='eth0', listen_addr='0.0.0.0:5000', - default_addr_pool=['10.20.0.0/16], subnet_size=24, - force_new_cluster=False, snapshot_interval=5000, + force_new_cluster=False, default_addr_pool=['10.20.0.0/16'], + subnet_size=24, snapshot_interval=5000, log_entries_for_slow_followers=1200 ) @@ -115,9 +115,9 @@ class Swarm(Model): init_kwargs = { 'advertise_addr': advertise_addr, 'listen_addr': listen_addr, + 'force_new_cluster': force_new_cluster, 'default_addr_pool': default_addr_pool, - 'subnet_size': subnet_size, - 'force_new_cluster': force_new_cluster + 'subnet_size': subnet_size } init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs) self.client.api.init_swarm(**init_kwargs) From 523371e21d41e5afdb800aadec123853b8c37f2b Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Sat, 23 Mar 2019 20:57:14 +0100 Subject: [PATCH 21/56] Move volume_driver to RUN_HOST_CONFIG_KWARGS Fixes #2271 Signed-off-by: Hannes Ljungberg --- docker/models/containers.py | 2 +- tests/integration/models_containers_test.py | 10 ++++++++++ tests/unit/models_containers_test.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/docker/models/containers.py b/docker/models/containers.py index 502251d5..6659e6cf 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -964,7 +964,6 @@ RUN_CREATE_KWARGS = [ 'tty', 'use_config_proxy', 'user', - 'volume_driver', 'working_dir', ] @@ -1028,6 +1027,7 @@ RUN_HOST_CONFIG_KWARGS = [ 'userns_mode', 'uts_mode', 'version', + 'volume_driver', 'volumes_from', 'runtime' ] diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index 92eca36d..cbc57461 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -378,3 +378,13 @@ class ContainerTest(BaseIntegrationTest): detach=True) self.tmp_containers.append(container.id) assert 
container.wait()['StatusCode'] == 1 + + def test_create_with_volume_driver(self): + client = docker.from_env(version=TEST_API_VERSION) + container = client.containers.create( + 'alpine', + 'sleep 300', + volume_driver='foo' + ) + self.tmp_containers.append(container.id) + assert container.attrs['HostConfig']['VolumeDriver'] == 'foo' diff --git a/tests/unit/models_containers_test.py b/tests/unit/models_containers_test.py index f44e3658..da5f0ab9 100644 --- a/tests/unit/models_containers_test.py +++ b/tests/unit/models_containers_test.py @@ -176,6 +176,7 @@ class ContainerCollectionTest(unittest.TestCase): 'Ulimits': [{"Name": "nofile", "Soft": 1024, "Hard": 2048}], 'UsernsMode': 'host', 'UTSMode': 'host', + 'VolumeDriver': 'some_driver', 'VolumesFrom': ['container'], }, healthcheck={'test': 'true'}, @@ -190,7 +191,6 @@ class ContainerCollectionTest(unittest.TestCase): stop_signal=9, tty=True, user='bob', - volume_driver='some_driver', volumes=[ '/mnt/vol2', '/mnt/vol1', From 8f42dd14841c43aa8081fe67c9af305391e4952b Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 19 Mar 2019 17:38:24 +0100 Subject: [PATCH 22/56] Avoid race condition on short execution - Add a sleep of 2 seconds to be sure the logs can be requested before the daemon removes the container when run with auto_remove=True Signed-off-by: Ulysses Souza --- tests/integration/models_containers_test.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index 92eca36d..872f753e 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -123,7 +123,9 @@ class ContainerCollectionTest(BaseIntegrationTest): def test_run_with_auto_remove(self): client = docker.from_env(version=TEST_API_VERSION) out = client.containers.run( - 'alpine', 'echo hello', auto_remove=True + # sleep(2) to allow any communication with the container + # before it gets 
removed by the host. + 'alpine', 'sh -c "echo hello && sleep 2"', auto_remove=True ) assert out == b'hello\n' @@ -132,7 +134,10 @@ class ContainerCollectionTest(BaseIntegrationTest): client = docker.from_env(version=TEST_API_VERSION) with pytest.raises(docker.errors.ContainerError) as e: client.containers.run( - 'alpine', 'sh -c ">&2 echo error && exit 1"', auto_remove=True + # sleep(2) to allow any communication with the container + # before it gets removed by the host. + 'alpine', 'sh -c ">&2 echo error && sleep 2 && exit 1"', + auto_remove=True ) assert e.value.exit_status == 1 assert e.value.stderr is None From 15862eacbf863cb7371a6629a9ab951bc05d86a3 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 26 Mar 2019 15:15:40 +0100 Subject: [PATCH 23/56] Xfail test_attach_stream_and_cancel on TLS This test is quite flaky on ssl integration test Signed-off-by: Ulysses Souza --- tests/integration/api_container_test.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index eb3fd661..5a8ba5a8 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1080,7 +1080,6 @@ class KillTest(BaseAPIIntegrationTest): class PortTest(BaseAPIIntegrationTest): def test_port(self): - port_bindings = { '1111': ('127.0.0.1', '4567'), '2222': ('127.0.0.1', '4568'), @@ -1268,6 +1267,9 @@ class AttachContainerTest(BaseAPIIntegrationTest): @pytest.mark.timeout(5) @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), reason='No cancellable streams over SSH') + @pytest.mark.xfail(condition=os.environ.get('DOCKER_TLS_VERIFY') or + os.environ.get('DOCKER_CERT_PATH'), + reason='Flaky test on TLS') def test_attach_stream_and_cancel(self): container = self.client.create_container( BUSYBOX, 'sh -c "echo hello && sleep 60"', From b2175c9475b0c3bffd268768136fa30fba8ecf96 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 26 
Mar 2019 12:07:25 +0100 Subject: [PATCH 24/56] Fix base_url to keep TCP protocol This fix lets the responsability of changing the protocol to `parse_host` afterwards, letting `base_url` with the original value. Signed-off-by: Ulysses Souza --- docker/utils/utils.py | 4 +--- tests/unit/utils_test.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/docker/utils/utils.py b/docker/utils/utils.py index 61e307ad..7819ace4 100644 --- a/docker/utils/utils.py +++ b/docker/utils/utils.py @@ -352,9 +352,7 @@ def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None): params = {} if host: - params['base_url'] = ( - host.replace('tcp://', 'https://') if enable_tls else host - ) + params['base_url'] = host if not enable_tls: return params diff --git a/tests/unit/utils_test.py b/tests/unit/utils_test.py index 3cb3be91..d9cb0028 100644 --- a/tests/unit/utils_test.py +++ b/tests/unit/utils_test.py @@ -11,6 +11,7 @@ import unittest from docker.api.client import APIClient +from docker.constants import IS_WINDOWS_PLATFORM from docker.errors import DockerException from docker.utils import ( convert_filters, convert_volume_binds, decode_json_header, kwargs_from_env, @@ -83,15 +84,17 @@ class KwargsFromEnvTest(unittest.TestCase): DOCKER_CERT_PATH=TEST_CERT_DIR, DOCKER_TLS_VERIFY='1') kwargs = kwargs_from_env(assert_hostname=False) - assert 'https://192.168.59.103:2376' == kwargs['base_url'] + assert 'tcp://192.168.59.103:2376' == kwargs['base_url'] assert 'ca.pem' in kwargs['tls'].ca_cert assert 'cert.pem' in kwargs['tls'].cert[0] assert 'key.pem' in kwargs['tls'].cert[1] assert kwargs['tls'].assert_hostname is False assert kwargs['tls'].verify + + parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True) try: client = APIClient(**kwargs) - assert kwargs['base_url'] == client.base_url + assert parsed_host == client.base_url assert kwargs['tls'].ca_cert == client.verify assert kwargs['tls'].cert == client.cert except 
TypeError as e: @@ -102,15 +105,16 @@ class KwargsFromEnvTest(unittest.TestCase): DOCKER_CERT_PATH=TEST_CERT_DIR, DOCKER_TLS_VERIFY='') kwargs = kwargs_from_env(assert_hostname=True) - assert 'https://192.168.59.103:2376' == kwargs['base_url'] + assert 'tcp://192.168.59.103:2376' == kwargs['base_url'] assert 'ca.pem' in kwargs['tls'].ca_cert assert 'cert.pem' in kwargs['tls'].cert[0] assert 'key.pem' in kwargs['tls'].cert[1] assert kwargs['tls'].assert_hostname is True assert kwargs['tls'].verify is False + parsed_host = parse_host(kwargs['base_url'], IS_WINDOWS_PLATFORM, True) try: client = APIClient(**kwargs) - assert kwargs['base_url'] == client.base_url + assert parsed_host == client.base_url assert kwargs['tls'].cert == client.cert assert not kwargs['tls'].verify except TypeError as e: From 4890864d65a427847cca8d58b9281e3eaab82994 Mon Sep 17 00:00:00 2001 From: Karl Kuehn Date: Tue, 30 Jan 2018 14:28:37 -0800 Subject: [PATCH 25/56] add ports to containers Signed-off-by: Karl Kuehn --- docker/models/containers.py | 7 +++++++ tests/integration/models_containers_test.py | 22 +++++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/docker/models/containers.py b/docker/models/containers.py index 6cd33a61..11d8f0a3 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -62,6 +62,13 @@ class Container(Model): return self.attrs['State']['Status'] return self.attrs['State'] + @property + def ports(self): + """ + The ports that the container exposes as a dictionary. + """ + return self.attrs.get('NetworkSettings', {}).get('Ports', {}) + def attach(self, **kwargs): """ Attach to this container. 
diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index c7d897eb..f0c3083b 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -346,6 +346,28 @@ class ContainerTest(BaseIntegrationTest): 'memory_stats', 'blkio_stats']: assert key in stats + def test_ports(self): + client = docker.from_env(version=TEST_API_VERSION) + target_ports = {'2222/tcp': None} + container = client.containers.run( + "alpine", "sleep 100", detach=True, + ports=target_ports + ) + self.tmp_containers.append(container.id) + container.reload() # required to get auto-assigned ports + actual_ports = container.ports + assert sorted(target_ports.keys()) == sorted(actual_ports.keys()) + for target_client, target_host in target_ports.items(): + for actual_port in actual_ports[target_client]: + actual_keys = sorted(actual_port.keys()) + assert sorted(['HostIp', 'HostPort']) == actual_keys + if target_host is None: + int(actual_port['HostPort']) + elif isinstance(target_host, (list, tuple)): + raise NotImplementedError() + else: + assert actual_port['HostPort'] == target_host.split('/', 1) + def test_stop(self): client = docker.from_env(version=TEST_API_VERSION) container = client.containers.run("alpine", "top", detach=True) From d1f7979f24fbc2ad0d33fbce6399ff60d791eca2 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 26 Mar 2019 17:28:49 +0100 Subject: [PATCH 26/56] Refactor and add tests Signed-off-by: Ulysses Souza --- tests/integration/models_containers_test.py | 54 ++++++++++++++++++--- 1 file changed, 46 insertions(+), 8 deletions(-) diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index f0c3083b..951a08ae 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -346,9 +346,10 @@ class ContainerTest(BaseIntegrationTest): 'memory_stats', 'blkio_stats']: assert key in stats - def 
test_ports(self): + def test_ports_target_none(self): client = docker.from_env(version=TEST_API_VERSION) - target_ports = {'2222/tcp': None} + ports = None + target_ports = {'2222/tcp': ports} container = client.containers.run( "alpine", "sleep 100", detach=True, ports=target_ports @@ -361,12 +362,49 @@ class ContainerTest(BaseIntegrationTest): for actual_port in actual_ports[target_client]: actual_keys = sorted(actual_port.keys()) assert sorted(['HostIp', 'HostPort']) == actual_keys - if target_host is None: - int(actual_port['HostPort']) - elif isinstance(target_host, (list, tuple)): - raise NotImplementedError() - else: - assert actual_port['HostPort'] == target_host.split('/', 1) + assert target_host is ports + assert int(actual_port['HostPort']) > 0 + client.close() + + def test_ports_target_tuple(self): + client = docker.from_env(version=TEST_API_VERSION) + ports = ('127.0.0.1', 1111) + target_ports = {'2222/tcp': ports} + container = client.containers.run( + "alpine", "sleep 100", detach=True, + ports=target_ports + ) + self.tmp_containers.append(container.id) + container.reload() # required to get auto-assigned ports + actual_ports = container.ports + assert sorted(target_ports.keys()) == sorted(actual_ports.keys()) + for target_client, target_host in target_ports.items(): + for actual_port in actual_ports[target_client]: + actual_keys = sorted(actual_port.keys()) + assert sorted(['HostIp', 'HostPort']) == actual_keys + assert target_host == ports + assert int(actual_port['HostPort']) > 0 + client.close() + + def test_ports_target_list(self): + client = docker.from_env(version=TEST_API_VERSION) + ports = [1234, 4567] + target_ports = {'2222/tcp': ports} + container = client.containers.run( + "alpine", "sleep 100", detach=True, + ports=target_ports + ) + self.tmp_containers.append(container.id) + container.reload() # required to get auto-assigned ports + actual_ports = container.ports + assert sorted(target_ports.keys()) == sorted(actual_ports.keys()) + for 
+ target_client, target_host in target_ports.items(): + for actual_port in actual_ports[target_client]: + actual_keys = sorted(actual_port.keys()) + assert sorted(['HostIp', 'HostPort']) == actual_keys + assert target_host == ports + assert int(actual_port['HostPort']) > 0 + client.close() def test_stop(self): client = docker.from_env(version=TEST_API_VERSION) container = client.containers.run("alpine", "top", detach=True) From b0abdac90c7e6aef444368781bcc3df24f69cab0 Mon Sep 17 00:00:00 2001 From: Ian Campbell Date: Thu, 28 Mar 2019 11:42:02 +0000 Subject: [PATCH 27/56] scripts/versions.py: Use regex grouping to extract the version The `lstrip` and `rstrip` functions take a set of characters to remove, not a prefix/suffix. Thus `rstrip('-x86_64')` will remove any trailing characters in the string `'-x86_64'` in any order (in effect it strips the suffix matching the regex `[-_x468]*`). So with `18.09.4` it removes the `4` suffix resulting in trying to `int('')` later on: Traceback (most recent call last): File "/src/scripts/versions.py", line 80, in main() File "/src/scripts/versions.py", line 73, in main versions, reverse=True, key=operator.attrgetter('order') File "/src/scripts/versions.py", line 52, in order return (int(self.major), int(self.minor), int(self.patch)) + stage ValueError: invalid literal for int() with base 10: '' Since we no longer need to check for the arch suffix (since it no longer appears in the URLs we are traversing) we could just drop the `rstrip` and invent a local prefix stripping helper to replace `lstrip('docker-')`. Instead let's take advantage of the behaviour of `re.findall` which is that if the regex contains a single `()` match that will be returned. This lets us match exactly the sub-section of the regex we require. While editing the regex, also ensure that the suffix is precisely `.tgz` and not merely `tgz` by adding an explicit `\.`, previously the literal `.` would be swallowed by the `.*` instead. 
Signed-off-by: Ian Campbell --- scripts/versions.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/scripts/versions.py b/scripts/versions.py index 7ad1d56a..93fe0d7f 100644 --- a/scripts/versions.py +++ b/scripts/versions.py @@ -62,13 +62,9 @@ def main(): for url in [base_url.format(cat) for cat in categories]: res = requests.get(url) content = res.text - versions = [ - Version.parse( - v.strip('"').lstrip('docker-').rstrip('.tgz').rstrip('-x86_64') - ) for v in re.findall( - r'"docker-[0-9]+\.[0-9]+\.[0-9]+-?.*tgz"', content - ) - ] + versions = [Version.parse(v) for v in re.findall( + r'"docker-([0-9]+\.[0-9]+\.[0-9]+)-?.*tgz"', content + )] sorted_versions = sorted( versions, reverse=True, key=operator.attrgetter('order') ) From 0d5aacc464df9765bbc54e9aaaaedb14e51b78f7 Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Thu, 28 Mar 2019 11:31:28 +0100 Subject: [PATCH 28/56] Add support for setting init on services Signed-off-by: Hannes Ljungberg --- docker/api/service.py | 4 ++++ docker/models/services.py | 3 +++ docker/types/services.py | 7 ++++++- tests/integration/api_service_test.py | 14 ++++++++++++++ 4 files changed, 27 insertions(+), 1 deletion(-) diff --git a/docker/api/service.py b/docker/api/service.py index 02f3380e..372dd10b 100644 --- a/docker/api/service.py +++ b/docker/api/service.py @@ -88,6 +88,10 @@ def _check_api_features(version, task_template, update_config, endpoint_spec, if container_spec.get('Isolation') is not None: raise_version_error('ContainerSpec.isolation', '1.35') + if utils.version_lt(version, '1.38'): + if container_spec.get('Init') is not None: + raise_version_error('ContainerSpec.init', '1.38') + if task_template.get('Resources'): if utils.version_lt(version, '1.32'): if task_template['Resources'].get('GenericResources'): diff --git a/docker/models/services.py b/docker/models/services.py index 5d2bd9b3..2b6479f2 100644 --- a/docker/models/services.py +++ b/docker/models/services.py @@ -165,6 
+165,8 @@ class ServiceCollection(Collection): env (list of str): Environment variables, in the form ``KEY=val``. hostname (string): Hostname to set on the container. + init (boolean): Run an init inside the container that forwards + signals and reaps processes isolation (string): Isolation technology used by the service's containers. Only used for Windows containers. labels (dict): Labels to apply to the service. @@ -280,6 +282,7 @@ CONTAINER_SPEC_KWARGS = [ 'hostname', 'hosts', 'image', + 'init', 'isolation', 'labels', 'mounts', diff --git a/docker/types/services.py b/docker/types/services.py index a0721f60..5722b0e3 100644 --- a/docker/types/services.py +++ b/docker/types/services.py @@ -110,13 +110,15 @@ class ContainerSpec(dict): privileges (Privileges): Security options for the service's containers. isolation (string): Isolation technology used by the service's containers. Only used for Windows containers. + init (boolean): Run an init inside the container that forwards signals + and reaps processes. 
""" def __init__(self, image, command=None, args=None, hostname=None, env=None, workdir=None, user=None, labels=None, mounts=None, stop_grace_period=None, secrets=None, tty=None, groups=None, open_stdin=None, read_only=None, stop_signal=None, healthcheck=None, hosts=None, dns_config=None, configs=None, - privileges=None, isolation=None): + privileges=None, isolation=None, init=None): self['Image'] = image if isinstance(command, six.string_types): @@ -183,6 +185,9 @@ class ContainerSpec(dict): if isolation is not None: self['Isolation'] = isolation + if init is not None: + self['Init'] = init + class Mount(dict): """ diff --git a/tests/integration/api_service_test.py b/tests/integration/api_service_test.py index 57a8d331..71e0869e 100644 --- a/tests/integration/api_service_test.py +++ b/tests/integration/api_service_test.py @@ -850,6 +850,20 @@ class ServiceTest(BaseAPIIntegrationTest): ) assert privileges['SELinuxContext']['Disable'] is True + @requires_api_version('1.38') + def test_create_service_with_init(self): + container_spec = docker.types.ContainerSpec( + 'busybox', ['sleep', '999'], init=True + ) + task_tmpl = docker.types.TaskTemplate(container_spec) + name = self.get_service_name() + svc_id = self.client.create_service(task_tmpl, name=name) + svc_info = self.client.inspect_service(svc_id) + assert 'Init' in svc_info['Spec']['TaskTemplate']['ContainerSpec'] + assert ( + svc_info['Spec']['TaskTemplate']['ContainerSpec']['Init'] is True + ) + @requires_api_version('1.25') def test_update_service_with_defaults_name(self): container_spec = docker.types.ContainerSpec( From 8010d8ba1e0119351a1b5e864ce466882be11bc7 Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Sun, 31 Mar 2019 23:17:51 +0200 Subject: [PATCH 29/56] Document correct listen_addr on join swarm Signed-off-by: Hannes Ljungberg --- docker/api/swarm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index e7db5e29..bab91ee4 100644 --- 
a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -205,7 +205,7 @@ class SwarmApiMixin(object): listen_addr (string): Listen address used for inter-manager communication if the node gets promoted to manager, as well as determining the networking interface used for the VXLAN Tunnel - Endpoint (VTEP). Default: ``None`` + Endpoint (VTEP). Default: ``'0.0.0.0:2377'`` advertise_addr (string): Externally reachable address advertised to other nodes. This can either be an address/port combination in the form ``192.168.1.1:4567``, or an interface followed by a From ef043559c4bbd3d1fbc06277160c253fab6df879 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 4 Apr 2019 10:29:36 +0200 Subject: [PATCH 30/56] Add 'sleep 2' to avoid race condition on attach Signed-off-by: Ulysses Souza --- tests/integration/api_container_test.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 5a8ba5a8..b364f94c 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1267,19 +1267,16 @@ class AttachContainerTest(BaseAPIIntegrationTest): @pytest.mark.timeout(5) @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), reason='No cancellable streams over SSH') - @pytest.mark.xfail(condition=os.environ.get('DOCKER_TLS_VERIFY') or - os.environ.get('DOCKER_CERT_PATH'), - reason='Flaky test on TLS') def test_attach_stream_and_cancel(self): container = self.client.create_container( - BUSYBOX, 'sh -c "echo hello && sleep 60"', + BUSYBOX, 'sh -c "sleep 2 && echo hello && sleep 60"', tty=True ) self.tmp_containers.append(container) self.client.start(container) output = self.client.attach(container, stream=True, logs=True) - threading.Timer(1, output.close).start() + threading.Timer(3, output.close).start() lines = [] for line in output: From 50d475797a3866767289d81a74c1f720662004a5 Mon Sep 17 00:00:00 2001 From: Sebastiaan van 
Stijn Date: Wed, 10 Apr 2019 02:42:23 +0200 Subject: [PATCH 31/56] Remove init_path from create This option was never functional, and was not intended to be added to the "container create" API, so let's remove it, because it has been removed in Docker 17.05, and was broken in versions before that; see - https://github.com/moby/moby/issues/32355 --init-path does not seem to work - https://github.com/moby/moby/pull/32470 remove --init-path from client Signed-off-by: Sebastiaan van Stijn --- docker/api/container.py | 1 - tests/integration/api_container_test.py | 13 ------------- 2 files changed, 14 deletions(-) diff --git a/docker/api/container.py b/docker/api/container.py index 60691817..94f53ff2 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -487,7 +487,6 @@ class ContainerApiMixin(object): IDs that the container process will run as. init (bool): Run an init inside the container that forwards signals and reaps processes - init_path (str): Path to the docker-init binary ipc_mode (str): Set the IPC mode for the container. isolation (str): Isolation technology to use. Default: ``None``. 
links (dict): Mapping of links using the diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index b364f94c..730c9eeb 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -448,19 +448,6 @@ class CreateContainerTest(BaseAPIIntegrationTest): config = self.client.inspect_container(ctnr) assert config['HostConfig']['Init'] is True - @pytest.mark.xfail(True, reason='init-path removed in 17.05.0') - @requires_api_version('1.25') - def test_create_with_init_path(self): - ctnr = self.client.create_container( - BUSYBOX, 'true', - host_config=self.client.create_host_config( - init_path="/usr/libexec/docker-init" - ) - ) - self.tmp_containers.append(ctnr['Id']) - config = self.client.inspect_container(ctnr) - assert config['HostConfig']['InitPath'] == "/usr/libexec/docker-init" - @requires_api_version('1.24') @pytest.mark.xfail(not os.path.exists('/sys/fs/cgroup/cpu.rt_runtime_us'), reason='CONFIG_RT_GROUP_SCHED isn\'t enabled') From 221d64f427e5578faba604dd03376aa52ed08f6d Mon Sep 17 00:00:00 2001 From: Adam Dangoor Date: Sat, 27 Apr 2019 09:14:00 +0100 Subject: [PATCH 32/56] Replace triple backtick in exec_run documentation which caused a rendering error. Signed-off-by: Adam Dangoor --- docker/models/containers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/models/containers.py b/docker/models/containers.py index 11d8f0a3..d321a580 100644 --- a/docker/models/containers.py +++ b/docker/models/containers.py @@ -179,7 +179,7 @@ class Container(Model): (ExecResult): A tuple of (exit_code, output) exit_code: (int): Exit code for the executed command or ``None`` if - either ``stream```or ``socket`` is ``True``. + either ``stream`` or ``socket`` is ``True``. output: (generator, bytes, or tuple): If ``stream=True``, a generator yielding response chunks. If ``socket=True``, a socket object for the connection. 
From 20a5c067243bb8736595e92addb873f828fb4e1b Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Tue, 30 Apr 2019 23:16:09 -0700 Subject: [PATCH 33/56] Fix versions.py to include release stage Signed-off-by: Joffrey F --- scripts/versions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) mode change 100644 => 100755 scripts/versions.py diff --git a/scripts/versions.py b/scripts/versions.py old mode 100644 new mode 100755 index 93fe0d7f..4bdcb74d --- a/scripts/versions.py +++ b/scripts/versions.py @@ -26,8 +26,8 @@ class Version(namedtuple('_Version', 'major minor patch stage edition')): edition = stage stage = None elif '-' in stage: - edition, stage = stage.split('-') - major, minor, patch = version.split('.', 3) + edition, stage = stage.split('-', 1) + major, minor, patch = version.split('.', 2) return cls(major, minor, patch, stage, edition) @property @@ -63,7 +63,7 @@ def main(): res = requests.get(url) content = res.text versions = [Version.parse(v) for v in re.findall( - r'"docker-([0-9]+\.[0-9]+\.[0-9]+)-?.*tgz"', content + r'"docker-([0-9]+\.[0-9]+\.[0-9]+-?.*)\.tgz"', content )] sorted_versions = sorted( versions, reverse=True, key=operator.attrgetter('order') From a823acc2cae10c4635db2fb963cc37d8a23cc0c4 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 9 Jan 2019 16:18:21 -0800 Subject: [PATCH 34/56] Make dockerpycreds part of the SDK under docker.credentials Signed-off-by: Joffrey F --- MANIFEST.in | 1 + Makefile | 6 +- docker/auth.py | 8 +- docker/credentials/__init__.py | 4 + docker/credentials/constants.py | 4 + docker/credentials/errors.py | 25 ++++ docker/credentials/store.py | 107 ++++++++++++++++++ docker/credentials/utils.py | 38 +++++++ requirements.txt | 1 - setup.py | 1 - tests/Dockerfile | 28 +++++ tests/gpg-keys/ownertrust | 3 + tests/gpg-keys/secret | Bin 0 -> 966 bytes tests/integration/credentials/__init__.py | 0 .../integration/credentials/create_gpg_key.sh | 12 ++ tests/integration/credentials/store_test.py | 87 
++++++++++++++ tests/integration/credentials/utils_test.py | 22 ++++ tests/unit/auth_test.py | 7 +- 18 files changed, 341 insertions(+), 13 deletions(-) create mode 100644 docker/credentials/__init__.py create mode 100644 docker/credentials/constants.py create mode 100644 docker/credentials/errors.py create mode 100644 docker/credentials/store.py create mode 100644 docker/credentials/utils.py create mode 100644 tests/Dockerfile create mode 100644 tests/gpg-keys/ownertrust create mode 100644 tests/gpg-keys/secret create mode 100644 tests/integration/credentials/__init__.py create mode 100644 tests/integration/credentials/create_gpg_key.sh create mode 100644 tests/integration/credentials/store_test.py create mode 100644 tests/integration/credentials/utils_test.py diff --git a/MANIFEST.in b/MANIFEST.in index 41b3fa9f..2ba6e027 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -6,3 +6,4 @@ include LICENSE recursive-include tests *.py recursive-include tests/unit/testdata * recursive-include tests/integration/testdata * +recursive-include tests/gpg-keys * diff --git a/Makefile b/Makefile index 434d40e1..8cf2b74d 100644 --- a/Makefile +++ b/Makefile @@ -8,11 +8,11 @@ clean: .PHONY: build build: - docker build -t docker-sdk-python . + docker build -t docker-sdk-python -f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 . .PHONY: build-py3 build-py3: - docker build -t docker-sdk-python3 -f Dockerfile-py3 . + docker build -t docker-sdk-python3 -f tests/Dockerfile . 
.PHONY: build-docs build-docs: @@ -39,7 +39,7 @@ integration-test: build .PHONY: integration-test-py3 integration-test-py3: build-py3 - docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test tests/integration/${file} + docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test -v tests/integration/${file} TEST_API_VERSION ?= 1.35 TEST_ENGINE_VERSION ?= 17.12.0-ce diff --git a/docker/auth.py b/docker/auth.py index 638ab9b0..5f34ac08 100644 --- a/docker/auth.py +++ b/docker/auth.py @@ -2,9 +2,9 @@ import base64 import json import logging -import dockerpycreds import six +from . import credentials from . import errors from .utils import config @@ -273,17 +273,17 @@ class AuthConfig(dict): 'Password': data['Secret'], }) return res - except dockerpycreds.CredentialsNotFound: + except credentials.CredentialsNotFound: log.debug('No entry found') return None - except dockerpycreds.StoreError as e: + except credentials.StoreError as e: raise errors.DockerException( 'Credentials store error: {0}'.format(repr(e)) ) def _get_store_instance(self, name): if name not in self._stores: - self._stores[name] = dockerpycreds.Store( + self._stores[name] = credentials.Store( name, environment=self._credstore_env ) return self._stores[name] diff --git a/docker/credentials/__init__.py b/docker/credentials/__init__.py new file mode 100644 index 00000000..31ad28e3 --- /dev/null +++ b/docker/credentials/__init__.py @@ -0,0 +1,4 @@ +# flake8: noqa +from .store import Store +from .errors import StoreError, CredentialsNotFound +from .constants import * diff --git a/docker/credentials/constants.py b/docker/credentials/constants.py new file mode 100644 index 00000000..6a82d8da --- /dev/null +++ b/docker/credentials/constants.py @@ -0,0 +1,4 @@ +PROGRAM_PREFIX = 'docker-credential-' +DEFAULT_LINUX_STORE = 'secretservice' +DEFAULT_OSX_STORE = 'osxkeychain' +DEFAULT_WIN32_STORE = 'wincred' diff --git a/docker/credentials/errors.py 
b/docker/credentials/errors.py new file mode 100644 index 00000000..42a1bc1a --- /dev/null +++ b/docker/credentials/errors.py @@ -0,0 +1,25 @@ +class StoreError(RuntimeError): + pass + + +class CredentialsNotFound(StoreError): + pass + + +class InitializationError(StoreError): + pass + + +def process_store_error(cpe, program): + message = cpe.output.decode('utf-8') + if 'credentials not found in native keychain' in message: + return CredentialsNotFound( + 'No matching credentials in {}'.format( + program + ) + ) + return StoreError( + 'Credentials store {} exited with "{}".'.format( + program, cpe.output.decode('utf-8').strip() + ) + ) diff --git a/docker/credentials/store.py b/docker/credentials/store.py new file mode 100644 index 00000000..3f51e4a7 --- /dev/null +++ b/docker/credentials/store.py @@ -0,0 +1,107 @@ +import json +import os +import subprocess + +import six + +from . import constants +from . import errors +from .utils import create_environment_dict +from .utils import find_executable + + +class Store(object): + def __init__(self, program, environment=None): + """ Create a store object that acts as an interface to + perform the basic operations for storing, retrieving + and erasing credentials using `program`. + """ + self.program = constants.PROGRAM_PREFIX + program + self.exe = find_executable(self.program) + self.environment = environment + if self.exe is None: + raise errors.InitializationError( + '{} not installed or not available in PATH'.format( + self.program + ) + ) + + def get(self, server): + """ Retrieve credentials for `server`. If no credentials are found, + a `StoreError` will be raised. + """ + if not isinstance(server, six.binary_type): + server = server.encode('utf-8') + data = self._execute('get', server) + result = json.loads(data.decode('utf-8')) + + # docker-credential-pass will return an object for inexistent servers + # whereas other helpers will exit with returncode != 0. 
For + # consistency, if no significant data is returned, + # raise CredentialsNotFound + if result['Username'] == '' and result['Secret'] == '': + raise errors.CredentialsNotFound( + 'No matching credentials in {}'.format(self.program) + ) + + return result + + def store(self, server, username, secret): + """ Store credentials for `server`. Raises a `StoreError` if an error + occurs. + """ + data_input = json.dumps({ + 'ServerURL': server, + 'Username': username, + 'Secret': secret + }).encode('utf-8') + return self._execute('store', data_input) + + def erase(self, server): + """ Erase credentials for `server`. Raises a `StoreError` if an error + occurs. + """ + if not isinstance(server, six.binary_type): + server = server.encode('utf-8') + self._execute('erase', server) + + def list(self): + """ List stored credentials. Requires v0.4.0+ of the helper. + """ + data = self._execute('list', None) + return json.loads(data.decode('utf-8')) + + def _execute(self, subcmd, data_input): + output = None + env = create_environment_dict(self.environment) + try: + if six.PY3: + output = subprocess.check_output( + [self.exe, subcmd], input=data_input, env=env, + ) + else: + process = subprocess.Popen( + [self.exe, subcmd], stdin=subprocess.PIPE, + stdout=subprocess.PIPE, env=env, + ) + output, err = process.communicate(data_input) + if process.returncode != 0: + raise subprocess.CalledProcessError( + returncode=process.returncode, cmd='', output=output + ) + except subprocess.CalledProcessError as e: + raise errors.process_store_error(e, self.program) + except OSError as e: + if e.errno == os.errno.ENOENT: + raise errors.StoreError( + '{} not installed or not available in PATH'.format( + self.program + ) + ) + else: + raise errors.StoreError( + 'Unexpected OS error "{}", errno={}'.format( + e.strerror, e.errno + ) + ) + return output diff --git a/docker/credentials/utils.py b/docker/credentials/utils.py new file mode 100644 index 00000000..3f720ef1 --- /dev/null +++ 
b/docker/credentials/utils.py @@ -0,0 +1,38 @@ +import distutils.spawn +import os +import sys + + +def find_executable(executable, path=None): + """ + As distutils.spawn.find_executable, but on Windows, look up + every extension declared in PATHEXT instead of just `.exe` + """ + if sys.platform != 'win32': + return distutils.spawn.find_executable(executable, path) + + if path is None: + path = os.environ['PATH'] + + paths = path.split(os.pathsep) + extensions = os.environ.get('PATHEXT', '.exe').split(os.pathsep) + base, ext = os.path.splitext(executable) + + if not os.path.isfile(executable): + for p in paths: + for ext in extensions: + f = os.path.join(p, base + ext) + if os.path.isfile(f): + return f + return None + else: + return executable + + +def create_environment_dict(overrides): + """ + Create and return a copy of os.environ with the specified overrides + """ + result = os.environ.copy() + result.update(overrides or {}) + return result diff --git a/requirements.txt b/requirements.txt index 461bf530..eb66c9f5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,6 @@ asn1crypto==0.22.0 backports.ssl-match-hostname==3.5.0.1 cffi==1.10.0 cryptography==2.3 -docker-pycreds==0.4.0 enum34==1.1.6 idna==2.5 ipaddress==1.0.18 diff --git a/setup.py b/setup.py index 677bc204..3e1afcbe 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,6 @@ SOURCE_DIR = os.path.join(ROOT_DIR) requirements = [ 'six >= 1.4.0', 'websocket-client >= 0.32.0', - 'docker-pycreds >= 0.4.0', 'requests >= 2.14.2, != 2.18.0', ] diff --git a/tests/Dockerfile b/tests/Dockerfile new file mode 100644 index 00000000..042fc703 --- /dev/null +++ b/tests/Dockerfile @@ -0,0 +1,28 @@ +ARG PYTHON_VERSION=3.6 +FROM python:$PYTHON_VERSION-jessie +RUN apt-get update && apt-get -y install \ + gnupg2 \ + pass \ + curl + +COPY ./tests/gpg-keys /gpg-keys +RUN gpg2 --import gpg-keys/secret +RUN gpg2 --import-ownertrust gpg-keys/ownertrust +RUN yes | pass init $(gpg2 --no-auto-check-trustdb 
--list-secret-keys | grep ^sec | cut -d/ -f2 | cut -d" " -f1) +RUN gpg2 --check-trustdb +ARG CREDSTORE_VERSION=v0.6.0 +RUN curl -sSL -o /opt/docker-credential-pass.tar.gz \ + https://github.com/docker/docker-credential-helpers/releases/download/$CREDSTORE_VERSION/docker-credential-pass-$CREDSTORE_VERSION-amd64.tar.gz && \ + tar -xf /opt/docker-credential-pass.tar.gz -O > /usr/local/bin/docker-credential-pass && \ + rm -rf /opt/docker-credential-pass.tar.gz && \ + chmod +x /usr/local/bin/docker-credential-pass + +WORKDIR /src +COPY requirements.txt /src/requirements.txt +RUN pip install -r requirements.txt + +COPY test-requirements.txt /src/test-requirements.txt +RUN pip install -r test-requirements.txt + +COPY . /src +RUN pip install . diff --git a/tests/gpg-keys/ownertrust b/tests/gpg-keys/ownertrust new file mode 100644 index 00000000..141ea57e --- /dev/null +++ b/tests/gpg-keys/ownertrust @@ -0,0 +1,3 @@ +# List of assigned trustvalues, created Wed 25 Apr 2018 01:28:17 PM PDT +# (Use "gpg --import-ownertrust" to restore them) +9781B87DAB042E6FD51388A5464ED987A7B21401:6: diff --git a/tests/gpg-keys/secret b/tests/gpg-keys/secret new file mode 100644 index 0000000000000000000000000000000000000000..412294db8492a86a109545e31888b5b230017b4d GIT binary patch literal 966 zcmV;%13CPa0lNfR;MwsJ1OT9MDfJwS(eFF4u%|6O+V;@nXyh}N3JQBjI;zcUW-XT9 zaG7iCNTsErINPbwhKkhqBEL7K%)>|l%|76`yGw3PkhkWv4a3 zE`6J5cQlHWX7-66CA@P)lWU%iSfo}pZkc13MBQb^3>ts8S&yoPUuJd5Mx$jnZAeL7 z0zAcmWp+hXz~f4lPQ{t3ERNjVp!@|-+Tf5|795qpWtFb$+R+36ipYm_E{{YIj=r~P zhDgerPA@%P9tyu^R;9JfI%G&`CN3RPT++*Iok+3wqDJ1MI#nH8bX*ba7E;mD3oKWx z4F)RmD9X8Gek8e@9)D0vFD16xns4w9V?I0;7-YhszQ6_J9jFZstH=sIsFyTOh7(9 zAUtznYjt^HKxbucb8l;TZ!T(ZK8Ruj6A=OcA_W3k;Mws48zTk_2?z%R0tOWb0tpHW z1Qr4V0RkQY0vCV)3JDNKPT7a2vJ?SJ!vLNlQ2c^gnajF&6@3oWB3pxV$5IRcpLjI| zaM)KtsV+Ogrz|M%1rjc@)SUq`1X|$P@el+6*?$94?5D}G;(n*OHUX4mi&Uz(Ekblp zx#Z#D2=83>X=T>HpnuCrx0%>I6aHAJdUBQ5OT?3M@xWi{Qhp7n_^wx*ZKjl?)arA# 
z!%d7}b^_>O3L)$l(zL3NZ3NBX|2Y=a1RpjR*Z0_r`V_7_DtROw1Yd~QdJwNW-&Y_f z00RXB{0M;!W|?O7&g9-Kp;{9790`zvjA5%QL168tfB>R_(~>4ekzQsQyfA| zp#lMz98D=hpP1>ygj2Rt)a9rmuw9RtI<#`f)c^qc0%I1YGSqC3F1t6W2UVO?<>3=n zob?W;+amTvGA{v(4#|i~1Q-zl00{*GTHx970vikf3JDNKPT7a2vJ?UJWB{P14n<>s o>?00~Fi*{Ffy<{9 literal 0 HcmV?d00001 diff --git a/tests/integration/credentials/__init__.py b/tests/integration/credentials/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/credentials/create_gpg_key.sh b/tests/integration/credentials/create_gpg_key.sh new file mode 100644 index 00000000..b276c20d --- /dev/null +++ b/tests/integration/credentials/create_gpg_key.sh @@ -0,0 +1,12 @@ +#!/usr/bin/sh +haveged +gpg --batch --gen-key <<-EOF +%echo Generating a standard key +Key-Type: DSA +Key-Length: 1024 +Subkey-Type: ELG-E +Subkey-Length: 1024 +Name-Real: Sakuya Izayoi +Name-Email: sakuya@gensokyo.jp +Expire-Date: 0 +EOF \ No newline at end of file diff --git a/tests/integration/credentials/store_test.py b/tests/integration/credentials/store_test.py new file mode 100644 index 00000000..dd543e24 --- /dev/null +++ b/tests/integration/credentials/store_test.py @@ -0,0 +1,87 @@ +import os +import random +import sys + +import pytest +import six +from distutils.spawn import find_executable + +from docker.credentials import ( + CredentialsNotFound, Store, StoreError, DEFAULT_LINUX_STORE, + DEFAULT_OSX_STORE +) + + +class TestStore(object): + def teardown_method(self): + for server in self.tmp_keys: + try: + self.store.erase(server) + except StoreError: + pass + + def setup_method(self): + self.tmp_keys = [] + if sys.platform.startswith('linux'): + if find_executable('docker-credential-' + DEFAULT_LINUX_STORE): + self.store = Store(DEFAULT_LINUX_STORE) + elif find_executable('docker-credential-pass'): + self.store = Store('pass') + else: + raise Exception('No supported docker-credential store in PATH') + elif sys.platform.startswith('darwin'): + self.store = 
Store(DEFAULT_OSX_STORE) + + def get_random_servername(self): + res = 'pycreds_test_{:x}'.format(random.getrandbits(32)) + self.tmp_keys.append(res) + return res + + def test_store_and_get(self): + key = self.get_random_servername() + self.store.store(server=key, username='user', secret='pass') + data = self.store.get(key) + assert data == { + 'ServerURL': key, + 'Username': 'user', + 'Secret': 'pass' + } + + def test_get_nonexistent(self): + key = self.get_random_servername() + with pytest.raises(CredentialsNotFound): + self.store.get(key) + + def test_store_and_erase(self): + key = self.get_random_servername() + self.store.store(server=key, username='user', secret='pass') + self.store.erase(key) + with pytest.raises(CredentialsNotFound): + self.store.get(key) + + def test_unicode_strings(self): + key = self.get_random_servername() + key = six.u(key) + self.store.store(server=key, username='user', secret='pass') + data = self.store.get(key) + assert data + self.store.erase(key) + with pytest.raises(CredentialsNotFound): + self.store.get(key) + + def test_list(self): + names = (self.get_random_servername(), self.get_random_servername()) + self.store.store(names[0], username='sakuya', secret='izayoi') + self.store.store(names[1], username='reimu', secret='hakurei') + data = self.store.list() + assert names[0] in data + assert data[names[0]] == 'sakuya' + assert names[1] in data + assert data[names[1]] == 'reimu' + + def test_execute_with_env_override(self): + self.store.exe = 'env' + self.store.environment = {'FOO': 'bar'} + data = self.store._execute('--null', '') + assert b'\0FOO=bar\0' in data + assert 'FOO' not in os.environ diff --git a/tests/integration/credentials/utils_test.py b/tests/integration/credentials/utils_test.py new file mode 100644 index 00000000..ad55f321 --- /dev/null +++ b/tests/integration/credentials/utils_test.py @@ -0,0 +1,22 @@ +import os + +from docker.credentials.utils import create_environment_dict + +try: + from unittest import mock 
+except ImportError: + import mock + + +@mock.patch.dict(os.environ) +def test_create_environment_dict(): + base = {'FOO': 'bar', 'BAZ': 'foobar'} + os.environ = base + assert create_environment_dict({'FOO': 'baz'}) == { + 'FOO': 'baz', 'BAZ': 'foobar', + } + assert create_environment_dict({'HELLO': 'world'}) == { + 'FOO': 'bar', 'BAZ': 'foobar', 'HELLO': 'world', + } + + assert os.environ == base diff --git a/tests/unit/auth_test.py b/tests/unit/auth_test.py index dc4d6f59..d46da503 100644 --- a/tests/unit/auth_test.py +++ b/tests/unit/auth_test.py @@ -9,8 +9,7 @@ import shutil import tempfile import unittest -from docker import auth, errors -import dockerpycreds +from docker import auth, credentials, errors import pytest try: @@ -661,7 +660,7 @@ class CredstoreTest(unittest.TestCase): } -class InMemoryStore(dockerpycreds.Store): +class InMemoryStore(credentials.Store): def __init__(self, *args, **kwargs): self.__store = {} @@ -669,7 +668,7 @@ class InMemoryStore(dockerpycreds.Store): try: return self.__store[server] except KeyError: - raise dockerpycreds.errors.CredentialsNotFound() + raise credentials.errors.CredentialsNotFound() def store(self, server, username, secret): self.__store[server] = { From 4c45067df9a71181b8cc090ef562f48842ded80b Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 9 Jan 2019 16:27:55 -0800 Subject: [PATCH 35/56] New Jenkinsfile build instructions Signed-off-by: Joffrey F --- Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 8724c10f..e618c5dd 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -24,8 +24,8 @@ def buildImages = { -> imageNamePy2 = "${imageNameBase}:py2-${gitCommit()}" imageNamePy3 = "${imageNameBase}:py3-${gitCommit()}" - buildImage(imageNamePy2, ".", "py2.7") - buildImage(imageNamePy3, "-f Dockerfile-py3 .", "py3.6") + buildImage(imageNamePy2, "-f tests/Dockerfile --build-arg PYTHON_VERSION=2.7 .", "py2.7") + buildImage(imageNamePy3, "-f tests/Dockerfile 
--build-arg PYTHON_VERSION=3.6 .", "py3.6") } } } From b06e437da89ec391d72de2158f40f2b1a37dbc43 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Tue, 30 Apr 2019 23:47:09 -0700 Subject: [PATCH 36/56] Avoid demux test flakiness Signed-off-by: Joffrey F --- tests/integration/api_exec_test.py | 48 +++++++++++++----------------- 1 file changed, 21 insertions(+), 27 deletions(-) diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index e6079eb3..602e69a3 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -1,10 +1,10 @@ +from docker.utils.proxy import ProxyConfig from docker.utils.socket import next_frame_header from docker.utils.socket import read_exactly -from docker.utils.proxy import ProxyConfig -from .base import BaseAPIIntegrationTest, BUSYBOX +from .base import BUSYBOX, BaseAPIIntegrationTest from ..helpers import ( - requires_api_version, ctrl_with, assert_cat_socket_detached_with_keys + assert_cat_socket_detached_with_keys, ctrl_with, requires_api_version, ) @@ -125,9 +125,6 @@ class ExecTest(BaseAPIIntegrationTest): script = ' ; '.join([ # Write something on stdout 'echo hello out', - # Busybox's sleep does not handle sub-second times. - # This loops takes ~0.3 second to execute on my machine. 
- 'for i in $(seq 1 50000); do echo $i>/dev/null; done', # Write something on stderr 'echo hello err >&2']) cmd = 'sh -c "{}"'.format(script) @@ -135,15 +132,15 @@ class ExecTest(BaseAPIIntegrationTest): # tty=False, stream=False, demux=False res = self.client.exec_create(id, cmd) exec_log = self.client.exec_start(res) - assert exec_log == b'hello out\nhello err\n' + assert 'hello out\n' in exec_log + assert 'hello err\n' in exec_log # tty=False, stream=True, demux=False res = self.client.exec_create(id, cmd) - exec_log = self.client.exec_start(res, stream=True) - assert next(exec_log) == b'hello out\n' - assert next(exec_log) == b'hello err\n' - with self.assertRaises(StopIteration): - next(exec_log) + exec_log = list(self.client.exec_start(res, stream=True)) + assert len(exec_log) == 2 + assert 'hello out\n' in exec_log + assert 'hello err\n' in exec_log # tty=False, stream=False, demux=True res = self.client.exec_create(id, cmd) @@ -152,11 +149,10 @@ class ExecTest(BaseAPIIntegrationTest): # tty=False, stream=True, demux=True res = self.client.exec_create(id, cmd) - exec_log = self.client.exec_start(res, demux=True, stream=True) - assert next(exec_log) == (b'hello out\n', None) - assert next(exec_log) == (None, b'hello err\n') - with self.assertRaises(StopIteration): - next(exec_log) + exec_log = list(self.client.exec_start(res, demux=True, stream=True)) + assert len(exec_log) == 2 + assert (b'hello out\n', None) in exec_log + assert (None, b'hello err\n') in exec_log # tty=True, stream=False, demux=False res = self.client.exec_create(id, cmd, tty=True) @@ -165,11 +161,10 @@ class ExecTest(BaseAPIIntegrationTest): # tty=True, stream=True, demux=False res = self.client.exec_create(id, cmd, tty=True) - exec_log = self.client.exec_start(res, stream=True) - assert next(exec_log) == b'hello out\r\n' - assert next(exec_log) == b'hello err\r\n' - with self.assertRaises(StopIteration): - next(exec_log) + exec_log = list(self.client.exec_start(res, stream=True)) + assert 
len(exec_log) == 2 + assert 'hello out\r\n' in exec_log + assert 'hello err\r\n' in exec_log # tty=True, stream=False, demux=True res = self.client.exec_create(id, cmd, tty=True) @@ -178,11 +173,10 @@ class ExecTest(BaseAPIIntegrationTest): # tty=True, stream=True, demux=True res = self.client.exec_create(id, cmd, tty=True) - exec_log = self.client.exec_start(res, demux=True, stream=True) - assert next(exec_log) == (b'hello out\r\n', None) - assert next(exec_log) == (b'hello err\r\n', None) - with self.assertRaises(StopIteration): - next(exec_log) + exec_log = list(self.client.exec_start(res, demux=True, stream=True)) + assert len(exec_log) == 2 + assert (b'hello out\r\n', None) in exec_log + assert (b'hello err\r\n', None) in exec_log def test_exec_start_socket(self): container = self.client.create_container(BUSYBOX, 'cat', From 073a21c28a11c980ec43018b12677fbcecbdc90d Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 00:20:40 -0700 Subject: [PATCH 37/56] Separate into individual tests Signed-off-by: Joffrey F --- tests/integration/api_exec_test.py | 140 ++++++++++++++++------------- 1 file changed, 77 insertions(+), 63 deletions(-) diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index 602e69a3..b9310d65 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -115,69 +115,6 @@ class ExecTest(BaseAPIIntegrationTest): res += chunk assert res == b'hello\nworld\n' - def test_exec_command_demux(self): - container = self.client.create_container( - BUSYBOX, 'cat', detach=True, stdin_open=True) - id = container['Id'] - self.client.start(id) - self.tmp_containers.append(id) - - script = ' ; '.join([ - # Write something on stdout - 'echo hello out', - # Write something on stderr - 'echo hello err >&2']) - cmd = 'sh -c "{}"'.format(script) - - # tty=False, stream=False, demux=False - res = self.client.exec_create(id, cmd) - exec_log = self.client.exec_start(res) - assert 'hello out\n' in 
exec_log - assert 'hello err\n' in exec_log - - # tty=False, stream=True, demux=False - res = self.client.exec_create(id, cmd) - exec_log = list(self.client.exec_start(res, stream=True)) - assert len(exec_log) == 2 - assert 'hello out\n' in exec_log - assert 'hello err\n' in exec_log - - # tty=False, stream=False, demux=True - res = self.client.exec_create(id, cmd) - exec_log = self.client.exec_start(res, demux=True) - assert exec_log == (b'hello out\n', b'hello err\n') - - # tty=False, stream=True, demux=True - res = self.client.exec_create(id, cmd) - exec_log = list(self.client.exec_start(res, demux=True, stream=True)) - assert len(exec_log) == 2 - assert (b'hello out\n', None) in exec_log - assert (None, b'hello err\n') in exec_log - - # tty=True, stream=False, demux=False - res = self.client.exec_create(id, cmd, tty=True) - exec_log = self.client.exec_start(res) - assert exec_log == b'hello out\r\nhello err\r\n' - - # tty=True, stream=True, demux=False - res = self.client.exec_create(id, cmd, tty=True) - exec_log = list(self.client.exec_start(res, stream=True)) - assert len(exec_log) == 2 - assert 'hello out\r\n' in exec_log - assert 'hello err\r\n' in exec_log - - # tty=True, stream=False, demux=True - res = self.client.exec_create(id, cmd, tty=True) - exec_log = self.client.exec_start(res, demux=True) - assert exec_log == (b'hello out\r\nhello err\r\n', None) - - # tty=True, stream=True, demux=True - res = self.client.exec_create(id, cmd, tty=True) - exec_log = list(self.client.exec_start(res, demux=True, stream=True)) - assert len(exec_log) == 2 - assert (b'hello out\r\n', None) in exec_log - assert (b'hello err\r\n', None) in exec_log - def test_exec_start_socket(self): container = self.client.create_container(BUSYBOX, 'cat', detach=True, stdin_open=True) @@ -307,3 +244,80 @@ class ExecTest(BaseAPIIntegrationTest): self.addCleanup(sock.close) assert_cat_socket_detached_with_keys(sock, [ctrl_with('x')]) + + +class ExecDemuxTest(BaseAPIIntegrationTest): + cmd 
= 'sh -c "{}"'.format(' ; '.join([ + # Write something on stdout + 'echo hello out', + # Busybox's sleep does not handle sub-second times. + # This loops takes ~0.3 second to execute on my machine. + 'for i in $(seq 1 50000); do echo $i>/dev/null; done', + # Write something on stderr + 'echo hello err >&2']) + ) + + def setUp(self): + super(ExecDemuxTest, self).setUp() + self.container = self.client.create_container( + BUSYBOX, 'cat', detach=True, stdin_open=True + ) + self.client.start(self.container) + self.tmp_containers.append(self.container) + + def test_exec_command_no_stream_no_demux(self): + # tty=False, stream=False, demux=False + res = self.client.exec_create(self.container, self.cmd) + exec_log = self.client.exec_start(res) + assert b'hello out\n' in exec_log + assert b'hello err\n' in exec_log + + def test_exec_command_stream_no_demux(self): + # tty=False, stream=True, demux=False + res = self.client.exec_create(self.container, self.cmd) + exec_log = list(self.client.exec_start(res, stream=True)) + assert len(exec_log) == 2 + assert b'hello out\n' in exec_log + assert b'hello err\n' in exec_log + + def test_exec_command_no_stream_demux(self): + # tty=False, stream=False, demux=True + res = self.client.exec_create(self.container, self.cmd) + exec_log = self.client.exec_start(res, demux=True) + assert exec_log == (b'hello out\n', b'hello err\n') + + def test_exec_command_stream_demux(self): + # tty=False, stream=True, demux=True + res = self.client.exec_create(self.container, self.cmd) + exec_log = list(self.client.exec_start(res, demux=True, stream=True)) + assert len(exec_log) == 2 + assert (b'hello out\n', None) in exec_log + assert (None, b'hello err\n') in exec_log + + def test_exec_command_tty_no_stream_no_demux(self): + # tty=True, stream=False, demux=False + res = self.client.exec_create(self.container, self.cmd, tty=True) + exec_log = self.client.exec_start(res) + assert exec_log == b'hello out\r\nhello err\r\n' + + def 
test_exec_command_tty_stream_no_demux(self): + # tty=True, stream=True, demux=False + res = self.client.exec_create(self.container, self.cmd, tty=True) + exec_log = list(self.client.exec_start(res, stream=True)) + assert len(exec_log) == 2 + assert b'hello out\r\n' in exec_log + assert b'hello err\r\n' in exec_log + + def test_exec_command_tty_no_stream_demux(self): + # tty=True, stream=False, demux=True + res = self.client.exec_create(self.container, self.cmd, tty=True) + exec_log = self.client.exec_start(res, demux=True) + assert exec_log == (b'hello out\r\nhello err\r\n', None) + + def test_exec_command_tty_stream_demux(self): + # tty=True, stream=True, demux=True + res = self.client.exec_create(self.container, self.cmd, tty=True) + exec_log = list(self.client.exec_start(res, demux=True, stream=True)) + assert len(exec_log) == 2 + assert (b'hello out\r\n', None) in exec_log + assert (b'hello err\r\n', None) in exec_log From 2e67cd1cc7ec4b00afadb9609bb235e3a2f3a0e3 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 22 Apr 2019 18:01:11 -0700 Subject: [PATCH 38/56] Improve socket_detached test helper to support future versions of the daemon Signed-off-by: Joffrey F --- tests/helpers.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index f912bd8d..9e5d2ab4 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -119,13 +119,18 @@ def assert_cat_socket_detached_with_keys(sock, inputs): # If we're using a Unix socket, the sock.send call will fail with a # BrokenPipeError ; INET sockets will just stop receiving / sending data # but will not raise an error - if getattr(sock, 'family', -9) == getattr(socket, 'AF_UNIX', -1): - with pytest.raises(socket.error): - sock.sendall(b'make sure the socket is closed\n') - elif isinstance(sock, paramiko.Channel): + if isinstance(sock, paramiko.Channel): with pytest.raises(OSError): sock.sendall(b'make sure the socket is closed\n') else: + if getattr(sock, 
'family', -9) == getattr(socket, 'AF_UNIX', -1): + # We do not want to use pytest.raises here because future versions + # of the daemon no longer cause this to raise an error. + try: + sock.sendall(b'make sure the socket is closed\n') + except socket.error: + return + sock.sendall(b"make sure the socket is closed\n") data = sock.recv(128) # New in 18.06: error message is broadcast over the socket when reading From a2a2d100e841b7bb37b9c3d805ca25260f0b3bda Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Mon, 22 Apr 2019 18:03:19 -0700 Subject: [PATCH 39/56] Reorder imports Signed-off-by: Joffrey F --- tests/helpers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/helpers.py b/tests/helpers.py index 9e5d2ab4..f344e1c3 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -2,16 +2,16 @@ import functools import os import os.path import random +import re +import socket import tarfile import tempfile import time -import re -import six -import socket import docker import paramiko import pytest +import six def make_tree(dirs, files): From 62c8bcbbb600cbe26e3e12ed95207ffe63c40fc8 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 00:47:16 -0700 Subject: [PATCH 40/56] Increase timeout on test with long sleeps Signed-off-by: Joffrey F --- tests/integration/api_container_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 730c9eeb..9b770c71 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1251,7 +1251,7 @@ class AttachContainerTest(BaseAPIIntegrationTest): output = self.client.attach(container, stream=False, logs=True) assert output == 'hello\n'.encode(encoding='ascii') - @pytest.mark.timeout(5) + @pytest.mark.timeout(10) @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), reason='No cancellable streams over SSH') def 
test_attach_stream_and_cancel(self): From 34ffc5686546343eaa27d1fb8f9432237bdd1886 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 00:59:35 -0700 Subject: [PATCH 41/56] Streaming TTY messages sometimes get truncated. Handle gracefully in demux tests Signed-off-by: Joffrey F --- tests/integration/api_exec_test.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index b9310d65..c7e7799b 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -304,9 +304,13 @@ class ExecDemuxTest(BaseAPIIntegrationTest): # tty=True, stream=True, demux=False res = self.client.exec_create(self.container, self.cmd, tty=True) exec_log = list(self.client.exec_start(res, stream=True)) - assert len(exec_log) == 2 assert b'hello out\r\n' in exec_log - assert b'hello err\r\n' in exec_log + if len(exec_log) == 2: + assert b'hello err\r\n' in exec_log + else: + assert len(exec_log) == 3 + assert b'hello err' in exec_log + assert b'\r\n' in exec_log def test_exec_command_tty_no_stream_demux(self): # tty=True, stream=False, demux=True @@ -318,6 +322,10 @@ class ExecDemuxTest(BaseAPIIntegrationTest): # tty=True, stream=True, demux=True res = self.client.exec_create(self.container, self.cmd, tty=True) exec_log = list(self.client.exec_start(res, demux=True, stream=True)) - assert len(exec_log) == 2 assert (b'hello out\r\n', None) in exec_log - assert (b'hello err\r\n', None) in exec_log + if len(exec_log) == 2: + assert (b'hello err\r\n', None) in exec_log + else: + assert len(exec_log) == 3 + assert (b'hello err', None) in exec_log + assert (b'\r\n', None) in exec_log From 1a4881acd9d8b84135ce9d71ff01325308d8a6b0 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 01:15:41 -0700 Subject: [PATCH 42/56] Improve low_timeout test resilience Signed-off-by: Joffrey F --- tests/integration/api_container_test.py | 8 ++++---- 1 file changed, 
4 insertions(+), 4 deletions(-) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 9b770c71..df405ef9 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -18,7 +18,7 @@ import six from .base import BUSYBOX, BaseAPIIntegrationTest from .. import helpers from ..helpers import ( - requires_api_version, ctrl_with, assert_cat_socket_detached_with_keys + assert_cat_socket_detached_with_keys, ctrl_with, requires_api_version, ) @@ -1163,10 +1163,10 @@ class RestartContainerTest(BaseAPIIntegrationTest): def test_restart_with_low_timeout(self): container = self.client.create_container(BUSYBOX, ['sleep', '9999']) self.client.start(container) - self.client.timeout = 1 - self.client.restart(container, timeout=3) + self.client.timeout = 3 + self.client.restart(container, timeout=1) self.client.timeout = None - self.client.restart(container, timeout=3) + self.client.restart(container, timeout=1) self.client.kill(container) def test_restart_with_dict_instead_of_id(self): From acd7a8f43056007d8ae5df3d8156c34b837d97b1 Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Thu, 28 Mar 2019 10:48:22 +0100 Subject: [PATCH 43/56] Return node id on swarm init Signed-off-by: Hannes Ljungberg --- docker/api/swarm.py | 5 ++--- docker/models/swarm.py | 6 +++--- tests/integration/api_swarm_test.py | 4 +++- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index bab91ee4..ea4c1e71 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -117,7 +117,7 @@ class SwarmApiMixin(object): networks created from the default subnet pool. Default: None Returns: - ``True`` if successful. + (str): The ID of the created node. 
Raises: :py:class:`docker.errors.APIError` @@ -155,8 +155,7 @@ class SwarmApiMixin(object): 'Spec': swarm_spec, } response = self._post_json(url, data=data) - self._raise_for_status(response) - return True + return self._result(response, json=True) @utils.minimum_version('1.24') def inspect_swarm(self): diff --git a/docker/models/swarm.py b/docker/models/swarm.py index cb27467d..f78e8e16 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -96,7 +96,7 @@ class Swarm(Model): created in the orchestrator. Returns: - ``True`` if the request went through. + (str): The ID of the created node. Raises: :py:class:`docker.errors.APIError` @@ -120,9 +120,9 @@ class Swarm(Model): 'subnet_size': subnet_size } init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs) - self.client.api.init_swarm(**init_kwargs) + node_id = self.client.api.init_swarm(**init_kwargs) self.reload() - return True + return node_id def join(self, *args, **kwargs): return self.client.api.join_swarm(*args, **kwargs) diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index 37f5fa79..94ab2a63 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -186,12 +186,14 @@ class SwarmTest(BaseAPIIntegrationTest): @requires_api_version('1.24') def test_inspect_node(self): - assert self.init_swarm() + node_id = self.init_swarm() + assert node_id nodes_list = self.client.nodes() assert len(nodes_list) == 1 node = nodes_list[0] node_data = self.client.inspect_node(node['ID']) assert node['ID'] == node_data['ID'] + assert node_id == node['ID'] assert node['Version'] == node_data['Version'] @requires_api_version('1.24') From c7b9cae0a0430ec9e9ce95bb872755dfe61d4f87 Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Sun, 31 Mar 2019 23:10:09 +0200 Subject: [PATCH 44/56] Add swarm support for data_addr_path Signed-off-by: Hannes Ljungberg --- docker/api/swarm.py | 35 ++++++++++++++++++++++++----- 
docker/models/swarm.py | 7 ++++-- tests/integration/api_swarm_test.py | 4 ++++ 3 files changed, 38 insertions(+), 8 deletions(-) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index bab91ee4..0bd6d128 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -84,7 +84,8 @@ class SwarmApiMixin(object): @utils.minimum_version('1.24') def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377', force_new_cluster=False, swarm_spec=None, - default_addr_pool=None, subnet_size=None): + default_addr_pool=None, subnet_size=None, + data_path_addr=None): """ Initialize a new Swarm using the current connected engine as the first node. @@ -115,6 +116,8 @@ class SwarmApiMixin(object): Default: None subnet_size (int): SubnetSize specifies the subnet size of the networks created from the default subnet pool. Default: None + data_path_addr (string): Address or interface to use for data path + traffic. For example, 192.168.1.1, or an interface, like eth0. Returns: ``True`` if successful. @@ -154,6 +157,15 @@ class SwarmApiMixin(object): 'ForceNewCluster': force_new_cluster, 'Spec': swarm_spec, } + + if data_path_addr is not None: + if utils.version_lt(self._version, '1.30'): + raise errors.InvalidVersion( + 'Data address path is only available for ' + 'API version >= 1.30' + ) + data['DataPathAddr'] = data_path_addr + response = self._post_json(url, data=data) self._raise_for_status(response) return True @@ -194,7 +206,7 @@ class SwarmApiMixin(object): @utils.minimum_version('1.24') def join_swarm(self, remote_addrs, join_token, listen_addr='0.0.0.0:2377', - advertise_addr=None): + advertise_addr=None, data_path_addr=None): """ Make this Engine join a swarm that has already been created. @@ -213,6 +225,8 @@ class SwarmApiMixin(object): the port number from the listen address is used. If AdvertiseAddr is not specified, it will be automatically detected when possible. Default: ``None`` + data_path_addr (string): Address or interface to use for data path + traffic. 
For example, 192.168.1.1, or an interface, like eth0. Returns: ``True`` if the request went through. @@ -222,11 +236,20 @@ class SwarmApiMixin(object): If the server returns an error. """ data = { - "RemoteAddrs": remote_addrs, - "ListenAddr": listen_addr, - "JoinToken": join_token, - "AdvertiseAddr": advertise_addr, + 'RemoteAddrs': remote_addrs, + 'ListenAddr': listen_addr, + 'JoinToken': join_token, + 'AdvertiseAddr': advertise_addr, } + + if data_path_addr is not None: + if utils.version_lt(self._version, '1.30'): + raise errors.InvalidVersion( + 'Data address path is only available for ' + 'API version >= 1.30' + ) + data['DataPathAddr'] = data_path_addr + url = self._url('/swarm/join') response = self._post_json(url, data=data) self._raise_for_status(response) diff --git a/docker/models/swarm.py b/docker/models/swarm.py index cb27467d..386d23d3 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -35,7 +35,7 @@ class Swarm(Model): def init(self, advertise_addr=None, listen_addr='0.0.0.0:2377', force_new_cluster=False, default_addr_pool=None, - subnet_size=None, **kwargs): + subnet_size=None, data_path_addr=None, **kwargs): """ Initialize a new swarm on this Engine. @@ -63,6 +63,8 @@ class Swarm(Model): Default: None subnet_size (int): SubnetSize specifies the subnet size of the networks created from the default subnet pool. Default: None + data_path_addr (string): Address or interface to use for data path + traffic. For example, 192.168.1.1, or an interface, like eth0. task_history_retention_limit (int): Maximum number of tasks history stored. snapshot_interval (int): Number of logs entries between snapshot. 
@@ -117,7 +119,8 @@ class Swarm(Model): 'listen_addr': listen_addr, 'force_new_cluster': force_new_cluster, 'default_addr_pool': default_addr_pool, - 'subnet_size': subnet_size + 'subnet_size': subnet_size, + 'data_path_addr': data_path_addr, } init_kwargs['swarm_spec'] = self.client.api.create_swarm_spec(**kwargs) self.client.api.init_swarm(**init_kwargs) diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index 37f5fa79..5e9aea1e 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -233,3 +233,7 @@ class SwarmTest(BaseAPIIntegrationTest): self.client.remove_node(node_id, True) assert e.value.response.status_code >= 400 + + @requires_api_version('1.30') + def test_init_swarm_data_path_addr(self): + assert self.init_swarm(data_path_addr='eth0') From 110c6769c93cd9c8bf20cc88c520a9f97afc040e Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Sun, 31 Mar 2019 23:10:23 +0200 Subject: [PATCH 45/56] Add test for join on already joined swarm Signed-off-by: Hannes Ljungberg --- tests/integration/models_swarm_test.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/integration/models_swarm_test.py b/tests/integration/models_swarm_test.py index f39f0d34..6c1836dc 100644 --- a/tests/integration/models_swarm_test.py +++ b/tests/integration/models_swarm_test.py @@ -31,3 +31,15 @@ class SwarmTest(unittest.TestCase): cm.value.response.status_code == 406 or cm.value.response.status_code == 503 ) + + def test_join_on_already_joined_swarm(self): + client = docker.from_env(version=TEST_API_VERSION) + client.swarm.init() + join_token = client.swarm.attrs['JoinTokens']['Manager'] + with pytest.raises(docker.errors.APIError) as cm: + client.swarm.join( + remote_addrs=['127.0.0.1'], + join_token=join_token, + ) + assert cm.value.response.status_code == 503 + assert 'This node is already part of a swarm.' 
in cm.value.explanation From eba8345c3726ea0ea40436507264229bf4ab56d0 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 02:16:42 -0700 Subject: [PATCH 46/56] Update some test dependencies / default values with newer versions Signed-off-by: Joffrey F --- Makefile | 2 +- tests/integration/api_container_test.py | 28 ++++++++++++------------- tests/integration/api_exec_test.py | 20 +++++++++--------- tests/integration/base.py | 5 ++--- 4 files changed, 26 insertions(+), 29 deletions(-) diff --git a/Makefile b/Makefile index 434d40e1..d64e618e 100644 --- a/Makefile +++ b/Makefile @@ -42,7 +42,7 @@ integration-test-py3: build-py3 docker run -t --rm -v /var/run/docker.sock:/var/run/docker.sock docker-sdk-python3 py.test tests/integration/${file} TEST_API_VERSION ?= 1.35 -TEST_ENGINE_VERSION ?= 17.12.0-ce +TEST_ENGINE_VERSION ?= 18.09.5 .PHONY: setup-network setup-network: diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index df405ef9..1190d91e 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -5,21 +5,20 @@ import tempfile import threading from datetime import datetime -import docker -from docker.constants import IS_WINDOWS_PLATFORM -from docker.utils.socket import next_frame_header -from docker.utils.socket import read_exactly - import pytest - import requests import six -from .base import BUSYBOX, BaseAPIIntegrationTest +import docker from .. 
import helpers -from ..helpers import ( - assert_cat_socket_detached_with_keys, ctrl_with, requires_api_version, -) +from ..helpers import assert_cat_socket_detached_with_keys +from ..helpers import ctrl_with +from ..helpers import requires_api_version +from .base import BaseAPIIntegrationTest +from .base import BUSYBOX +from docker.constants import IS_WINDOWS_PLATFORM +from docker.utils.socket import next_frame_header +from docker.utils.socket import read_exactly class ListContainersTest(BaseAPIIntegrationTest): @@ -38,7 +37,7 @@ class ListContainersTest(BaseAPIIntegrationTest): assert 'Command' in retrieved assert retrieved['Command'] == six.text_type('true') assert 'Image' in retrieved - assert re.search(r'busybox:.*', retrieved['Image']) + assert re.search(r'alpine:.*', retrieved['Image']) assert 'Status' in retrieved @@ -368,10 +367,9 @@ class CreateContainerTest(BaseAPIIntegrationTest): ) self.tmp_containers.append(container['Id']) config = self.client.inspect_container(container['Id']) - assert ( - sorted(config['Config']['Env']) == - sorted(['Foo', 'Other=one', 'Blank=']) - ) + assert 'Foo' in config['Config']['Env'] + assert 'Other=one' in config['Config']['Env'] + assert 'Blank=' in config['Config']['Env'] @requires_api_version('1.22') def test_create_with_tmpfs(self): diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index c7e7799b..80b63ffc 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -1,12 +1,12 @@ +from ..helpers import assert_cat_socket_detached_with_keys +from ..helpers import ctrl_with +from ..helpers import requires_api_version +from .base import BaseAPIIntegrationTest +from .base import BUSYBOX from docker.utils.proxy import ProxyConfig from docker.utils.socket import next_frame_header from docker.utils.socket import read_exactly -from .base import BUSYBOX, BaseAPIIntegrationTest -from ..helpers import ( - assert_cat_socket_detached_with_keys, ctrl_with, 
requires_api_version, -) - class ExecTest(BaseAPIIntegrationTest): def test_execute_command_with_proxy_env(self): @@ -81,11 +81,11 @@ class ExecTest(BaseAPIIntegrationTest): self.client.start(id) self.tmp_containers.append(id) - res = self.client.exec_create(id, 'whoami', user='default') + res = self.client.exec_create(id, 'whoami', user='postgres') assert 'Id' in res exec_log = self.client.exec_start(res) - assert exec_log == b'default\n' + assert exec_log == b'postgres\n' def test_exec_command_as_root(self): container = self.client.create_container(BUSYBOX, 'cat', @@ -188,9 +188,9 @@ class ExecTest(BaseAPIIntegrationTest): self.tmp_containers.append(container) self.client.start(container) - res = self.client.exec_create(container, 'pwd', workdir='/var/www') + res = self.client.exec_create(container, 'pwd', workdir='/var/opt') exec_log = self.client.exec_start(res) - assert exec_log == b'/var/www\n' + assert exec_log == b'/var/opt\n' def test_detach_with_default(self): container = self.client.create_container( @@ -252,7 +252,7 @@ class ExecDemuxTest(BaseAPIIntegrationTest): 'echo hello out', # Busybox's sleep does not handle sub-second times. # This loops takes ~0.3 second to execute on my machine. - 'for i in $(seq 1 50000); do echo $i>/dev/null; done', + 'sleep 0.5', # Write something on stderr 'echo hello err >&2']) ) diff --git a/tests/integration/base.py b/tests/integration/base.py index 262769de..0ebf5b99 100644 --- a/tests/integration/base.py +++ b/tests/integration/base.py @@ -3,11 +3,10 @@ import shutil import unittest import docker +from .. import helpers from docker.utils import kwargs_from_env -from .. 
import helpers - -BUSYBOX = 'busybox:buildroot-2014.02' +BUSYBOX = 'alpine:3.9.3' # FIXME: this should probably be renamed TEST_API_VERSION = os.environ.get('DOCKER_TEST_API_VERSION') From 532c62ee51aacbf0416ae2bc2cf53212ad6eb9db Mon Sep 17 00:00:00 2001 From: Hannes Ljungberg Date: Thu, 28 Mar 2019 10:04:18 +0100 Subject: [PATCH 47/56] Add support for rotate_manager_unlock_key Signed-off-by: Hannes Ljungberg --- docker/api/swarm.py | 22 +++++++++++++++++----- docker/models/swarm.py | 8 +++++--- tests/integration/api_swarm_test.py | 13 +++++++++++++ 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/docker/api/swarm.py b/docker/api/swarm.py index 13e4fd5c..897f08e4 100644 --- a/docker/api/swarm.py +++ b/docker/api/swarm.py @@ -406,8 +406,10 @@ class SwarmApiMixin(object): return True @utils.minimum_version('1.24') - def update_swarm(self, version, swarm_spec=None, rotate_worker_token=False, - rotate_manager_token=False): + def update_swarm(self, version, swarm_spec=None, + rotate_worker_token=False, + rotate_manager_token=False, + rotate_manager_unlock_key=False): """ Update the Swarm's configuration @@ -421,6 +423,8 @@ class SwarmApiMixin(object): ``False``. rotate_manager_token (bool): Rotate the manager join token. Default: ``False``. + rotate_manager_unlock_key (bool): Rotate the manager unlock key. + Default: ``False``. Returns: ``True`` if the request went through. @@ -429,12 +433,20 @@ class SwarmApiMixin(object): :py:class:`docker.errors.APIError` If the server returns an error. 
""" - url = self._url('/swarm/update') - response = self._post_json(url, data=swarm_spec, params={ + params = { 'rotateWorkerToken': rotate_worker_token, 'rotateManagerToken': rotate_manager_token, 'version': version - }) + } + if rotate_manager_unlock_key: + if utils.version_lt(self._version, '1.25'): + raise errors.InvalidVersion( + 'Rotate manager unlock key ' + 'is only available for API version >= 1.25' + ) + params['rotateManagerUnlockKey'] = rotate_manager_unlock_key + + response = self._post_json(url, data=swarm_spec, params=params) self._raise_for_status(response) return True diff --git a/docker/models/swarm.py b/docker/models/swarm.py index f8c5fff6..755c17db 100644 --- a/docker/models/swarm.py +++ b/docker/models/swarm.py @@ -151,7 +151,7 @@ class Swarm(Model): unlock.__doc__ = APIClient.unlock_swarm.__doc__ def update(self, rotate_worker_token=False, rotate_manager_token=False, - **kwargs): + rotate_manager_unlock_key=False, **kwargs): """ Update the swarm's configuration. @@ -164,7 +164,8 @@ class Swarm(Model): ``False``. rotate_manager_token (bool): Rotate the manager join token. Default: ``False``. - + rotate_manager_unlock_key (bool): Rotate the manager unlock key. + Default: ``False``. Raises: :py:class:`docker.errors.APIError` If the server returns an error. 
@@ -178,5 +179,6 @@ class Swarm(Model): version=self.version, swarm_spec=self.client.api.create_swarm_spec(**kwargs), rotate_worker_token=rotate_worker_token, - rotate_manager_token=rotate_manager_token + rotate_manager_token=rotate_manager_token, + rotate_manager_unlock_key=rotate_manager_unlock_key ) diff --git a/tests/integration/api_swarm_test.py b/tests/integration/api_swarm_test.py index 5d4086a6..bf809bd0 100644 --- a/tests/integration/api_swarm_test.py +++ b/tests/integration/api_swarm_test.py @@ -236,6 +236,19 @@ class SwarmTest(BaseAPIIntegrationTest): assert e.value.response.status_code >= 400 + @requires_api_version('1.25') + def test_rotate_manager_unlock_key(self): + spec = self.client.create_swarm_spec(autolock_managers=True) + assert self.init_swarm(swarm_spec=spec) + swarm_info = self.client.inspect_swarm() + key_1 = self.client.get_unlock_key() + assert self.client.update_swarm( + version=swarm_info['Version']['Index'], + rotate_manager_unlock_key=True + ) + key_2 = self.client.get_unlock_key() + assert key_1['UnlockKey'] != key_2['UnlockKey'] + @requires_api_version('1.30') def test_init_swarm_data_path_addr(self): assert self.init_swarm(data_path_addr='eth0') From 1aae20d13abde8fa4747d5f9c4f5b5c22d534925 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 19:44:23 -0700 Subject: [PATCH 48/56] Remove obsolete win32-requirements file Signed-off-by: Joffrey F --- win32-requirements.txt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 win32-requirements.txt diff --git a/win32-requirements.txt b/win32-requirements.txt deleted file mode 100644 index bc04b496..00000000 --- a/win32-requirements.txt +++ /dev/null @@ -1 +0,0 @@ --r requirements.txt From bdc954b00963f0b8b6b98f03d7939bd9184d548a Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 19:49:01 -0700 Subject: [PATCH 49/56] Stop supporting EOL Python 3.4 Signed-off-by: Joffrey F --- .travis.yml | 2 -- setup.py | 6 +++--- test-requirements.txt | 3 +-- tox.ini | 2 +- 4 
files changed, 5 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1c837a26..577b893f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,8 +4,6 @@ matrix: include: - python: 2.7 env: TOXENV=py27 - - python: 3.4 - env: TOXENV=py34 - python: 3.5 env: TOXENV=py35 - python: 3.6 diff --git a/setup.py b/setup.py index 3e1afcbe..c29787b6 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,8 @@ from __future__ import print_function import codecs import os -from setuptools import setup, find_packages +from setuptools import find_packages +from setuptools import setup ROOT_DIR = os.path.dirname(__file__) SOURCE_DIR = os.path.join(ROOT_DIR) @@ -71,7 +72,7 @@ setup( install_requires=requirements, tests_require=test_requirements, extras_require=extras_require, - python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*', + python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*', zip_safe=False, test_suite='tests', classifiers=[ @@ -83,7 +84,6 @@ setup( 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', diff --git a/test-requirements.txt b/test-requirements.txt index df369881..b89f6462 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,5 @@ coverage==4.5.2 -flake8==3.6.0; python_version != '3.3' -flake8==3.4.1; python_version == '3.3' +flake8==3.6.0 mock==1.0.1 pytest==4.1.0 pytest-cov==2.6.1 diff --git a/tox.ini b/tox.ini index 5396147e..df797f41 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27, py34, py35, py36, py37, flake8 +envlist = py27, py35, py36, py37, flake8 skipsdist=True [testenv] From 87ee18aa39698f91b24116592abbf817a0d2b738 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 20:47:41 -0700 Subject: [PATCH 50/56] Change use_config_proxy default 
value to True to match CLI behavior Signed-off-by: Joffrey F --- docker/api/build.py | 2 +- docker/api/container.py | 14 ++++++++------ tests/integration/api_exec_test.py | 1 - tests/integration/models_containers_test.py | 13 +++++++------ 4 files changed, 16 insertions(+), 14 deletions(-) diff --git a/docker/api/build.py b/docker/api/build.py index 5176afb3..e0a4ac96 100644 --- a/docker/api/build.py +++ b/docker/api/build.py @@ -20,7 +20,7 @@ class BuildApiMixin(object): decode=False, buildargs=None, gzip=False, shmsize=None, labels=None, cache_from=None, target=None, network_mode=None, squash=None, extra_hosts=None, platform=None, isolation=None, - use_config_proxy=False): + use_config_proxy=True): """ Similar to the ``docker build`` command. Either ``path`` or ``fileobj`` needs to be set. ``path`` can be a local path (to a directory diff --git a/docker/api/container.py b/docker/api/container.py index 94f53ff2..2977f282 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -1,13 +1,15 @@ -import six from datetime import datetime +import six + from .. import errors from .. import utils from ..constants import DEFAULT_DATA_CHUNK_SIZE -from ..types import ( - CancellableStream, ContainerConfig, EndpointConfig, HostConfig, - NetworkingConfig -) +from ..types import CancellableStream +from ..types import ContainerConfig +from ..types import EndpointConfig +from ..types import HostConfig +from ..types import NetworkingConfig class ContainerApiMixin(object): @@ -222,7 +224,7 @@ class ContainerApiMixin(object): mac_address=None, labels=None, stop_signal=None, networking_config=None, healthcheck=None, stop_timeout=None, runtime=None, - use_config_proxy=False): + use_config_proxy=True): """ Creates a container. Parameters are similar to those for the ``docker run`` command except it doesn't support the attach options (``-a``). 
diff --git a/tests/integration/api_exec_test.py b/tests/integration/api_exec_test.py index 80b63ffc..dda0ed90 100644 --- a/tests/integration/api_exec_test.py +++ b/tests/integration/api_exec_test.py @@ -17,7 +17,6 @@ class ExecTest(BaseAPIIntegrationTest): container = self.client.create_container( BUSYBOX, 'cat', detach=True, stdin_open=True, - use_config_proxy=True, ) self.client.start(container) self.tmp_containers.append(container) diff --git a/tests/integration/models_containers_test.py b/tests/integration/models_containers_test.py index 951a08ae..eac4c979 100644 --- a/tests/integration/models_containers_test.py +++ b/tests/integration/models_containers_test.py @@ -2,10 +2,13 @@ import os import tempfile import threading -import docker import pytest -from .base import BaseIntegrationTest, TEST_API_VERSION -from ..helpers import random_name, requires_api_version + +import docker +from ..helpers import random_name +from ..helpers import requires_api_version +from .base import BaseIntegrationTest +from .base import TEST_API_VERSION class ContainerCollectionTest(BaseIntegrationTest): @@ -174,9 +177,7 @@ class ContainerCollectionTest(BaseIntegrationTest): ftp='sakuya.jp:4967' ) - out = client.containers.run( - 'alpine', 'sh -c "env"', use_config_proxy=True - ) + out = client.containers.run('alpine', 'sh -c "env"') assert b'FTP_PROXY=sakuya.jp:4967\n' in out assert b'ftp_proxy=sakuya.jp:4967\n' in out From 7f56f7057c3448373600534fa4cdb1fb04d51524 Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Thu, 2 May 2019 12:46:56 -0700 Subject: [PATCH 51/56] Don't add superfluous arguments Signed-off-by: Joffrey F --- docker/api/container.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/api/container.py b/docker/api/container.py index 2977f282..2dca68a1 100644 --- a/docker/api/container.py +++ b/docker/api/container.py @@ -416,7 +416,7 @@ class ContainerApiMixin(object): if use_config_proxy: environment = self._proxy_configs.inject_proxy_environment( 
environment - ) + ) or None config = self.create_container_config( image, command, hostname, user, detach, stdin_open, tty, From 12d73c6d381760840256a4cbc768933555abb512 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 26 Mar 2019 15:15:40 +0100 Subject: [PATCH 52/56] Xfail test_attach_stream_and_cancel on TLS This test is quite flaky on ssl integration test Signed-off-by: Ulysses Souza --- tests/integration/api_container_test.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/integration/api_container_test.py b/tests/integration/api_container_test.py index 1190d91e..26245c1f 100644 --- a/tests/integration/api_container_test.py +++ b/tests/integration/api_container_test.py @@ -1252,6 +1252,9 @@ class AttachContainerTest(BaseAPIIntegrationTest): @pytest.mark.timeout(10) @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'), reason='No cancellable streams over SSH') + @pytest.mark.xfail(condition=os.environ.get('DOCKER_TLS_VERIFY') or + os.environ.get('DOCKER_CERT_PATH'), + reason='Flaky test on TLS') def test_attach_stream_and_cancel(self): container = self.client.create_container( BUSYBOX, 'sh -c "sleep 2 && echo hello && sleep 60"', From d863f729398911e2f918a1bc822b8f4f32151783 Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Thu, 28 Mar 2019 14:23:19 +0100 Subject: [PATCH 53/56] Bump 3.7.2 Signed-off-by: Ulysses Souza --- docs/change-log.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index 9edfee2f..d7c33611 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,17 @@ Change log ========== +3.7.2 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/59?closed=1) + +### Bugfixes + +* Fix base_url to keep TCP protocol on utils.py by letting the responsability of changing the +protocol to `parse_host` afterwards, letting `base_url` with the original value. 
+* XFAIL test_attach_stream_and_cancel on TLS + 3.7.1 ----- From f6781575c12a8a9aebe1e1ccee4716eaabf88b3d Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 19:58:18 -0700 Subject: [PATCH 54/56] Bump version 4.0.0 Signed-off-by: Joffrey F --- docker/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/version.py b/docker/version.py index 93d068eb..68d64c8a 100644 --- a/docker/version.py +++ b/docker/version.py @@ -1,2 +1,2 @@ -version = "4.0.0-dev" +version = "4.0.0" version_info = tuple([int(d) for d in version.split("-")[0].split(".")]) From 589e76ea3c13d469f141bf89aba3f142789da0ba Mon Sep 17 00:00:00 2001 From: Joffrey F Date: Wed, 1 May 2019 20:38:33 -0700 Subject: [PATCH 55/56] Update changelog for 4.0.0 Signed-off-by: Joffrey F --- docs/change-log.md | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/docs/change-log.md b/docs/change-log.md index d7c33611..53e9f207 100644 --- a/docs/change-log.md +++ b/docs/change-log.md @@ -1,6 +1,43 @@ Change log ========== +4.0.0 +----- + +[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/57?closed=1) + +### Breaking changes + +- Support for Python 3.3 and Python 3.4 has been dropped +- `APIClient.update_service`, `APIClient.init_swarm`, and + `DockerClient.swarm.init` now return a `dict` from the API's response body +- In `APIClient.build` and `DockerClient.images.build`, the `use_config_proxy` + parameter now defaults to True +- `init_path` is no longer a valid parameter for `HostConfig` + +### Features + +- It is now possible to provide `SCTP` ports for port mappings +- `ContainerSpec`s now support the `init` parameter +- `DockerClient.swarm.init` and `APIClient.init_swarm` now support the + `data_path_addr` parameter +- `APIClient.update_swarm` and `DockerClient.swarm.update` now support the + `rotate_manager_unlock_key` parameter +- `APIClient.update_service` returns the API's response body as a `dict` 
+- `APIClient.init_swarm`, and `DockerClient.swarm.init` now return the API's + response body as a `dict` + +### Bugfixes + +- Fixed `PlacementPreference` instances to produce a valid API type +- Fixed a bug where not setting a value for `buildargs` in `build` could cause + the library to attempt accessing attributes of a `None` value +- Fixed a bug where setting the `volume_driver` parameter in + `DockerClient.containers.create` would result in an error +- `APIClient.inspect_distribution` now correctly sets the authentication + headers on the request, allowing it to be used with private repositories + This change also applies to `DockerClient.get_registry_data` + 3.7.2 ----- From 5de5af115563d2f9a647bdeb4234fa440b2da58c Mon Sep 17 00:00:00 2001 From: Ulysses Souza Date: Tue, 14 May 2019 13:16:27 +0200 Subject: [PATCH 56/56] Bump urllib3 -> 1.24.3 Signed-off-by: Ulysses Souza --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index eb66c9f5..70f37e20 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,5 +15,5 @@ pypiwin32==219; sys_platform == 'win32' and python_version < '3.6' pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6' requests==2.20.0 six==1.10.0 -urllib3==1.24.1 +urllib3==1.24.3 websocket-client==0.40.0